Mirror of https://github.com/ansible/awx.git

Compare commits: improve-in… ... priority_t… (18 commits)
| Author | SHA1 | Date |
|---|---|---|
| | d65ab1c5ac | |
| | a2decc7c60 | |
| | 97d03e434e | |
| | 628a0e6a36 | |
| | 8fb5862223 | |
| | 6f7d5ca8a3 | |
| | 0f0f5aa289 | |
| | bc12fa2283 | |
| | 03b37037d6 | |
| | 5668973d70 | |
| | e6434454ce | |
| | 3ba9c026ea | |
| | a206ca22ec | |
| | e961cbe46f | |
| | 0ffe04ed9c | |
| | ee739b5fd9 | |
| | abc04e5c88 | |
| | 5b17e5c9c3 | |
@@ -19,6 +19,8 @@ exclude_also =
 branch = True
 omit =
     awx/main/migrations/*
+    awx/settings/defaults.py
+    awx/settings/*_defaults.py
 source =
     .
 source_pkgs =
.github/workflows/upload_schema.yml (vendored, 25 changes)
@@ -5,6 +5,7 @@ env:
   LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting

 on:
+  workflow_dispatch:
   push:
     branches:
       - devel
@@ -28,12 +29,32 @@ jobs:
       - name: Install python ${{ env.py_version }}
         uses: actions/setup-python@v4
         with:
           python-version: ${{ env.py_version }}

       - name: Log in to registry
         run: |
           echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin

+      - name: Generate placeholder SSH private key if SSH auth for private repos is not needed
+        id: generate_key
+        shell: bash
+        run: |
+          if [[ -z "${{ secrets.PRIVATE_GITHUB_KEY }}" ]]; then
+            ssh-keygen -t ed25519 -C "github-actions" -N "" -f ~/.ssh/id_ed25519
+            echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
+            cat ~/.ssh/id_ed25519 >> $GITHUB_OUTPUT
+            echo "EOF" >> $GITHUB_OUTPUT
+          else
+            echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
+            echo "${{ secrets.PRIVATE_GITHUB_KEY }}" >> $GITHUB_OUTPUT
+            echo "EOF" >> $GITHUB_OUTPUT
+          fi
+
+      - name: Add private GitHub key to SSH agent
+        uses: webfactory/ssh-agent@v0.9.0
+        with:
+          ssh-private-key: ${{ steps.generate_key.outputs.SSH_PRIVATE_KEY }}
+
       - name: Pre-pull image to warm build cache
         run: |
           docker pull -q ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} || :
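The two added steps lean on the `$GITHUB_OUTPUT` heredoc convention: the key is written between a `SSH_PRIVATE_KEY<<EOF` marker and a closing `EOF` so the multiline value survives intact as a step output. A minimal sketch of the same mechanism in Python (the helper name is ours, not part of the workflow):

```python
import os

def set_multiline_output(name: str, value: str, delimiter: str = "EOF") -> None:
    """Append a multiline step output in the name<<DELIMITER ... DELIMITER form
    that GitHub Actions parses from the file pointed to by $GITHUB_OUTPUT."""
    with open(os.environ["GITHUB_OUTPUT"], "a") as fh:
        fh.write(f"{name}<<{delimiter}\n{value}\n{delimiter}\n")

# e.g. set_multiline_output("SSH_PRIVATE_KEY", private_key_text)
```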
@@ -56,5 +77,3 @@ jobs:
           ansible localhost -c local, -m command -a "{{ ansible_python_interpreter + ' -m pip install boto3'}}"
           ansible localhost -c local -m aws_s3 \
             -a "src=${{ github.workspace }}/schema.json bucket=awx-public-ci-files object=${GITHUB_REF##*/}/schema.json mode=put permission=public-read"
-
-
@@ -62,7 +62,8 @@ else:

 def prepare_env():
     # Update the default settings environment variable based on current mode.
-    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'awx.settings.%s' % MODE)
+    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'awx.settings')
+    os.environ.setdefault('AWX_MODE', MODE)
     # Hide DeprecationWarnings when running in production. Need to first load
     # settings to apply our filter after Django's own warnings filter.
     from django.conf import settings
@@ -161,7 +161,7 @@ def get_view_description(view, html=False):


 def get_default_schema():
-    if settings.SETTINGS_MODULE == 'awx.settings.development':
+    if settings.DYNACONF.is_development_mode:
         from awx.api.swagger import schema_view

         return schema_view
@@ -682,11 +682,12 @@ class EmptySerializer(serializers.Serializer):
 class UnifiedJobTemplateSerializer(BaseSerializer):
     # As a base serializer, the capabilities prefetch is not used directly,
     # instead they are derived from the Workflow Job Template Serializer and the Job Template Serializer, respectively.
+    priority = serializers.IntegerField(required=False, min_value=0, max_value=32000)
     capabilities_prefetch = []

     class Meta:
         model = UnifiedJobTemplate
-        fields = ('*', 'last_job_run', 'last_job_failed', 'next_job_run', 'status', 'execution_environment')
+        fields = ('*', 'last_job_run', 'last_job_failed', 'next_job_run', 'status', 'priority', 'execution_environment')

     def get_related(self, obj):
         res = super(UnifiedJobTemplateSerializer, self).get_related(obj)
@@ -2996,6 +2997,7 @@ class JobOptionsSerializer(LabelsListMixin, BaseSerializer):
             'scm_branch',
             'forks',
             'limit',
+            'priority',
             'verbosity',
             'extra_vars',
             'job_tags',
@@ -3118,6 +3120,7 @@ class JobTemplateMixin(object):
 class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobOptionsSerializer):
     show_capabilities = ['start', 'schedule', 'copy', 'edit', 'delete']
     capabilities_prefetch = ['admin', 'execute', {'copy': ['project.use', 'inventory.use']}]
+    priority = serializers.IntegerField(required=False, min_value=0, max_value=32000)

     status = serializers.ChoiceField(choices=JobTemplate.JOB_TEMPLATE_STATUS_CHOICES, read_only=True, required=False)

@@ -3125,6 +3128,7 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO
         model = JobTemplate
         fields = (
             '*',
+            'priority',
             'host_config_key',
             'ask_scm_branch_on_launch',
             'ask_diff_mode_on_launch',
@@ -3252,6 +3256,7 @@ class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer):
             'diff_mode',
             'job_slice_number',
             'job_slice_count',
+            'priority',
             'webhook_service',
             'webhook_credential',
             'webhook_guid',
@@ -3352,11 +3357,17 @@ class JobRelaunchSerializer(BaseSerializer):
         choices=[('all', _('No change to job limit')), ('failed', _('All failed and unreachable hosts'))],
         write_only=True,
     )
+    job_type = serializers.ChoiceField(
+        required=False,
+        allow_null=True,
+        choices=NEW_JOB_TYPE_CHOICES,
+        write_only=True,
+    )
     credential_passwords = VerbatimField(required=True, write_only=True)

     class Meta:
         model = Job
-        fields = ('passwords_needed_to_start', 'retry_counts', 'hosts', 'credential_passwords')
+        fields = ('passwords_needed_to_start', 'retry_counts', 'hosts', 'job_type', 'credential_passwords')

     def validate_credential_passwords(self, value):
         pnts = self.instance.passwords_needed_to_start
@@ -3696,6 +3707,7 @@ class WorkflowJobTemplateWithSpecSerializer(WorkflowJobTemplateSerializer):

 class WorkflowJobSerializer(LabelsListMixin, UnifiedJobSerializer):
     limit = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
+    priority = serializers.IntegerField(required=False, min_value=0, max_value=32000)
     scm_branch = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)

     skip_tags = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
@@ -3716,6 +3728,7 @@ class WorkflowJobSerializer(LabelsListMixin, UnifiedJobSerializer):
             '-controller_node',
             'inventory',
             'limit',
+            'priority',
             'scm_branch',
             'webhook_service',
             'webhook_credential',
@@ -3833,6 +3846,7 @@ class LaunchConfigurationBaseSerializer(BaseSerializer):
     job_type = serializers.ChoiceField(allow_blank=True, allow_null=True, required=False, default=None, choices=NEW_JOB_TYPE_CHOICES)
     job_tags = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
     limit = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
+    priority = serializers.IntegerField(required=False, min_value=0, max_value=32000)
     skip_tags = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
     diff_mode = serializers.BooleanField(required=False, allow_null=True, default=None)
     verbosity = serializers.ChoiceField(allow_null=True, required=False, default=None, choices=VERBOSITY_CHOICES)
@@ -3851,6 +3865,7 @@ class LaunchConfigurationBaseSerializer(BaseSerializer):
             'job_tags',
             'skip_tags',
             'limit',
+            'priority',
             'skip_tags',
             'diff_mode',
             'verbosity',
@@ -4344,6 +4359,7 @@ class JobLaunchSerializer(BaseSerializer):
     job_type = serializers.ChoiceField(required=False, choices=NEW_JOB_TYPE_CHOICES, write_only=True)
     skip_tags = serializers.CharField(required=False, write_only=True, allow_blank=True)
     limit = serializers.CharField(required=False, write_only=True, allow_blank=True)
+    priority = serializers.IntegerField(required=False, write_only=False, min_value=0, max_value=32000)
     verbosity = serializers.ChoiceField(required=False, choices=VERBOSITY_CHOICES, write_only=True)
     execution_environment = serializers.PrimaryKeyRelatedField(queryset=ExecutionEnvironment.objects.all(), required=False, write_only=True)
     labels = serializers.PrimaryKeyRelatedField(many=True, queryset=Label.objects.all(), required=False, write_only=True)
@@ -4361,6 +4377,7 @@ class JobLaunchSerializer(BaseSerializer):
             'inventory',
             'scm_branch',
             'limit',
+            'priority',
             'job_tags',
             'skip_tags',
             'job_type',
@@ -4546,6 +4563,7 @@ class WorkflowJobLaunchSerializer(BaseSerializer):
     extra_vars = VerbatimField(required=False, write_only=True)
     inventory = serializers.PrimaryKeyRelatedField(queryset=Inventory.objects.all(), required=False, write_only=True)
     limit = serializers.CharField(required=False, write_only=True, allow_blank=True)
+    priority = serializers.IntegerField(required=False, write_only=False, min_value=0, max_value=32000)
     scm_branch = serializers.CharField(required=False, write_only=True, allow_blank=True)
     workflow_job_template_data = serializers.SerializerMethodField()

@@ -4685,13 +4703,14 @@ class BulkJobLaunchSerializer(serializers.Serializer):
     )
     inventory = serializers.PrimaryKeyRelatedField(queryset=Inventory.objects.all(), required=False, write_only=True)
     limit = serializers.CharField(write_only=True, required=False, allow_blank=False)
+    # priority = serializers.IntegerField(write_only=True, required=False, min_value=0, max_value=32000)
     scm_branch = serializers.CharField(write_only=True, required=False, allow_blank=False)
     skip_tags = serializers.CharField(write_only=True, required=False, allow_blank=False)
     job_tags = serializers.CharField(write_only=True, required=False, allow_blank=False)

     class Meta:
         model = WorkflowJob
-        fields = ('name', 'jobs', 'description', 'extra_vars', 'organization', 'inventory', 'limit', 'scm_branch', 'skip_tags', 'job_tags')
+        fields = ('name', 'jobs', 'description', 'extra_vars', 'organization', 'inventory', 'limit', 'priority', 'scm_branch', 'skip_tags', 'job_tags')
         read_only_fields = ()

     def validate(self, attrs):
@@ -3435,6 +3435,7 @@ class JobRelaunch(RetrieveAPIView):

         copy_kwargs = {}
         retry_hosts = serializer.validated_data.get('hosts', None)
+        job_type = serializer.validated_data.get('job_type', None)
         if retry_hosts and retry_hosts != 'all':
             if obj.status in ACTIVE_STATES:
                 return Response(
@@ -3455,6 +3456,8 @@ class JobRelaunch(RetrieveAPIView):
                 )
             copy_kwargs['limit'] = ','.join(retry_host_list)

+        if job_type:
+            copy_kwargs['job_type'] = job_type
         new_job = obj.copy_unified_job(**copy_kwargs)
         result = new_job.signal_start(**serializer.validated_data['credential_passwords'])
         if not result:
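Taken together with the serializer change above, this lets a relaunch flip a job between 'run' and 'check' mode. An illustrative client call against the standard job relaunch endpoint (host and credentials are placeholders):

```python
import requests

# Relaunch job 42 as a check-mode job; a 201 response means a new job was created.
resp = requests.post(
    "https://awx.example.com/api/v2/jobs/42/relaunch/",
    json={"job_type": "check"},
    auth=("admin", "password"),  # placeholder credentials
)
assert resp.status_code == 201
print(resp.json()["id"])  # id of the relaunched job
```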
@@ -10,7 +10,7 @@ from awx.api.generics import APIView, Response
 from awx.api.permissions import AnalyticsPermission
 from awx.api.versioning import reverse
 from awx.main.utils import get_awx_version
-from awx.main.utils.analytics_proxy import OIDCClient, DEFAULT_OIDC_ENDPOINT
+from awx.main.utils.analytics_proxy import OIDCClient, DEFAULT_OIDC_TOKEN_ENDPOINT
 from rest_framework import status

 from collections import OrderedDict
@@ -205,7 +205,7 @@ class AnalyticsGenericView(APIView):
         try:
             rh_user = self._get_setting('REDHAT_USERNAME', None, ERROR_MISSING_USER)
             rh_password = self._get_setting('REDHAT_PASSWORD', None, ERROR_MISSING_PASSWORD)
-            client = OIDCClient(rh_user, rh_password, DEFAULT_OIDC_ENDPOINT, ['api.console'])
+            client = OIDCClient(rh_user, rh_password, DEFAULT_OIDC_TOKEN_ENDPOINT, ['api.console'])
             response = client.make_request(
                 method,
                 url,
@@ -22,7 +22,7 @@ from ansible_base.lib.utils.db import advisory_lock
 from awx.main.models import Job
 from awx.main.access import access_registry
 from awx.main.utils import get_awx_http_client_headers, set_environ, datetime_hook
-from awx.main.utils.analytics_proxy import OIDCClient, DEFAULT_OIDC_ENDPOINT
+from awx.main.utils.analytics_proxy import OIDCClient, DEFAULT_OIDC_TOKEN_ENDPOINT

 __all__ = ['register', 'gather', 'ship']

@@ -379,7 +379,7 @@ def ship(path):
     with set_environ(**settings.AWX_TASK_ENV):
         if rh_user and rh_password:
             try:
-                client = OIDCClient(rh_user, rh_password, DEFAULT_OIDC_ENDPOINT, ['api.console'])
+                client = OIDCClient(rh_user, rh_password, DEFAULT_OIDC_TOKEN_ENDPOINT, ['api.console'])
                 response = client.make_request("POST", url, headers=s.headers, files=files, verify=settings.INSIGHTS_CERT_PATH, timeout=(31, 31))
             except requests.RequestException:
                 logger.error("Automation Analytics API request failed, trying base auth method")
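The rename clarifies that the constant is specifically the OIDC token endpoint. OIDCClient itself is internal to AWX, but a rough sketch of what a client-credentials token fetch against that endpoint looks like (the real client's internals are an assumption here):

```python
import requests

DEFAULT_OIDC_TOKEN_ENDPOINT = 'https://sso.redhat.com/auth/realms/redhat-external/protocol/openid-connect/token'

def fetch_token(client_id: str, client_secret: str, scope: str = 'api.console') -> str:
    # Standard OAuth2 client-credentials grant against the token endpoint.
    resp = requests.post(
        DEFAULT_OIDC_TOKEN_ENDPOINT,
        data={'grant_type': 'client_credentials', 'scope': scope},
        auth=(client_id, client_secret),
        timeout=31,
    )
    resp.raise_for_status()
    return resp.json()['access_token']
```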
awx/main/migrations/0197_add_opa_query_path.py (new file, 46 lines)
@@ -0,0 +1,46 @@
+# Generated by Django 4.2.18 on 2025-03-17 16:10
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('main', '0196_indirect_managed_node_audit'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='inventory',
+            name='opa_query_path',
+            field=models.CharField(
+                blank=True,
+                default=None,
+                help_text='The query path for the OPA policy to evaluate prior to job execution. The query path should be formatted as package/rule.',
+                max_length=128,
+                null=True,
+            ),
+        ),
+        migrations.AddField(
+            model_name='jobtemplate',
+            name='opa_query_path',
+            field=models.CharField(
+                blank=True,
+                default=None,
+                help_text='The query path for the OPA policy to evaluate prior to job execution. The query path should be formatted as package/rule.',
+                max_length=128,
+                null=True,
+            ),
+        ),
+        migrations.AddField(
+            model_name='organization',
+            name='opa_query_path',
+            field=models.CharField(
+                blank=True,
+                default=None,
+                help_text='The query path for the OPA policy to evaluate prior to job execution. The query path should be formatted as package/rule.',
+                max_length=128,
+                null=True,
+            ),
+        ),
+    ]
@@ -5,7 +5,7 @@ from django.db import migrations

 class Migration(migrations.Migration):
     dependencies = [
-        ('main', '0196_indirect_managed_node_audit'),
+        ('main', '0197_add_opa_query_path'),
     ]

     operations = [

@@ -5,7 +5,7 @@ from django.db import migrations

 class Migration(migrations.Migration):
     dependencies = [
-        ('main', '0197_delete_profile'),
+        ('main', '0198_delete_profile'),
     ]

     operations = [

@@ -6,7 +6,7 @@ from django.db import migrations, models
 class Migration(migrations.Migration):

     dependencies = [
-        ('main', '0198_remove_sso_app_content'),
+        ('main', '0199_remove_sso_app_content'),
     ]

     operations = [

@@ -6,7 +6,7 @@ from django.db import migrations
 class Migration(migrations.Migration):

     dependencies = [
-        ('main', '0199_alter_inventorysource_source_and_more'),
+        ('main', '0200_alter_inventorysource_source_and_more'),
     ]

     operations = [

@@ -8,7 +8,7 @@ from awx.main.migrations._create_system_jobs import delete_clear_tokens_sjt
 class Migration(migrations.Migration):

     dependencies = [
-        ('main', '0200_alter_oauth2application_unique_together_and_more'),
+        ('main', '0201_alter_oauth2application_unique_together_and_more'),
     ]

     operations = [
@@ -0,0 +1,27 @@
+# Generated by Django 4.2.16 on 2025-03-11 14:40
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('main', '0201_delete_token_cleanup_job'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='unifiedjob',
+            name='priority',
+            field=models.PositiveIntegerField(
+                default=0,
+                editable=False,
+                help_text='Relative priority to other jobs. The higher the number, the higher the priority. Jobs with equivalent prioirty are started based on available capacity and launch time.',
+            ),
+        ),
+        migrations.AddField(
+            model_name='unifiedjobtemplate',
+            name='priority',
+            field=models.PositiveIntegerField(default=0),
+        ),
+    ]
@@ -550,10 +550,10 @@ class CredentialType(CommonModelNameNotUnique):
         # TODO: User "side-loaded" credential custom_injectors isn't supported
         ManagedCredentialType.registry[ns] = SimpleNamespace(namespace=ns, name=plugin.name, kind='external', inputs=plugin.inputs, backend=plugin.backend)

-    def inject_credential(self, credential, env, safe_env, args, private_data_dir):
+    def inject_credential(self, credential, env, safe_env, args, private_data_dir, container_root=None):
        from awx_plugins.interfaces._temporary_private_inject_api import inject_credential

-        inject_credential(self, credential, env, safe_env, args, private_data_dir)
+        inject_credential(self, credential, env, safe_env, args, private_data_dir, container_root=container_root)


 class CredentialTypeHelper:
@@ -565,7 +565,6 @@ class JobEvent(BasePlaybookEvent):
         summaries = dict()
         updated_hosts_list = list()
         for host in hostnames:
-            updated_hosts_list.append(host.lower())
             host_id = host_map.get(host)
             if host_id not in existing_host_ids:
                 host_id = None
@@ -582,6 +581,12 @@ class JobEvent(BasePlaybookEvent):
             summary.failed = bool(summary.dark or summary.failures)
             summaries[(host_id, host)] = summary

+            # do not count dark / unreachable hosts as updated
+            if not bool(summary.dark):
+                updated_hosts_list.append(host.lower())
+            else:
+                logger.warning(f'host {host.lower()} is dark / unreachable, not marking it as updated')
+
         JobHostSummary.objects.bulk_create(summaries.values())

         # update the last_job_id and last_job_host_summary_id
@@ -43,6 +43,7 @@ from awx.main.models.mixins import (
     TaskManagerInventoryUpdateMixin,
     RelatedJobsMixin,
     CustomVirtualEnvMixin,
+    OpaQueryPathMixin,
 )
 from awx.main.models.notifications import (
     NotificationTemplate,
@@ -68,7 +69,7 @@ class InventoryConstructedInventoryMembership(models.Model):
     )


-class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
+class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin, OpaQueryPathMixin):
     """
     an inventory source contains lists and hosts.
     """
@@ -51,6 +51,7 @@ from awx.main.models.mixins import (
     RelatedJobsMixin,
     WebhookMixin,
     WebhookTemplateMixin,
+    OpaQueryPathMixin,
 )
 from awx.main.constants import JOB_VARIABLE_PREFIXES

@@ -192,7 +193,9 @@ class JobOptions(BaseModel):
         return needed


-class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, ResourceMixin, CustomVirtualEnvMixin, RelatedJobsMixin, WebhookTemplateMixin):
+class JobTemplate(
+    UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, ResourceMixin, CustomVirtualEnvMixin, RelatedJobsMixin, WebhookTemplateMixin, OpaQueryPathMixin
+):
     """
     A job template is a reusable job definition for applying a project (with
     playbook) to an inventory source with a given credential.
@@ -295,6 +298,7 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
             'organization',
             'survey_passwords',
             'labels',
+            'priority',
             'credentials',
             'job_slice_number',
             'job_slice_count',
@@ -1172,7 +1176,7 @@ class SystemJobTemplate(UnifiedJobTemplate, SystemJobOptions):

     @classmethod
     def _get_unified_job_field_names(cls):
-        return ['name', 'description', 'organization', 'job_type', 'extra_vars']
+        return ['name', 'description', 'organization', 'priority', 'job_type', 'extra_vars']

     def get_absolute_url(self, request=None):
         return reverse('api:system_job_template_detail', kwargs={'pk': self.pk}, request=request)
@@ -42,6 +42,7 @@ __all__ = [
     'TaskManagerInventoryUpdateMixin',
     'ExecutionEnvironmentMixin',
     'CustomVirtualEnvMixin',
+    'OpaQueryPathMixin',
 ]


@@ -692,3 +693,16 @@ class WebhookMixin(models.Model):
                 logger.debug("Webhook status update sent.")
             else:
                 logger.error("Posting webhook status failed, code: {}\n" "{}\nPayload sent: {}".format(response.status_code, response.text, json.dumps(data)))
+
+
+class OpaQueryPathMixin(models.Model):
+    class Meta:
+        abstract = True
+
+    opa_query_path = models.CharField(
+        max_length=128,
+        blank=True,
+        null=True,
+        default=None,
+        help_text=_("The query path for the OPA policy to evaluate prior to job execution. The query path should be formatted as package/rule."),
+    )
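Because `OpaQueryPathMixin` is an abstract model, any model that inherits it gains the `opa_query_path` column; that is how Inventory, JobTemplate, and Organization (below) all pick up the field added in migration 0197. A toy illustration (the model name here is hypothetical):

```python
from django.db import models

from awx.main.models.mixins import OpaQueryPathMixin

class PolicyAwareThing(OpaQueryPathMixin):  # hypothetical model, for illustration only
    name = models.CharField(max_length=64)

# The inherited field is set like any other model field:
thing = PolicyAwareThing(name='demo', opa_query_path='mypackage/allow')
```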
@@ -22,12 +22,12 @@ from awx.main.models.rbac import (
     ROLE_SINGLETON_SYSTEM_AUDITOR,
 )
 from awx.main.models.unified_jobs import UnifiedJob
-from awx.main.models.mixins import ResourceMixin, CustomVirtualEnvMixin, RelatedJobsMixin
+from awx.main.models.mixins import ResourceMixin, CustomVirtualEnvMixin, RelatedJobsMixin, OpaQueryPathMixin

 __all__ = ['Organization', 'Team', 'UserSessionMembership']


-class Organization(CommonModel, NotificationFieldsModel, ResourceMixin, CustomVirtualEnvMixin, RelatedJobsMixin):
+class Organization(CommonModel, NotificationFieldsModel, ResourceMixin, CustomVirtualEnvMixin, RelatedJobsMixin, OpaQueryPathMixin):
     """
     An organization is the basic unit of multi-tenancy divisions
     """
@@ -354,7 +354,7 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn

     @classmethod
     def _get_unified_job_field_names(cls):
-        return set(f.name for f in ProjectOptions._meta.fields) | set(['name', 'description', 'organization'])
+        return set(f.name for f in ProjectOptions._meta.fields) | set(['name', 'description', 'priority', 'organization'])

     def clean_organization(self):
         if self.pk:
@@ -118,6 +118,11 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn
         default=None,
         editable=False,
     )
+    priority = models.PositiveIntegerField(
+        null=False,
+        default=0,
+        editable=True,
+    )
     current_job = models.ForeignKey(
         'UnifiedJob',
         null=True,
@@ -585,6 +590,13 @@ class UnifiedJob(
         default=None,
         editable=False,
     )
+    priority = models.PositiveIntegerField(
+        default=0,
+        editable=False,
+        help_text=_(
+            "Relative priority to other jobs. The higher the number, the higher the priority. Jobs with equivalent prioirty are started based on available capacity and launch time."
+        ),
+    )
     emitted_events = models.PositiveIntegerField(
         default=0,
         editable=False,
@@ -416,7 +416,7 @@ class WorkflowJobOptions(LaunchTimeConfigBase):
     @classmethod
     def _get_unified_job_field_names(cls):
         r = set(f.name for f in WorkflowJobOptions._meta.fields) | set(
-            ['name', 'description', 'organization', 'survey_passwords', 'labels', 'limit', 'scm_branch', 'job_tags', 'skip_tags']
+            ['name', 'description', 'organization', 'survey_passwords', 'labels', 'limit', 'scm_branch', 'priority', 'job_tags', 'skip_tags']
         )
         r.remove('char_prompts')  # needed due to copying launch config to launch config
         return r
@@ -97,7 +97,7 @@ class TaskBase:
             UnifiedJob.objects.filter(**filter_args)
             .exclude(launch_type='sync')
             .exclude(polymorphic_ctype_id=wf_approval_ctype_id)
-            .order_by('created')
+            .order_by('-priority', 'created')
             .prefetch_related('dependent_jobs')
         )
         self.all_tasks = [t for t in qs]
@@ -286,7 +286,7 @@ class WorkflowManager(TaskBase):

     @timeit
     def get_tasks(self, filter_args):
-        self.all_tasks = [wf for wf in WorkflowJob.objects.filter(**filter_args)]
+        self.all_tasks = [wf for wf in WorkflowJob.objects.filter(**filter_args).order_by('-priority', 'created')]

     @timeit
     def _schedule(self):
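The effect of `order_by('-priority', 'created')` in both managers: higher-priority jobs are considered first, and jobs with equal priority fall back to FIFO by creation time. A standalone sketch of that sort key:

```python
# (name, priority, created); sorting mirrors order_by('-priority', 'created')
jobs = [('a', 0, 1), ('b', 10, 3), ('c', 10, 2)]
jobs.sort(key=lambda j: (-j[1], j[2]))
assert [j[0] for j in jobs] == ['c', 'b', 'a']
```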
@@ -336,12 +336,14 @@ class DependencyManager(TaskBase):

         return bool(((update.finished + timedelta(seconds=cache_timeout))) < tz_now())

-    def get_or_create_project_update(self, project_id):
+    def get_or_create_project_update(self, task):
+        project_id = task.project_id
+        priority = task.priority
         project = self.all_projects.get(project_id, None)
         if project is not None:
             latest_project_update = project.project_updates.filter(job_type='check').order_by("-created").first()
             if self.should_update_again(latest_project_update, project.scm_update_cache_timeout):
-                project_task = project.create_project_update(_eager_fields=dict(launch_type='dependency'))
+                project_task = project.create_project_update(_eager_fields=dict(launch_type='dependency', priority=priority))
                 project_task.signal_start()
                 return [project_task]
             else:
@@ -349,7 +351,7 @@ class DependencyManager(TaskBase):
             return []

     def gen_dep_for_job(self, task):
-        dependencies = self.get_or_create_project_update(task.project_id)
+        dependencies = self.get_or_create_project_update(task)

         try:
             start_args = json.loads(decrypt_field(task, field_name="start_args"))
@@ -361,7 +363,7 @@ class DependencyManager(TaskBase):
                 continue
             latest_inventory_update = inventory_source.inventory_updates.order_by("-created").first()
             if self.should_update_again(latest_inventory_update, inventory_source.update_cache_timeout):
-                inventory_task = inventory_source.create_inventory_update(_eager_fields=dict(launch_type='dependency'))
+                inventory_task = inventory_source.create_inventory_update(_eager_fields=dict(launch_type='dependency', priority=task.priority))
                 inventory_task.signal_start()
                 dependencies.append(inventory_task)
             else:
@@ -522,9 +522,13 @@ class BaseTask(object):

         credentials = self.build_credentials_list(self.instance)

+        container_root = None
+        if settings.IS_K8S and isinstance(self.instance, ProjectUpdate):
+            container_root = private_data_dir
+
         for credential in credentials:
             if credential:
-                credential.credential_type.inject_credential(credential, env, self.safe_cred_env, args, private_data_dir)
+                credential.credential_type.inject_credential(credential, env, self.safe_cred_env, args, private_data_dir, container_root=container_root)

         self.runner_callback.safe_env.update(self.safe_cred_env)

@@ -917,7 +921,6 @@ class RunJob(SourceControlMixin, BaseTask):
             env['ANSIBLE_NET_AUTH_PASS'] = network_cred.get_input('authorize_password', default='')

         path_vars = [
-            ('ANSIBLE_COLLECTIONS_PATHS', 'collections_paths', 'requirements_collections', '~/.ansible/collections:/usr/share/ansible/collections'),
             ('ANSIBLE_ROLES_PATH', 'roles_path', 'requirements_roles', '~/.ansible/roles:/usr/share/ansible/roles:/etc/ansible/roles'),
             ('ANSIBLE_COLLECTIONS_PATH', 'collections_path', 'requirements_collections', '~/.ansible/collections:/usr/share/ansible/collections'),
         ]
@@ -1520,7 +1523,7 @@ class RunInventoryUpdate(SourceControlMixin, BaseTask):
             raise NotImplementedError('Cannot update file sources through the task system.')

         if inventory_update.source == 'scm' and inventory_update.source_project_update:
-            env_key = 'ANSIBLE_COLLECTIONS_PATHS'
+            env_key = 'ANSIBLE_COLLECTIONS_PATH'
             config_setting = 'collections_paths'
             folder = 'requirements_collections'
             default = '~/.ansible/collections:/usr/share/ansible/collections'
@@ -1538,12 +1541,12 @@ class RunInventoryUpdate(SourceControlMixin, BaseTask):
                 paths = [config_values[config_setting]] + paths
             paths = [os.path.join(CONTAINER_ROOT, folder)] + paths
             env[env_key] = os.pathsep.join(paths)
-        if 'ANSIBLE_COLLECTIONS_PATHS' in env:
-            paths = env['ANSIBLE_COLLECTIONS_PATHS'].split(':')
+        if 'ANSIBLE_COLLECTIONS_PATH' in env:
+            paths = env['ANSIBLE_COLLECTIONS_PATH'].split(':')
         else:
             paths = ['~/.ansible/collections', '/usr/share/ansible/collections']
         paths.append('/usr/share/automation-controller/collections')
-        env['ANSIBLE_COLLECTIONS_PATHS'] = os.pathsep.join(paths)
+        env['ANSIBLE_COLLECTIONS_PATH'] = os.pathsep.join(paths)

         return env

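The singular `ANSIBLE_COLLECTIONS_PATH` spelling replaces the plural `ANSIBLE_COLLECTIONS_PATHS`, which newer ansible-core releases treat as deprecated; the value itself is still an `os.pathsep`-joined list of search directories, e.g.:

```python
import os

paths = ['~/.ansible/collections', '/usr/share/ansible/collections']
paths.append('/usr/share/automation-controller/collections')
env = {'ANSIBLE_COLLECTIONS_PATH': os.pathsep.join(paths)}
# -> '~/.ansible/collections:/usr/share/ansible/collections:/usr/share/automation-controller/collections'
```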
@@ -210,6 +210,39 @@ def test_disallowed_http_update_methods(put, patch, post, inventory, project, ad
     patch(url=reverse('api:job_detail', kwargs={'pk': job.pk}), data={}, user=admin_user, expect=405)


+@pytest.mark.django_db
+@pytest.mark.parametrize(
+    "job_type",
+    [
+        'run',
+        'check',
+    ],
+)
+def test_job_relaunch_with_job_type(post, inventory, project, machine_credential, admin_user, job_type):
+    # Create a job template
+    jt = JobTemplate.objects.create(name='testjt', inventory=inventory, project=project)
+
+    # Set initial job type
+    init_job_type = 'check' if job_type == 'run' else 'run'
+
+    # Create a job instance
+    job = jt.create_unified_job(_eager_fields={'job_type': init_job_type})
+
+    # Perform the POST request
+    url = reverse('api:job_relaunch', kwargs={'pk': job.pk})
+    r = post(url=url, data={'job_type': job_type}, user=admin_user, expect=201)
+
+    # Assert that the response status code is 201 (Created)
+    assert r.status_code == 201
+
+    # Retrieve the newly created job from the response
+    new_job_id = r.data.get('id')
+    new_job = Job.objects.get(id=new_job_id)
+
+    # Assert that the new job has the correct job type
+    assert new_job.job_type == job_type
+
+
 class TestControllerNode:
     @pytest.fixture
     def project_update(self, project):
@@ -135,8 +135,9 @@ class TestEvents:

         self._create_job_event(ok=dict((hostname, len(hostname)) for hostname in self.hostnames))

-        # Soft delete 6 host metrics
-        for hm in HostMetric.objects.filter(id__in=[1, 3, 5, 7, 9, 11]):
+        # Soft delete 6 of the 12 host metrics, every even host like "Host 2" or "Host 4"
+        for host_name in self.hostnames[::2]:
+            hm = HostMetric.objects.get(hostname=host_name.lower())
             hm.soft_delete()

         assert len(HostMetric.objects.filter(Q(deleted=False) & Q(deleted_counter=0) & Q(last_deleted__isnull=True))) == 6
@@ -165,7 +166,9 @@ class TestEvents:
             skipped=dict((hostname, len(hostname)) for hostname in self.hostnames[10:12]),
         )
         assert len(HostMetric.objects.filter(Q(deleted=False) & Q(deleted_counter=0) & Q(last_deleted__isnull=True))) == 6
-        assert len(HostMetric.objects.filter(Q(deleted=False) & Q(deleted_counter=1) & Q(last_deleted__isnull=False))) == 6
+
+        # one of those 6 hosts is dark, so will not be counted
+        assert len(HostMetric.objects.filter(Q(deleted=False) & Q(deleted_counter=1) & Q(last_deleted__isnull=False))) == 5

     def _generate_hosts(self, cnt, id_from=0):
         self.hostnames = [f'Host {i}' for i in range(id_from, id_from + cnt)]
@@ -231,7 +231,7 @@ def test_inventory_update_injected_content(product_name, this_kind, inventory, f
         len([True for k in content.keys() if k.endswith(inventory_filename)]) > 0
     ), f"'{inventory_filename}' file not found in inventory update runtime files {content.keys()}"

-    env.pop('ANSIBLE_COLLECTIONS_PATHS', None)  # collection paths not relevant to this test
+    env.pop('ANSIBLE_COLLECTIONS_PATH', None)
     base_dir = os.path.join(DATA, 'plugins')
     if not os.path.exists(base_dir):
         os.mkdir(base_dir)
@@ -50,13 +50,14 @@ def test_indirect_host_counting(live_tmp_folder, run_job_from_playbook):
     job.refresh_from_db()
     if job.event_queries_processed is False:
         save_indirect_host_entries.delay(job.id, wait_for_events=False)
-    # This will poll for the background task to finish
+
+    # event_queries_processed only assures the task has started, it might take a minor amount of time to finish
     for _ in range(10):
         if IndirectManagedNodeAudit.objects.filter(job=job).exists():
             break
         time.sleep(0.2)
     else:
         raise RuntimeError(f'No IndirectManagedNodeAudit records ever populated for job_id={job.id}')

     assert IndirectManagedNodeAudit.objects.filter(job=job).count() == 1
     host_audit = IndirectManagedNodeAudit.objects.filter(job=job).first()
@@ -1,6 +1,3 @@
-from split_settings.tools import include
-
-
 LOCAL_SETTINGS = (
     'ALLOWED_HOSTS',
     'BROADCAST_WEBSOCKET_PORT',
@@ -16,13 +13,14 @@ LOCAL_SETTINGS = (


 def test_postprocess_auth_basic_enabled():
-    locals().update({'__file__': __file__})
-
-    include('../../../settings/defaults.py', scope=locals())
-    assert 'awx.api.authentication.LoggedBasicAuthentication' in locals()['REST_FRAMEWORK']['DEFAULT_AUTHENTICATION_CLASSES']
+    """The final loaded settings should have basic auth enabled."""
+    from awx.settings import REST_FRAMEWORK
+
+    assert 'awx.api.authentication.LoggedBasicAuthentication' in REST_FRAMEWORK['DEFAULT_AUTHENTICATION_CLASSES']


 def test_default_settings():
+    """Ensure that all default settings are present in the snapshot."""
     from django.conf import settings

     for k in dir(settings):
@@ -31,3 +29,43 @@ def test_default_settings():
         default_val = getattr(settings.default_settings, k, None)
         snapshot_val = settings.DEFAULTS_SNAPSHOT[k]
         assert default_val == snapshot_val, f'Setting for {k} does not match shapshot:\nsnapshot: {snapshot_val}\ndefault: {default_val}'
+
+
+def test_django_conf_settings_is_awx_settings():
+    """Ensure that the settings loaded from dynaconf are the same as the settings delivered to django."""
+    from django.conf import settings
+    from awx.settings import REST_FRAMEWORK
+
+    assert settings.REST_FRAMEWORK == REST_FRAMEWORK
+
+
+def test_dynaconf_is_awx_settings():
+    """Ensure that the settings loaded from dynaconf are the same as the settings delivered to django."""
+    from django.conf import settings
+    from awx.settings import REST_FRAMEWORK
+
+    assert settings.DYNACONF.REST_FRAMEWORK == REST_FRAMEWORK
+
+
+def test_development_settings_can_be_directly_imported(monkeypatch):
+    """Ensure that the development settings can be directly imported."""
+    monkeypatch.setenv('AWX_MODE', 'development')
+    from django.conf import settings
+    from awx.settings.development import REST_FRAMEWORK
+    from awx.settings.development import DEBUG  # actually set on defaults.py and not overridden in development.py
+
+    assert settings.REST_FRAMEWORK == REST_FRAMEWORK
+    assert DEBUG is True
+
+
+def test_merge_application_name():
+    """Ensure that the merge_application_name function works as expected."""
+    from awx.settings.functions import merge_application_name
+
+    settings = {
+        "DATABASES__default__ENGINE": "django.db.backends.postgresql",
+        "CLUSTER_HOST_ID": "test-cluster-host-id",
+    }
+    result = merge_application_name(settings)["DATABASES__default__OPTIONS__application_name"]
+    assert result.startswith("awx-")
+    assert "test-cluster" in result
@@ -10,7 +10,7 @@ from typing import Optional, Any

 import requests

-DEFAULT_OIDC_ENDPOINT = 'https://sso.redhat.com/auth/realms/redhat-external/protocol/openid-connect/token'
+DEFAULT_OIDC_TOKEN_ENDPOINT = 'https://sso.redhat.com/auth/realms/redhat-external/protocol/openid-connect/token'


 class TokenError(requests.RequestException):
@@ -201,7 +201,7 @@
 # additional_galaxy_env contains environment variables are used for installing roles and collections and will take precedence over items in galaxy_task_env
 additional_galaxy_env:
   # These paths control where ansible-galaxy installs collections and roles on top the filesystem
-  ANSIBLE_COLLECTIONS_PATHS: "{{ projects_root }}/.__awx_cache/{{ local_path }}/stage/requirements_collections"
+  ANSIBLE_COLLECTIONS_PATH: "{{ projects_root }}/.__awx_cache/{{ local_path }}/stage/requirements_collections"
   ANSIBLE_ROLES_PATH: "{{ projects_root }}/.__awx_cache/{{ local_path }}/stage/requirements_roles"
   # Put the local tmp directory in same volume as collection destination
   # otherwise, files cannot be moved accross volumes and will cause error
@@ -1,2 +1,82 @@
|
|||||||
# Copyright (c) 2015 Ansible, Inc.
|
# Copyright (c) 2015 Ansible, Inc.
|
||||||
# All Rights Reserved.
|
# All Rights Reserved.
|
||||||
|
import os
|
||||||
|
import copy
|
||||||
|
from ansible_base.lib.dynamic_config import (
|
||||||
|
factory,
|
||||||
|
export,
|
||||||
|
load_envvars,
|
||||||
|
load_python_file_with_injected_context,
|
||||||
|
load_standard_settings_files,
|
||||||
|
toggle_feature_flags,
|
||||||
|
)
|
||||||
|
from .functions import (
|
||||||
|
assert_production_settings,
|
||||||
|
merge_application_name,
|
||||||
|
add_backwards_compatibility,
|
||||||
|
load_extra_development_files,
|
||||||
|
)
|
||||||
|
|
||||||
|
add_backwards_compatibility()
|
||||||
|
|
||||||
|
# Create a the standard DYNACONF instance which will come with DAB defaults
|
||||||
|
# This loads defaults.py and environment specific file e.g: development_defaults.py
|
||||||
|
DYNACONF = factory(
|
||||||
|
__name__,
|
||||||
|
"AWX",
|
||||||
|
environments=("development", "production", "quiet", "kube"),
|
||||||
|
settings_files=["defaults.py"],
|
||||||
|
)
|
||||||
|
|
||||||
|
# Store snapshot before loading any custom config file
|
||||||
|
DYNACONF.set(
|
||||||
|
"DEFAULTS_SNAPSHOT",
|
||||||
|
copy.deepcopy(DYNACONF.as_dict(internal=False)),
|
||||||
|
loader_identifier="awx.settings:DEFAULTS_SNAPSHOT",
|
||||||
|
)
|
||||||
|
|
||||||
|
#############################################################################################
|
||||||
|
# Settings loaded before this point will be allowed to be overridden by the database settings
|
||||||
|
# Any settings loaded after this point will be marked as as a read_only database setting
|
||||||
|
#############################################################################################
|
||||||
|
|
||||||
|
# Load extra settings files from the following directories
|
||||||
|
# /etc/tower/conf.d/ and /etc/tower/
|
||||||
|
# this is the legacy location, kept for backwards compatibility
|
||||||
|
settings_dir = os.environ.get('AWX_SETTINGS_DIR', '/etc/tower/conf.d/')
|
||||||
|
settings_files_path = os.path.join(settings_dir, '*.py')
|
||||||
|
settings_file_path = os.environ.get('AWX_SETTINGS_FILE', '/etc/tower/settings.py')
|
||||||
|
load_python_file_with_injected_context(settings_files_path, settings=DYNACONF)
|
||||||
|
load_python_file_with_injected_context(settings_file_path, settings=DYNACONF)
|
||||||
|
|
||||||
|
# Load extra settings files from the following directories
|
||||||
|
# /etc/ansible-automation-platform/{settings,flags,.secrets}.yaml
|
||||||
|
# and /etc/ansible-automation-platform/awx/{settings,flags,.secrets}.yaml
|
||||||
|
# this is the new standard location for all services
|
||||||
|
load_standard_settings_files(DYNACONF)
|
||||||
|
|
||||||
|
# Load optional development only settings files
|
||||||
|
load_extra_development_files(DYNACONF)
|
||||||
|
|
||||||
|
# Check at least one setting file has been loaded in production mode
|
||||||
|
assert_production_settings(DYNACONF, settings_dir, settings_file_path)
|
||||||
|
|
||||||
|
# Load envvars at the end to allow them to override everything loaded so far
|
||||||
|
load_envvars(DYNACONF)
|
||||||
|
|
||||||
|
# This must run after all custom settings are loaded
|
||||||
|
DYNACONF.update(
|
||||||
|
merge_application_name(DYNACONF),
|
||||||
|
loader_identifier="awx.settings:merge_application_name",
|
||||||
|
merge=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Toggle feature flags based on installer settings
|
||||||
|
DYNACONF.update(
|
||||||
|
toggle_feature_flags(DYNACONF),
|
||||||
|
loader_identifier="awx.settings:toggle_feature_flags",
|
||||||
|
merge=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Update django.conf.settings with DYNACONF values
|
||||||
|
export(__name__, DYNACONF)
|
||||||
|
|||||||
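
Because load_envvars(DYNACONF) runs after every file loader, AWX_-prefixed environment variables win over anything set in defaults.py, /etc/tower/, or /etc/ansible-automation-platform/. A minimal standalone sketch of that precedence with plain Dynaconf (FOO is a made-up setting name, not an AWX setting):

```python
import os

from dynaconf import Dynaconf  # dynaconf<4, as pinned later in this diff

os.environ["AWX_FOO"] = "from-env"  # hypothetical setting, set before loading

settings = Dynaconf(
    envvar_prefix="AWX",             # same prefix the factory() call above uses
    settings_files=["defaults.py"],  # file-based defaults load first
)

print(settings.FOO)  # "from-env" -- environment variables override file values
```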

@@ -25,6 +25,7 @@ def get_application_name(CLUSTER_HOST_ID, function=''):


 def set_application_name(DATABASES, CLUSTER_HOST_ID, function=''):
+    """In place modification of DATABASES to set the application name for the connection."""
     # If settings files were not properly passed DATABASES could be {} at which point we don't need to set the app name.
     if not DATABASES or 'default' not in DATABASES:
         return

@@ -9,9 +9,6 @@ import tempfile
 import socket
 from datetime import timedelta

-from split_settings.tools import include
-
-
 DEBUG = True
 SQL_DEBUG = DEBUG

@@ -1015,16 +1012,15 @@ METRICS_SUBSYSTEM_CONFIG = {
     }
 }


 # django-ansible-base
 ANSIBLE_BASE_TEAM_MODEL = 'main.Team'
 ANSIBLE_BASE_ORGANIZATION_MODEL = 'main.Organization'
 ANSIBLE_BASE_RESOURCE_CONFIG_MODULE = 'awx.resource_api'
 ANSIBLE_BASE_PERMISSION_MODEL = 'main.Permission'

-from ansible_base.lib import dynamic_config  # noqa: E402
-
-include(os.path.join(os.path.dirname(dynamic_config.__file__), 'dynamic_settings.py'))
+# Defaults to be overridden by DAB
+SPECTACULAR_SETTINGS = {}
+OAUTH2_PROVIDER = {}

 # Add a postfix to the API URL patterns
 # example if set to '' API pattern will be /api
@@ -1,129 +1,13 @@
-# Copyright (c) 2015 Ansible, Inc.
-# All Rights Reserved.
-
-# Development settings for AWX project.
-
-# Python
-import os
-import socket
-import copy
-import sys
-import traceback
-
-# Centos-7 doesn't include the svg mime type
-# /usr/lib64/python/mimetypes.py
-import mimetypes
-
-# Django Split Settings
-from split_settings.tools import optional, include
-
-# Load default settings.
-from .defaults import *  # NOQA
-
-# awx-manage shell_plus --notebook
-NOTEBOOK_ARGUMENTS = ['--NotebookApp.token=', '--ip', '0.0.0.0', '--port', '9888', '--allow-root', '--no-browser']
-
-# print SQL queries in shell_plus
-SHELL_PLUS_PRINT_SQL = False
-
-# show colored logs in the dev environment
-# to disable this, set `COLOR_LOGS = False` in awx/settings/local_settings.py
-COLOR_LOGS = True
-LOGGING['handlers']['console']['()'] = 'awx.main.utils.handlers.ColorHandler'  # noqa
-
-ALLOWED_HOSTS = ['*']
-
-mimetypes.add_type("image/svg+xml", ".svg", True)
-mimetypes.add_type("image/svg+xml", ".svgz", True)
-
-# Disallow sending session cookies over insecure connections
-SESSION_COOKIE_SECURE = False
-
-# Disallow sending csrf cookies over insecure connections
-CSRF_COOKIE_SECURE = False
-
-# Disable Pendo on the UI for development/test.
-# Note: This setting may be overridden by database settings.
-PENDO_TRACKING_STATE = "off"
-INSIGHTS_TRACKING_STATE = False
-
-# debug toolbar and swagger assume that requirements/requirements_dev.txt are installed
-
-INSTALLED_APPS += ['drf_yasg', 'debug_toolbar']  # NOQA
-
-MIDDLEWARE = ['debug_toolbar.middleware.DebugToolbarMiddleware'] + MIDDLEWARE  # NOQA
-
-DEBUG_TOOLBAR_CONFIG = {'ENABLE_STACKTRACES': True}
-
-# Configure a default UUID for development only.
-SYSTEM_UUID = '00000000-0000-0000-0000-000000000000'
-INSTALL_UUID = '00000000-0000-0000-0000-000000000000'
-
-# Ansible base virtualenv paths and enablement
-# only used for deprecated fields and management commands for them
-BASE_VENV_PATH = os.path.realpath("/var/lib/awx/venv")
-
-CLUSTER_HOST_ID = socket.gethostname()
-
-AWX_CALLBACK_PROFILE = True
-
-# this modifies FLAGS set by defaults
-FLAGS['FEATURE_INDIRECT_NODE_COUNTING_ENABLED'] = [{'condition': 'boolean', 'value': True}]  # noqa
-
-# ======================!!!!!!! FOR DEVELOPMENT ONLY !!!!!!!=================================
-# Disable normal scheduled/triggered task managers (DependencyManager, TaskManager, WorkflowManager).
-# Allows user to trigger task managers directly for debugging and profiling purposes.
-# Only works in combination with settings.SETTINGS_MODULE == 'awx.settings.development'
-AWX_DISABLE_TASK_MANAGERS = False
-
-# Needed for launching runserver in debug mode
-# ======================!!!!!!! FOR DEVELOPMENT ONLY !!!!!!!=================================
-
-# Store a snapshot of default settings at this point before loading any
-# customizable config files.
-this_module = sys.modules[__name__]
-local_vars = dir(this_module)
-DEFAULTS_SNAPSHOT = {}  # define after we save local_vars so we do not snapshot the snapshot
-for setting in local_vars:
-    if setting.isupper():
-        DEFAULTS_SNAPSHOT[setting] = copy.deepcopy(getattr(this_module, setting))
-
-del local_vars  # avoid temporary variables from showing up in dir(settings)
-del this_module
-#
-###############################################################################################
-#
-# Any settings defined after this point will be marked as as a read_only database setting
-#
-################################################################################################
-
-# If there is an `/etc/tower/settings.py`, include it.
-# If there is a `/etc/tower/conf.d/*.py`, include them.
-include(optional('/etc/tower/settings.py'), scope=locals())
-include(optional('/etc/tower/conf.d/*.py'), scope=locals())
-
-# If any local_*.py files are present in awx/settings/, use them to override
-# default settings for development. If not present, we can still run using
-# only the defaults.
-# this needs to stay at the bottom of this file
-try:
-    if os.getenv('AWX_KUBE_DEVEL', False):
-        include(optional('development_kube.py'), scope=locals())
-    else:
-        include(optional('local_*.py'), scope=locals())
-except ImportError:
-    traceback.print_exc()
-    sys.exit(1)
-
-# The below runs AFTER all of the custom settings are imported
-# because conf.d files will define DATABASES and this should modify that
-from .application_name import set_application_name
-
-set_application_name(DATABASES, CLUSTER_HOST_ID)  # NOQA
-
-del set_application_name
-
-# Set the value of any feature flags that are defined in the local settings
-for feature in list(FLAGS.keys()):  # noqa: F405
-    if feature in locals():
-        FLAGS[feature][0]['value'] = locals()[feature]  # noqa: F405
+# This file exists for backwards compatibility only
+# the current way of running AWX is to point settings to
+# awx/settings/__init__.py as the entry point for the settings
+# that is done by exporting: export DJANGO_SETTINGS_MODULE=awx.settings
+import os
+
+os.environ.setdefault("DJANGO_SETTINGS_MODULE", "awx.settings")
+os.environ.setdefault("AWX_MODE", "development")
+
+from ansible_base.lib.dynamic_config import export
+from . import DYNACONF  # noqa
+
+export(__name__, DYNACONF)

awx/settings/development_defaults.py (new file, 76 lines)
@@ -0,0 +1,76 @@
+# Copyright (c) 2015 Ansible, Inc.
+# All Rights Reserved.
+
+# Development settings for AWX project.
+
+# Python
+import os
+import socket
+
+# Centos-7 doesn't include the svg mime type
+# /usr/lib64/python/mimetypes.py
+import mimetypes
+
+from dynaconf import post_hook
+
+# awx-manage shell_plus --notebook
+NOTEBOOK_ARGUMENTS = ['--NotebookApp.token=', '--ip', '0.0.0.0', '--port', '9888', '--allow-root', '--no-browser']
+
+# print SQL queries in shell_plus
+SHELL_PLUS_PRINT_SQL = False
+
+# show colored logs in the dev environment
+# to disable this, set `COLOR_LOGS = False` in awx/settings/local_settings.py
+COLOR_LOGS = True
+LOGGING__handlers__console = '@merge {"()": "awx.main.utils.handlers.ColorHandler"}'
+
+ALLOWED_HOSTS = ['*']
+
+mimetypes.add_type("image/svg+xml", ".svg", True)
+mimetypes.add_type("image/svg+xml", ".svgz", True)
+
+# Disallow sending session cookies over insecure connections
+SESSION_COOKIE_SECURE = False
+
+# Disallow sending csrf cookies over insecure connections
+CSRF_COOKIE_SECURE = False
+
+# Disable Pendo on the UI for development/test.
+# Note: This setting may be overridden by database settings.
+PENDO_TRACKING_STATE = "off"
+INSIGHTS_TRACKING_STATE = False
+
+# debug toolbar and swagger assume that requirements/requirements_dev.txt are installed
+INSTALLED_APPS = "@merge drf_yasg,debug_toolbar"
+MIDDLEWARE = "@insert 0 debug_toolbar.middleware.DebugToolbarMiddleware"
+
+DEBUG_TOOLBAR_CONFIG = {'ENABLE_STACKTRACES': True}
+
+# Configure a default UUID for development only.
+SYSTEM_UUID = '00000000-0000-0000-0000-000000000000'
+INSTALL_UUID = '00000000-0000-0000-0000-000000000000'
+
+# Ansible base virtualenv paths and enablement
+# only used for deprecated fields and management commands for them
+BASE_VENV_PATH = os.path.realpath("/var/lib/awx/venv")
+
+CLUSTER_HOST_ID = socket.gethostname()
+
+AWX_CALLBACK_PROFILE = True
+
+# ======================!!!!!!! FOR DEVELOPMENT ONLY !!!!!!!=================================
+# Disable normal scheduled/triggered task managers (DependencyManager, TaskManager, WorkflowManager).
+# Allows user to trigger task managers directly for debugging and profiling purposes.
+# Only works in combination with settings.SETTINGS_MODULE == 'awx.settings.development'
+AWX_DISABLE_TASK_MANAGERS = False
+
+# Needed for launching runserver in debug mode
+# ======================!!!!!!! FOR DEVELOPMENT ONLY !!!!!!!=================================
+
+
+# This modifies FLAGS set by defaults, must be deferred to run later
+@post_hook
+def set_dev_flags(settings):
+    defaults_flags = settings.get("FLAGS", {})
+    defaults_flags['FEATURE_INDIRECT_NODE_COUNTING_ENABLED'] = [{'condition': 'boolean', 'value': True}]
+    return {'FLAGS': defaults_flags}
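
The string values above are dynaconf tokens rather than plain Python: '@merge drf_yasg,debug_toolbar' appends to the INSTALLED_APPS list inherited from defaults.py instead of replacing it, '@insert 0 ...' prepends to MIDDLEWARE, and @post_hook defers the FLAGS mutation until every settings layer has loaded. A rough standalone illustration of the @merge token, assuming dynaconf's documented set(..., tomlfy=True) token parsing:

```python
from dynaconf import Dynaconf

settings = Dynaconf()
settings.set("INSTALLED_APPS", ["django.contrib.admin"])

# tomlfy=True asks dynaconf to parse the "@merge" token instead of
# storing the raw string; the comma-separated items extend the list.
settings.set("INSTALLED_APPS", "@merge drf_yasg,debug_toolbar", tomlfy=True)

print(settings.INSTALLED_APPS)
# expected: ['django.contrib.admin', 'drf_yasg', 'debug_toolbar']
```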

@@ -1,4 +1,13 @@
-BROADCAST_WEBSOCKET_SECRET = '🤖starscream🤖'
-BROADCAST_WEBSOCKET_PORT = 8052
-BROADCAST_WEBSOCKET_VERIFY_CERT = False
-BROADCAST_WEBSOCKET_PROTOCOL = 'http'
+# This file exists for backwards compatibility only
+# the current way of running AWX is to point settings to
+# awx/settings/__init__.py as the entry point for the settings
+# that is done by exporting: export DJANGO_SETTINGS_MODULE=awx.settings
+import os
+
+os.environ.setdefault("DJANGO_SETTINGS_MODULE", "awx.settings")
+os.environ.setdefault("AWX_MODE", "development,kube")
+
+from ansible_base.lib.dynamic_config import export
+from . import DYNACONF  # noqa
+
+export(__name__, DYNACONF)

@@ -1,15 +1,13 @@
-# Copyright (c) 2015 Ansible, Inc.
-# All Rights Reserved.
-
-# Development settings for AWX project, but with DEBUG disabled
-
-# Load development settings.
-from defaults import *  # NOQA
-
-# Load development settings.
-from development import *  # NOQA
-
-# Disable capturing DEBUG
-DEBUG = False
-TEMPLATE_DEBUG = DEBUG
-SQL_DEBUG = DEBUG
+# This file exists for backwards compatibility only
+# the current way of running AWX is to point settings to
+# awx/settings/__init__.py as the entry point for the settings
+# that is done by exporting: export DJANGO_SETTINGS_MODULE=awx.settings
+import os
+
+os.environ.setdefault("DJANGO_SETTINGS_MODULE", "awx.settings")
+os.environ.setdefault("AWX_MODE", "development,quiet")
+
+from ansible_base.lib.dynamic_config import export
+from . import DYNACONF  # noqa
+
+export(__name__, DYNACONF)

awx/settings/functions.py (new file, 86 lines)
@@ -0,0 +1,86 @@
+import os
+
+from ansible_base.lib.dynamic_config import load_python_file_with_injected_context
+from dynaconf import Dynaconf
+
+from .application_name import get_application_name
+
+
+def merge_application_name(settings):
+    """Return a dynaconf merge dict to set the application name for the connection."""
+    data = {}
+    if "sqlite3" not in settings.get("DATABASES__default__ENGINE", ""):
+        data["DATABASES__default__OPTIONS__application_name"] = get_application_name(settings.get("CLUSTER_HOST_ID"))
+    return data
+
+
+def add_backwards_compatibility():
+    """Add backwards compatibility for AWX_MODE.
+
+    Before dynaconf integration the usage of AWX settings was supported to be just
+    DJANGO_SETTINGS_MODULE=awx.settings.production or DJANGO_SETTINGS_MODULE=awx.settings.development
+    (development_quiet and development_kube were also supported).
+
+    With dynaconf the DJANGO_SETTINGS_MODULE should be set always to "awx.settings" as the only entry point
+    for settings and then "AWX_MODE" can be set to any of production,development,quiet,kube
+    or a combination of them separated by comma.
+
+    E.g:
+
+        export DJANGO_SETTINGS_MODULE=awx.settings
+        export AWX_MODE=production
+        awx-manage [command]
+        dynaconf [command]
+
+    If pointing `DJANGO_SETTINGS_MODULE` to `awx.settings.production` or `awx.settings.development` then
+    this function will set `AWX_MODE` to the correct value.
+    """
+    django_settings_module = os.getenv("DJANGO_SETTINGS_MODULE", "awx.settings")
+    if django_settings_module == "awx.settings":
+        return
+
+    current_mode = os.getenv("AWX_MODE", "")
+    for _module_name in ["development", "production", "development_quiet", "development_kube"]:
+        if django_settings_module == f"awx.settings.{_module_name}":
+            _mode = current_mode.split(",")
+            if "development_" in _module_name and "development" not in current_mode:
+                _mode.append("development")
+            _mode_fragment = _module_name.replace("development_", "")
+            if _mode_fragment not in _mode:
+                _mode.append(_mode_fragment)
+            os.environ["AWX_MODE"] = ",".join(_mode)
+
+
+def load_extra_development_files(settings: Dynaconf):
+    """Load optional development only settings files."""
+    if not settings.is_development_mode:
+        return
+
+    if settings.get_environ("AWX_KUBE_DEVEL"):
+        load_python_file_with_injected_context("kube_defaults.py", settings=settings)
+    else:
+        load_python_file_with_injected_context("local_*.py", settings=settings)
+
+
+def assert_production_settings(settings: Dynaconf, settings_dir: str, settings_file_path: str):  # pragma: no cover
+    """Ensure at least one setting file has been loaded in production mode.
+
+    Current systems will require /etc/tower/settings.py and
+    new systems will require /etc/ansible-automation-platform/*.yaml
+    """
+    if "production" not in settings.current_env.lower():
+        return
+
+    required_settings_paths = [
+        os.path.dirname(settings_file_path),
+        "/etc/ansible-automation-platform/",
+        settings_dir,
+    ]
+
+    for path in required_settings_paths:
+        if any([path in os.path.dirname(f) for f in settings._loaded_files]):
+            break
+    else:
+        from django.core.exceptions import ImproperlyConfigured  # noqa
+
+        msg = 'No AWX configuration found at %s.' % required_settings_paths
+        msg += '\nDefine the AWX_SETTINGS_FILE environment variable to '
+        msg += 'specify an alternate path.'
+        raise ImproperlyConfigured(msg)
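
To make the AWX_MODE backfill concrete, a hypothetical session traced from add_backwards_compatibility() above (the assertion follows from the function body, not from AWX documentation):

```python
import os

from awx.settings.functions import add_backwards_compatibility

os.environ["DJANGO_SETTINGS_MODULE"] = "awx.settings.development_kube"
os.environ["AWX_MODE"] = "development"

add_backwards_compatibility()

# the legacy module path contributes its "kube" fragment to the mode list
assert os.environ["AWX_MODE"] == "development,kube"
```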

awx/settings/kube_defaults.py (new file, 4 lines)
@@ -0,0 +1,4 @@
+BROADCAST_WEBSOCKET_SECRET = '🤖starscream🤖'
+BROADCAST_WEBSOCKET_PORT = 8052
+BROADCAST_WEBSOCKET_VERIFY_CERT = False
+BROADCAST_WEBSOCKET_PROTOCOL = 'http'

@@ -1,111 +1,13 @@
-# Copyright (c) 2015 Ansible, Inc.
-# All Rights Reserved.
-
-# Production settings for AWX project.
-
-# Python
-import os
-import copy
-import errno
-import sys
-import traceback
-
-# Django Split Settings
-from split_settings.tools import optional, include
-
-# Load default settings.
-from .defaults import *  # NOQA
-
-DEBUG = False
-TEMPLATE_DEBUG = DEBUG
-SQL_DEBUG = DEBUG
-
-# Clear database settings to force production environment to define them.
-DATABASES = {}
-
-# Clear the secret key to force production environment to define it.
-SECRET_KEY = None
-
-# Hosts/domain names that are valid for this site; required if DEBUG is False
-# See https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts
-ALLOWED_HOSTS = []
-
-# Ansible base virtualenv paths and enablement
-# only used for deprecated fields and management commands for them
-BASE_VENV_PATH = os.path.realpath("/var/lib/awx/venv")
-
-# Very important that this is editable (not read_only) in the API
-AWX_ISOLATION_SHOW_PATHS = [
-    '/etc/pki/ca-trust:/etc/pki/ca-trust:O',
-    '/usr/share/pki:/usr/share/pki:O',
-]
-
-# Store a snapshot of default settings at this point before loading any
-# customizable config files.
-this_module = sys.modules[__name__]
-local_vars = dir(this_module)
-DEFAULTS_SNAPSHOT = {}  # define after we save local_vars so we do not snapshot the snapshot
-for setting in local_vars:
-    if setting.isupper():
-        DEFAULTS_SNAPSHOT[setting] = copy.deepcopy(getattr(this_module, setting))
-
-del local_vars  # avoid temporary variables from showing up in dir(settings)
-del this_module
-#
-###############################################################################################
-#
-# Any settings defined after this point will be marked as as a read_only database setting
-#
-################################################################################################
-
-# Load settings from any .py files in the global conf.d directory specified in
-# the environment, defaulting to /etc/tower/conf.d/.
-settings_dir = os.environ.get('AWX_SETTINGS_DIR', '/etc/tower/conf.d/')
-settings_files = os.path.join(settings_dir, '*.py')
-
-# Load remaining settings from the global settings file specified in the
-# environment, defaulting to /etc/tower/settings.py.
-settings_file = os.environ.get('AWX_SETTINGS_FILE', '/etc/tower/settings.py')
-
-# Attempt to load settings from /etc/tower/settings.py first, followed by
-# /etc/tower/conf.d/*.py.
-try:
-    include(settings_file, optional(settings_files), scope=locals())
-except ImportError:
-    traceback.print_exc()
-    sys.exit(1)
-except IOError:
-    from django.core.exceptions import ImproperlyConfigured
-
-    included_file = locals().get('__included_file__', '')
-    if not included_file or included_file == settings_file:
-        # The import doesn't always give permission denied, so try to open the
-        # settings file directly.
-        try:
-            e = None
-            open(settings_file)
-        except IOError:
-            pass
-        if e and e.errno == errno.EACCES:
-            SECRET_KEY = 'permission-denied'
-            LOGGING = {}
-        else:
-            msg = 'No AWX configuration found at %s.' % settings_file
-            msg += '\nDefine the AWX_SETTINGS_FILE environment variable to '
-            msg += 'specify an alternate path.'
-            raise ImproperlyConfigured(msg)
-    else:
-        raise
-
-# The below runs AFTER all of the custom settings are imported
-# because conf.d files will define DATABASES and this should modify that
-from .application_name import set_application_name
-
-set_application_name(DATABASES, CLUSTER_HOST_ID)  # NOQA
-
-del set_application_name
-
-# Set the value of any feature flags that are defined in the local settings
-for feature in list(FLAGS.keys()):  # noqa: F405
-    if feature in locals():
-        FLAGS[feature][0]['value'] = locals()[feature]  # noqa: F405
+# This file exists for backwards compatibility only
+# the current way of running AWX is to point settings to
+# awx/settings/__init__.py as the entry point for the settings
+# that is done by exporting: export DJANGO_SETTINGS_MODULE=awx.settings
+import os
+
+os.environ.setdefault("DJANGO_SETTINGS_MODULE", "awx.settings")
+os.environ.setdefault("AWX_MODE", "production")
+
+from ansible_base.lib.dynamic_config import export
+from . import DYNACONF  # noqa
+
+export(__name__, DYNACONF)

awx/settings/production_defaults.py (new file, 30 lines)
@@ -0,0 +1,30 @@
+# Copyright (c) 2015 Ansible, Inc.
+# All Rights Reserved.
+
+# Production settings for AWX project.
+
+import os
+
+DEBUG = False
+TEMPLATE_DEBUG = DEBUG
+SQL_DEBUG = DEBUG
+
+# Clear database settings to force production environment to define them.
+DATABASES = {}
+
+# Clear the secret key to force production environment to define it.
+SECRET_KEY = None
+
+# Hosts/domain names that are valid for this site; required if DEBUG is False
+# See https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts
+ALLOWED_HOSTS = []
+
+# Ansible base virtualenv paths and enablement
+# only used for deprecated fields and management commands for them
+BASE_VENV_PATH = os.path.realpath("/var/lib/awx/venv")
+
+# Very important that this is editable (not read_only) in the API
+AWX_ISOLATION_SHOW_PATHS = [
+    '/etc/pki/ca-trust:/etc/pki/ca-trust:O',
+    '/usr/share/pki:/usr/share/pki:O',
+]

awx/settings/quiet_defaults.py (new file, 8 lines)
@@ -0,0 +1,8 @@
+# Copyright (c) 2015 Ansible, Inc.
+# All Rights Reserved.
+# Development settings for AWX project, but with DEBUG disabled
+
+# Disable capturing DEBUG
+DEBUG = False
+TEMPLATE_DEBUG = DEBUG
+SQL_DEBUG = DEBUG

@@ -37,7 +37,7 @@ def get_urlpatterns(prefix=None):
         re_path(r'^(?!api/).*', include('awx.ui.urls', namespace='ui')),
     ]

-    if settings.SETTINGS_MODULE == 'awx.settings.development':
+    if settings.DYNACONF.is_development_mode:
        try:
            import debug_toolbar

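The new check reads settings.DYNACONF rather than comparing module paths, which works because export(__name__, DYNACONF) publishes the Dynaconf object alongside the plain setting values. A sketch of the resulting access patterns (assuming export() copies both onto the settings module, as the shim files above suggest):

```python
from django.conf import settings

settings.DEBUG                          # plain exported value, as before
settings.DYNACONF.get("DEBUG")          # the same value through the Dynaconf API
settings.DYNACONF.is_development_mode   # the mode helper used by get_urlpatterns()
```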

@@ -29,7 +29,7 @@ DOCUMENTATION = """
     description:
       - The date to start the rule
       - Used for all frequencies
-      - Format should be YYYY-MM-DD [HH:MM:SS]
+      - Format should be 'YYYY-MM-DD HH:MM:SS'
     type: str
   timezone:
     description:
@@ -47,8 +47,8 @@ DOCUMENTATION = """
     description:
       - How to end this schedule
      - If this is not defined, this schedule will never end
-      - If this is a positive integer, this schedule will end after this number of occurences
-      - If this is a date in the format YYYY-MM-DD [HH:MM:SS], this schedule ends after this date
+      - If this is a positive number, specified as a string, this schedule will end after this number of occurrences
+      - If this is a date in the format 'YYYY-MM-DD HH:MM:SS', this schedule ends after this date
      - Used for all types except none
     type: str
   on_days:
@@ -268,7 +268,7 @@ def main():
         for resource in value:
             # Attempt to look up project based on the provided name, ID, or named URL and lookup data
             lookup_key = key
-            if key == 'organizations' or key == 'users':
+            if key == 'organizations' or key == 'users' or key == 'teams':
                 lookup_data_populated = {}
             else:
                 lookup_data_populated = lookup_data

@@ -13,6 +13,7 @@
     wfjt_name: "AWX-Collection-tests-role-project-wfjt-{{ test_id }}"
     team_name: "AWX-Collection-tests-team-team-{{ test_id }}"
     team2_name: "AWX-Collection-tests-team-team-{{ test_id }}2"
+    org2_name: "AWX-Collection-tests-organization-{{ test_id }}2"

- block:
    - name: Create a User
@@ -209,6 +210,40 @@
       that:
         - "result is changed"

+    - name: Create a 2nd organization
+      organization:
+        name: "{{ org2_name }}"
+
+    - name: Create a project in 2nd Organization
+      project:
+        name: "{{ project_name }}"
+        organization: "{{ org2_name }}"
+        scm_type: git
+        scm_url: https://github.com/ansible/test-playbooks
+        wait: true
+      register: project_info
+
+    - name: Add Joe and teams to the update role of the default Project with lookup from the 2nd Organization
+      role:
+        user: "{{ username }}"
+        users:
+          - "{{ username }}2"
+        teams:
+          - "{{ team_name }}"
+          - "{{ team2_name }}"
+        role: update
+        lookup_organization: "{{ org2_name }}"
+        project: "{{ project_name }}"
+        state: "{{ item }}"
+      register: result
+      with_items:
+        - "present"
+        - "absent"
+
+    - assert:
+        that:
+          - "result is changed"
+
   always:
     - name: Delete a User
       user:
@@ -252,3 +287,16 @@
         organization: Default
         state: absent
       register: result
+
+    - name: Delete the 2nd project
+      project:
+        name: "{{ project_name }}"
+        organization: "{{ org2_name }}"
+        state: absent
+      register: result
+
+    - name: Delete the 2nd organization
+      organization:
+        name: "{{ org2_name }}"
+        state: absent
+      register: result

@@ -1,27 +0,0 @@
-Copyright (c) 2013, 2General Oy
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without modification,
-are permitted provided that the following conditions are met:
-
-1. Redistributions of source code must retain the above copyright notice,
-   this list of conditions and the following disclaimer.
-
-2. Redistributions in binary form must reproduce the above copyright
-   notice, this list of conditions and the following disclaimer in the
-   documentation and/or other materials provided with the distribution.
-
-3. Neither the name of django-split-settings nor the names of its contributors
-   may be used to endorse or promote products derived from this software
-   without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
-ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
-ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

licenses/dynaconf.txt (new file, 21 lines)
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2015 Bruno Rocha
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
@@ -22,9 +22,9 @@ django-guid
 django-oauth-toolkit<2.0.0  # Version 2.0.0 has breaking changes that will need to be worked out before upgrading
 django-polymorphic
 django-solo
-django-split-settings
 djangorestframework>=3.15.0
 djangorestframework-yaml
+dynaconf<4
 filelock
 GitPython>=3.1.37  # CVE-2023-41040
 grpcio
@@ -158,10 +158,6 @@ django-polymorphic==3.1.0
     # via -r /awx_devel/requirements/requirements.in
 django-solo==2.4.0
     # via -r /awx_devel/requirements/requirements.in
-django-split-settings==1.3.2
-    # via
-    #   -r /awx_devel/requirements/requirements.in
-    #   django-ansible-base
 djangorestframework==3.15.2
     # via
     #   -r /awx_devel/requirements/requirements.in

@@ -170,6 +166,8 @@ djangorestframework-yaml==2.0.0
     # via -r /awx_devel/requirements/requirements.in
 durationpy==0.9
     # via kubernetes
+dynaconf==3.2.10
+    # via -r /awx_devel/requirements/requirements.in
 enum-compat==0.0.3
     # via asn1
 filelock==3.16.1

@@ -2,7 +2,7 @@ build
 coreapi
 django-debug-toolbar==3.2.4
 django-test-migrations
-drf-yasg
+drf-yasg<1.21.10  # introduces new DeprecationWarning that is turned into error
 # pprofile - re-add once https://github.com/vpelletier/pprofile/issues/41 is addressed
 ipython>=7.31.1  # https://github.com/ansible/awx/security/dependabot/30
 unittest2

@@ -1,6 +1,6 @@
 git+https://github.com/ansible/system-certifi.git@devel#egg=certifi
 # Remove pbr from requirements.in when moving ansible-runner to requirements.in
 git+https://github.com/ansible/ansible-runner.git@devel#egg=ansible-runner
-django-ansible-base @ git+https://github.com/ansible/django-ansible-base@devel#egg=django-ansible-base[rest-filters,jwt_consumer,resource-registry,rbac,feature-flags]
 awx-plugins-core @ git+https://github.com/ansible/awx-plugins.git@devel#egg=awx-plugins-core[credentials-github-app]
+django-ansible-base @ git+https://github.com/ansible/django-ansible-base@devel#egg=django-ansible-base[rest-filters,jwt_consumer,resource-registry,rbac,feature-flags]
 awx_plugins.interfaces @ git+https://github.com/ansible/awx_plugins.interfaces.git

tools/community-bugscrub/README.md (new file, 19 lines)
@@ -0,0 +1,19 @@
+# Community BugScrub tooling
+
+Small python script that automatically distributes PRs and Issues given a list of `people` and dumps the contents in a Spreadsheet.
+
+To be used when distributing the work of reviewing community contributions.
+
+## Usage
+
+Install requirements.
+
+```
+pip install -r requirements.txt
+```
+
+Get the usage.
+
+```
+python generate-sheet.py -h
+```

tools/community-bugscrub/generate-sheet.py (new file, 125 lines)
@@ -0,0 +1,125 @@
+import argparse
+import os
+from typing import OrderedDict
+
+import pyexcel
+import requests
+import sys
+
+
+def get_headers():
+    access_token_env_var = "GITHUB_ACCESS_TOKEN"
+    if access_token_env_var in os.environ:
+        access_token = os.environ[access_token_env_var]
+        return {"Authorization": f"token {access_token}"}
+    else:
+        print(f"{access_token_env_var} not present, performing unathenticated calls that might hit rate limits.")
+        return None
+
+
+def fetch_items(url, params, headers):
+    response = requests.get(url, params=params, headers=headers)
+    if response.status_code == 200:
+        return response
+    else:
+        print(f"Failed to fetch items: {response.status_code}", file=sys.stderr)
+        print(f"{response.content}", file=sys.stderr)
+        return None
+
+
+def extract_next_url(response):
+    if 'Link' in response.headers:
+        links = response.headers['Link'].split(',')
+        for link in links:
+            if 'rel="next"' in link:
+                return link.split(';')[0].strip('<> ')
+    return None
+
+
+def get_all_items(url, params, limit=None):
+    items = []
+    headers = get_headers()
+    while url:
+        response = fetch_items(url, params, headers)
+        if response:
+            items.extend(response.json())
+            print(f"Processing {len(items)}", file=sys.stderr)
+            if limit and len(items) > limit:
+                break
+            url = extract_next_url(response)
+        else:
+            url = None
+    return items
+
+
+def get_open_issues(repo_url, limit):
+    owner, repo = repo_url.rstrip('/').split('/')[-2:]
+    url = f"https://api.github.com/repos/{owner}/{repo}/issues"
+    params = {'state': 'open', 'per_page': 100}
+    issues = get_all_items(url, params, limit)
+    open_issues = [issue for issue in issues if 'pull_request' not in issue]
+    return open_issues
+
+
+def get_open_pull_requests(repo_url, limit):
+    owner, repo = repo_url.rstrip('/').split('/')[-2:]
+    url = f"https://api.github.com/repos/{owner}/{repo}/pulls"
+    params = {'state': 'open', 'per_page': 100}
+    pull_requests = get_all_items(url, params, limit)
+    return pull_requests
+
+
+def generate_ods(issues, pull_requests, filename, people):
+    data = OrderedDict()
+
+    # Prepare issues data
+    issues_data = []
+    for n, issue in enumerate(issues):
+        issues_data.append(
+            [
+                issue['html_url'],
+                issue['title'],
+                issue['created_at'],
+                issue['user']['login'],
+                issue['assignee']['login'] if issue['assignee'] else 'None',
+                people[n % len(people)],
+            ]
+        )
+    issues_headers = ['url', 'title', 'created_at', 'user', 'assignee', 'action']
+    issues_data.insert(0, issues_headers)
+    data.update({"Issues": issues_data})
+
+    # Prepare pull requests data
+    prs_data = []
+    for n, pr in enumerate(pull_requests):
+        prs_data.append(
+            [pr['html_url'], pr['title'], pr['created_at'], pr['user']['login'], pr['assignee']['login'] if pr['assignee'] else 'None', people[n % len(people)]]
+        )
+    prs_headers = ['url', 'title', 'created_at', 'user', 'assignee', 'action']
+    prs_data.insert(0, prs_headers)
+    data.update({"Pull Requests": prs_data})
+
+    # Save to ODS file
+    pyexcel.save_book_as(bookdict=data, dest_file_name=filename)
+
+
+def main():
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--limit", type=int, help="minimum number of issues/PRs to pull [Pulls all by default]", default=None)
+    parser.add_argument("--out", type=str, help="output file name [awx_community-triage.ods]", default="awx_community-triage.ods")
+    parser.add_argument("--repository-url", type=str, help="repository url [https://github.com/ansible/awx]", default="https://github.com/ansible/awx")
+    parser.add_argument("--people", type=str, help="comma separated list of names to distribute the issues/PRs among [Alice,Bob]", default="Alice,Bob")
+    args = parser.parse_args()
+    limit = args.limit
+    output_file_name = args.out
+    repo_url = args.repository_url
+    people = str(args.people).split(",")
+    open_issues = get_open_issues(repo_url, limit)
+    open_pull_requests = get_open_pull_requests(repo_url, limit)
+    print(f"Open issues: {len(open_issues)}")
+    print(f"Open Pull Requests: {len(open_pull_requests)}")
+    generate_ods(open_issues, open_pull_requests, output_file_name, people)
+    print(f"Generated {output_file_name} with open issues and pull requests.")
+
+
+if __name__ == "__main__":
+    main()

tools/community-bugscrub/requirements.txt (new file, 2 lines)
@@ -0,0 +1,2 @@
+requests
+pyexcel

@@ -42,6 +42,7 @@ services:
       DJANGO_SUPERUSER_PASSWORD: {{ admin_password }}
       UWSGI_MOUNT_PATH: {{ ingress_path }}
       DJANGO_COLORS: "${DJANGO_COLORS:-}"
+      DJANGO_SETTINGS_MODULE: "awx.settings"
 {% if loop.index == 1 %}
       RUN_MIGRATIONS: 1
 {% endif %}