replace certain terms with more inclusive language

see: https://www.redhat.com/en/blog/making-open-source-more-inclusive-eradicating-problematic-language
This commit is contained in:
Ryan Petrello 2020-06-30 09:37:47 -04:00
parent 5b9c19df8f
commit 78229f5871
No known key found for this signature in database
GPG Key ID: F2AA5F2122351777
24 changed files with 103 additions and 97 deletions

View File

@ -146,7 +146,7 @@ class FieldLookupBackend(BaseFilterBackend):
# A list of fields that we know can be filtered on without the possibility
# of introducing duplicates
NO_DUPLICATES_WHITELIST = (CharField, IntegerField, BooleanField)
NO_DUPLICATES_ALLOW_LIST = (CharField, IntegerField, BooleanField)
def get_fields_from_lookup(self, model, lookup):
@ -205,7 +205,7 @@ class FieldLookupBackend(BaseFilterBackend):
field_list, new_lookup = self.get_fields_from_lookup(model, lookup)
field = field_list[-1]
needs_distinct = (not all(isinstance(f, self.NO_DUPLICATES_WHITELIST) for f in field_list))
needs_distinct = (not all(isinstance(f, self.NO_DUPLICATES_ALLOW_LIST) for f in field_list))
# Type names are stored without underscores internally, but are presented
# and serialized over the API containing underscores so we remove `_`

View File

@ -159,11 +159,11 @@ class APIView(views.APIView):
self.queries_before = len(connection.queries)
# If there are any custom headers in REMOTE_HOST_HEADERS, make sure
# they respect the proxy whitelist
# they respect the allowed proxy list
if all([
settings.PROXY_IP_WHITELIST,
request.environ.get('REMOTE_ADDR') not in settings.PROXY_IP_WHITELIST,
request.environ.get('REMOTE_HOST') not in settings.PROXY_IP_WHITELIST
settings.PROXY_IP_ALLOWED_LIST,
request.environ.get('REMOTE_ADDR') not in settings.PROXY_IP_ALLOWED_LIST,
request.environ.get('REMOTE_HOST') not in settings.PROXY_IP_ALLOWED_LIST
]):
for custom_header in settings.REMOTE_HOST_HEADERS:
if custom_header.startswith('HTTP_'):
@ -837,7 +837,7 @@ class CopyAPIView(GenericAPIView):
@staticmethod
def _decrypt_model_field_if_needed(obj, field_name, field_val):
if field_name in getattr(type(obj), 'REENCRYPTION_BLACKLIST_AT_COPY', []):
if field_name in getattr(type(obj), 'REENCRYPTION_BLOCKLIST_AT_COPY', []):
return field_val
if isinstance(obj, Credential) and field_name == 'inputs':
for secret in obj.credential_type.secret_fields:
@ -883,7 +883,7 @@ class CopyAPIView(GenericAPIView):
field_val = getattr(obj, field.name)
except AttributeError:
continue
# Adjust copy blacklist fields here.
# Adjust copy blocked fields here.
if field.name in fields_to_discard or field.name in [
'id', 'pk', 'polymorphic_ctype', 'unifiedjobtemplate_ptr', 'created_by', 'modified_by'
] or field.name.endswith('_role'):

View File

@ -1936,7 +1936,7 @@ class InventorySourceOptionsSerializer(BaseSerializer):
def validate_source_vars(self, value):
ret = vars_validate_or_raise(value)
for env_k in parse_yaml_or_json(value):
if env_k in settings.INV_ENV_VARIABLE_BLACKLIST:
if env_k in settings.INV_ENV_VARIABLE_BLOCKED:
raise serializers.ValidationError(_("`{}` is a prohibited environment variable".format(env_k)))
return ret

View File

@ -0,0 +1,19 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from awx.conf.migrations import _rename_setting
def copy_allowed_ips(apps, schema_editor):
_rename_setting.rename_setting(apps, schema_editor, old_key='PROXY_IP_WHITELIST', new_key='PROXY_IP_ALLOWED_LIST')
class Migration(migrations.Migration):
dependencies = [
('conf', '0006_v331_ldap_group_type'),
]
operations = [
migrations.RunPython(copy_allowed_ips),
]

View File

@ -1513,8 +1513,7 @@ class JobTemplateAccess(NotificationAttachMixin, BaseAccess):
thus can be made by a job template administrator which may not have access
to the any inventory, project, or credentials associated with the template.
'''
# We are white listing fields that can
field_whitelist = [
allowed_fields = [
'name', 'description', 'forks', 'limit', 'verbosity', 'extra_vars',
'job_tags', 'force_handlers', 'skip_tags', 'ask_variables_on_launch',
'ask_tags_on_launch', 'ask_job_type_on_launch', 'ask_skip_tags_on_launch',
@ -1529,7 +1528,7 @@ class JobTemplateAccess(NotificationAttachMixin, BaseAccess):
if k not in [x.name for x in obj._meta.concrete_fields]:
continue
if hasattr(obj, k) and getattr(obj, k) != v:
if k not in field_whitelist and v != getattr(obj, '%s_id' % k, None) \
if k not in allowed_fields and v != getattr(obj, '%s_id' % k, None) \
and not (hasattr(obj, '%s_id' % k) and getattr(obj, '%s_id' % k) is None and v == ''): # Equate '' to None in the case of foreign keys
return False
return True

View File

@ -80,11 +80,11 @@ register(
)
register(
'PROXY_IP_WHITELIST',
'PROXY_IP_ALLOWED_LIST',
field_class=fields.StringListField,
label=_('Proxy IP Whitelist'),
label=_('Proxy IP Allowed List'),
help_text=_("If Tower is behind a reverse proxy/load balancer, use this setting "
"to whitelist the proxy IP addresses from which Tower should trust "
"to configure the proxy IP addresses from which Tower should trust "
"custom REMOTE_HOST_HEADERS header values. "
"If this setting is an empty list (the default), the headers specified by "
"REMOTE_HOST_HEADERS will be trusted unconditionally')"),
@ -241,7 +241,7 @@ register(
field_class=fields.StringListField,
required=False,
label=_('Paths to expose to isolated jobs'),
help_text=_('Whitelist of paths that would otherwise be hidden to expose to isolated jobs. Enter one path per line.'),
help_text=_('List of paths that would otherwise be hidden to expose to isolated jobs. Enter one path per line.'),
category=_('Jobs'),
category_slug='jobs',
)

View File

@ -31,7 +31,7 @@ STANDARD_INVENTORY_UPDATE_ENV = {
CAN_CANCEL = ('new', 'pending', 'waiting', 'running')
ACTIVE_STATES = CAN_CANCEL
CENSOR_VALUE = '************'
ENV_BLACKLIST = frozenset((
ENV_BLOCKLIST = frozenset((
'VIRTUAL_ENV', 'PATH', 'PYTHONPATH', 'PROOT_TMP_DIR', 'JOB_ID',
'INVENTORY_ID', 'INVENTORY_SOURCE_ID', 'INVENTORY_UPDATE_ID',
'AD_HOC_COMMAND_ID', 'REST_API_URL', 'REST_API_TOKEN', 'MAX_EVENT_RES',
@ -41,7 +41,7 @@ ENV_BLACKLIST = frozenset((
))
# loggers that may be called in process of emitting a log
LOGGER_BLACKLIST = (
LOGGER_BLOCKLIST = (
'awx.main.utils.handlers',
'awx.main.utils.formatters',
'awx.main.utils.filters',

View File

@ -50,7 +50,7 @@ from awx.main.models.rbac import (
batch_role_ancestor_rebuilding, Role,
ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, ROLE_SINGLETON_SYSTEM_AUDITOR
)
from awx.main.constants import ENV_BLACKLIST
from awx.main.constants import ENV_BLOCKLIST
from awx.main import utils
@ -870,9 +870,9 @@ class CredentialTypeInjectorField(JSONSchemaField):
'use is not allowed in credentials.').format(env_var),
code='invalid', params={'value': env_var},
)
if env_var in ENV_BLACKLIST:
if env_var in ENV_BLOCKLIST:
raise django_exceptions.ValidationError(
_('Environment variable {} is blacklisted from use in credentials.').format(env_var),
_('Environment variable {} is not allowed to be used in credentials.').format(env_var),
code='invalid', params={'value': env_var},
)

View File

@ -407,7 +407,7 @@ def prevent_search(relation):
sensitive_data = prevent_search(models.CharField(...))
The flag set by this function is used by
`awx.api.filters.FieldLookupBackend` to blacklist fields and relations that
`awx.api.filters.FieldLookupBackend` to block fields and relations that
should not be searchable/filterable via search query params
"""
setattr(relation, '__prevent_search__', True)

View File

@ -1910,7 +1910,7 @@ class ec2(PluginFileInjector):
# Compatibility content
legacy_regex = {
True: r"[^A-Za-z0-9\_]",
False: r"[^A-Za-z0-9\_\-]" # do not replace dash, dash is whitelisted
False: r"[^A-Za-z0-9\_\-]" # do not replace dash, dash is allowed
}[replace_dash]
list_replacer = 'map("regex_replace", "{rx}", "_") | list'.format(rx=legacy_regex)
# this option, a plugin option, will allow dashes, but not unicode
@ -1943,7 +1943,7 @@ class ec2(PluginFileInjector):
ret['boto_profile'] = source_vars['boto_profile']
elif not replace_dash:
# Using the plugin, but still want dashes whitelisted
# Using the plugin, but still want dashes allowed
ret['use_contrib_script_compatible_sanitization'] = True
if source_vars.get('nested_groups') is False:

View File

@ -262,25 +262,25 @@ class JobNotificationMixin(object):
'running': 'started',
'failed': 'error'}
# Tree of fields that can be safely referenced in a notification message
JOB_FIELDS_WHITELIST = ['id', 'type', 'url', 'created', 'modified', 'name', 'description', 'job_type', 'playbook',
'forks', 'limit', 'verbosity', 'job_tags', 'force_handlers', 'skip_tags', 'start_at_task',
'timeout', 'use_fact_cache', 'launch_type', 'status', 'failed', 'started', 'finished',
'elapsed', 'job_explanation', 'execution_node', 'controller_node', 'allow_simultaneous',
'scm_revision', 'diff_mode', 'job_slice_number', 'job_slice_count', 'custom_virtualenv',
'approval_status', 'approval_node_name', 'workflow_url', 'scm_branch',
{'host_status_counts': ['skipped', 'ok', 'changed', 'failed', 'failures', 'dark'
'processed', 'rescued', 'ignored']},
{'summary_fields': [{'inventory': ['id', 'name', 'description', 'has_active_failures',
'total_hosts', 'hosts_with_active_failures', 'total_groups',
'has_inventory_sources',
'total_inventory_sources', 'inventory_sources_with_failures',
'organization_id', 'kind']},
{'project': ['id', 'name', 'description', 'status', 'scm_type']},
{'job_template': ['id', 'name', 'description']},
{'unified_job_template': ['id', 'name', 'description', 'unified_job_type']},
{'instance_group': ['name', 'id']},
{'created_by': ['id', 'username', 'first_name', 'last_name']},
{'labels': ['count', 'results']}]}]
JOB_FIELDS_ALLOWED_LIST = ['id', 'type', 'url', 'created', 'modified', 'name', 'description', 'job_type', 'playbook',
'forks', 'limit', 'verbosity', 'job_tags', 'force_handlers', 'skip_tags', 'start_at_task',
'timeout', 'use_fact_cache', 'launch_type', 'status', 'failed', 'started', 'finished',
'elapsed', 'job_explanation', 'execution_node', 'controller_node', 'allow_simultaneous',
'scm_revision', 'diff_mode', 'job_slice_number', 'job_slice_count', 'custom_virtualenv',
'approval_status', 'approval_node_name', 'workflow_url', 'scm_branch',
{'host_status_counts': ['skipped', 'ok', 'changed', 'failed', 'failures', 'dark'
'processed', 'rescued', 'ignored']},
{'summary_fields': [{'inventory': ['id', 'name', 'description', 'has_active_failures',
'total_hosts', 'hosts_with_active_failures', 'total_groups',
'has_inventory_sources',
'total_inventory_sources', 'inventory_sources_with_failures',
'organization_id', 'kind']},
{'project': ['id', 'name', 'description', 'status', 'scm_type']},
{'job_template': ['id', 'name', 'description']},
{'unified_job_template': ['id', 'name', 'description', 'unified_job_type']},
{'instance_group': ['name', 'id']},
{'created_by': ['id', 'username', 'first_name', 'last_name']},
{'labels': ['count', 'results']}]}]
@classmethod
def context_stub(cls):
@ -377,8 +377,8 @@ class JobNotificationMixin(object):
def context(self, serialized_job):
"""Returns a dictionary that can be used for rendering notification messages.
The context will contain whitelisted content retrieved from a serialized job object
(see JobNotificationMixin.JOB_FIELDS_WHITELIST), the job's friendly name,
The context will contain allowed content retrieved from a serialized job object
(see JobNotificationMixin.JOB_FIELDS_ALLOWED_LIST), the job's friendly name,
and a url to the job run."""
job_context = {'host_status_counts': {}}
summary = None
@ -395,22 +395,22 @@ class JobNotificationMixin(object):
'job_metadata': json.dumps(self.notification_data(), indent=4)
}
def build_context(node, fields, whitelisted_fields):
for safe_field in whitelisted_fields:
def build_context(node, fields, allowed_fields):
for safe_field in allowed_fields:
if type(safe_field) is dict:
field, whitelist_subnode = safe_field.copy().popitem()
field, allowed_subnode = safe_field.copy().popitem()
# ensure content present in job serialization
if field not in fields:
continue
subnode = fields[field]
node[field] = {}
build_context(node[field], subnode, whitelist_subnode)
build_context(node[field], subnode, allowed_subnode)
else:
# ensure content present in job serialization
if safe_field not in fields:
continue
node[safe_field] = fields[safe_field]
build_context(context['job'], serialized_job, self.JOB_FIELDS_WHITELIST)
build_context(context['job'], serialized_job, self.JOB_FIELDS_ALLOWED_LIST)
return context

View File

@ -139,7 +139,7 @@ class WorkflowJobTemplateNode(WorkflowNodeBase):
'always_nodes', 'credentials', 'inventory', 'extra_data', 'survey_passwords',
'char_prompts', 'all_parents_must_converge', 'identifier'
]
REENCRYPTION_BLACKLIST_AT_COPY = ['extra_data', 'survey_passwords']
REENCRYPTION_BLOCKLIST_AT_COPY = ['extra_data', 'survey_passwords']
workflow_job_template = models.ForeignKey(
'WorkflowJobTemplate',

View File

@ -1802,7 +1802,7 @@ class RunJob(BaseTask):
# By default, all extra vars disallow Jinja2 template usage for
# security reasons; top level key-values defined in JT.extra_vars, however,
# are whitelisted as "safe" (because they can only be set by users with
# are allowed as "safe" (because they can only be set by users with
# higher levels of privilege - those that have the ability create and
# edit Job Templates)
safe_dict = {}
@ -2472,7 +2472,7 @@ class RunInventoryUpdate(BaseTask):
if inventory_update.source in ['scm', 'custom']:
for env_k in inventory_update.source_vars_dict:
if str(env_k) not in env and str(env_k) not in settings.INV_ENV_VARIABLE_BLACKLIST:
if str(env_k) not in env and str(env_k) not in settings.INV_ENV_VARIABLE_BLOCKED:
env[str(env_k)] = str(inventory_update.source_vars_dict[env_k])
elif inventory_update.source == 'file':
raise NotImplementedError('Cannot update file sources through the task system.')

View File

@ -4,7 +4,7 @@ from awx.api.versioning import reverse
@pytest.mark.django_db
def test_proxy_ip_whitelist(get, patch, admin):
def test_proxy_ip_allowed(get, patch, admin):
url = reverse('api:setting_singleton_detail', kwargs={'category_slug': 'system'})
patch(url, user=admin, data={
'REMOTE_HOST_HEADERS': [
@ -23,37 +23,37 @@ def test_proxy_ip_whitelist(get, patch, admin):
def process_response(self, request, response):
self.environ = request.environ
# By default, `PROXY_IP_WHITELIST` is disabled, so custom `REMOTE_HOST_HEADERS`
# By default, `PROXY_IP_ALLOWED_LIST` is disabled, so custom `REMOTE_HOST_HEADERS`
# should just pass through
middleware = HeaderTrackingMiddleware()
get(url, user=admin, middleware=middleware,
HTTP_X_FROM_THE_LOAD_BALANCER='some-actual-ip')
assert middleware.environ['HTTP_X_FROM_THE_LOAD_BALANCER'] == 'some-actual-ip'
# If `PROXY_IP_WHITELIST` is restricted to 10.0.1.100 and we make a request
# If `PROXY_IP_ALLOWED_LIST` is restricted to 10.0.1.100 and we make a request
# from 8.9.10.11, the custom `HTTP_X_FROM_THE_LOAD_BALANCER` header should
# be stripped
patch(url, user=admin, data={
'PROXY_IP_WHITELIST': ['10.0.1.100']
'PROXY_IP_ALLOWED_LIST': ['10.0.1.100']
})
middleware = HeaderTrackingMiddleware()
get(url, user=admin, middleware=middleware, REMOTE_ADDR='8.9.10.11',
HTTP_X_FROM_THE_LOAD_BALANCER='some-actual-ip')
assert 'HTTP_X_FROM_THE_LOAD_BALANCER' not in middleware.environ
# If 8.9.10.11 is added to `PROXY_IP_WHITELIST` the
# If 8.9.10.11 is added to `PROXY_IP_ALLOWED_LIST` the
# `HTTP_X_FROM_THE_LOAD_BALANCER` header should be passed through again
patch(url, user=admin, data={
'PROXY_IP_WHITELIST': ['10.0.1.100', '8.9.10.11']
'PROXY_IP_ALLOWED_LIST': ['10.0.1.100', '8.9.10.11']
})
middleware = HeaderTrackingMiddleware()
get(url, user=admin, middleware=middleware, REMOTE_ADDR='8.9.10.11',
HTTP_X_FROM_THE_LOAD_BALANCER='some-actual-ip')
assert middleware.environ['HTTP_X_FROM_THE_LOAD_BALANCER'] == 'some-actual-ip'
# Allow whitelisting of proxy hostnames in addition to IP addresses
# Allow proxy hostnames in the allowed list in addition to IP addresses
patch(url, user=admin, data={
'PROXY_IP_WHITELIST': ['my.proxy.example.org']
'PROXY_IP_ALLOWED_LIST': ['my.proxy.example.org']
})
middleware = HeaderTrackingMiddleware()
get(url, user=admin, middleware=middleware, REMOTE_ADDR='8.9.10.11',

View File

@ -413,7 +413,7 @@ def test_inventory_update_access_called(post, inventory_source, alice, mock_acce
@pytest.mark.django_db
def test_inventory_source_vars_prohibition(post, inventory, admin_user):
with mock.patch('awx.api.serializers.settings') as mock_settings:
mock_settings.INV_ENV_VARIABLE_BLACKLIST = ('FOOBAR',)
mock_settings.INV_ENV_VARIABLE_BLOCKED = ('FOOBAR',)
r = post(reverse('api:inventory_source_list'),
{'name': 'new inv src', 'source_vars': '{\"FOOBAR\": \"val\"}', 'inventory': inventory.pk},
admin_user, expect=400)

View File

@ -347,7 +347,7 @@ class TestExtraVarSanitation(TestJobExecution):
assert extra_vars['msg'] == {'a': [self.UNSAFE]}
assert hasattr(extra_vars['msg']['a'][0], '__UNSAFE__')
def test_whitelisted_jt_extra_vars(self, job, private_data_dir):
def test_allowed_jt_extra_vars(self, job, private_data_dir):
job.job_template.extra_vars = job.extra_vars = json.dumps({'msg': self.UNSAFE})
task = tasks.RunJob()
@ -358,7 +358,7 @@ class TestExtraVarSanitation(TestJobExecution):
assert extra_vars['msg'] == self.UNSAFE
assert not hasattr(extra_vars['msg'], '__UNSAFE__')
def test_nested_whitelisted_vars(self, job, private_data_dir):
def test_nested_allowed_vars(self, job, private_data_dir):
job.extra_vars = json.dumps({'msg': {'a': {'b': [self.UNSAFE]}}})
job.job_template.extra_vars = job.extra_vars
task = tasks.RunJob()

View File

@ -367,13 +367,13 @@ def get_allowed_fields(obj, serializer_mapping):
'oauth2application': ['client_secret']
}
model_name = obj._meta.model_name
field_blacklist = ACTIVITY_STREAM_FIELD_EXCLUSIONS.get(model_name, [])
fields_excluded = ACTIVITY_STREAM_FIELD_EXCLUSIONS.get(model_name, [])
# see definition of from_db for CredentialType
# injection logic of any managed types are incompatible with activity stream
if model_name == 'credentialtype' and obj.managed_by_tower and obj.namespace:
field_blacklist.extend(['inputs', 'injectors'])
if field_blacklist:
allowed_fields = [f for f in allowed_fields if f not in field_blacklist]
fields_excluded.extend(['inputs', 'injectors'])
if fields_excluded:
allowed_fields = [f for f in allowed_fields if f not in fields_excluded]
return allowed_fields

View File

@ -15,7 +15,7 @@ from django.apps import apps
from django.db import models
from django.conf import settings
from awx.main.constants import LOGGER_BLACKLIST
from awx.main.constants import LOGGER_BLOCKLIST
from awx.main.utils.common import get_search_fields
__all__ = ['SmartFilter', 'ExternalLoggerEnabled', 'DynamicLevelFilter']
@ -48,11 +48,11 @@ class FieldFromSettings(object):
instance.settings_override[self.setting_name] = value
def record_is_blacklisted(record):
"""Given a log record, return True if it is considered to be in
the logging blacklist, return False if not
def record_is_blocked(record):
"""Given a log record, return True if it is considered to be
blocked, return False if not
"""
for logger_name in LOGGER_BLACKLIST:
for logger_name in LOGGER_BLOCKLIST:
if record.name.startswith(logger_name):
return True
return False
@ -81,7 +81,7 @@ class ExternalLoggerEnabled(Filter):
True - should be logged
"""
# Do not send exceptions to external logger
if record_is_blacklisted(record):
if record_is_blocked(record):
return False
# General enablement
if not self.enabled_flag:
@ -108,8 +108,8 @@ class DynamicLevelFilter(Filter):
"""Filters out logs that have a level below the threshold defined
by the database setting LOG_AGGREGATOR_LEVEL
"""
if record_is_blacklisted(record):
# Fine to write blacklisted loggers to file, apply default filtering level
if record_is_blocked(record):
# Fine to write blocked loggers to file, apply default filtering level
cutoff_level = logging.WARNING
else:
try:
@ -179,7 +179,7 @@ class SmartFilter(object):
pyparsing do the heavy lifting.
TODO: separate django filter requests from our custom json filter
request so we don't process the key any. This could be
accomplished using a whitelist or introspecting the
accomplished using an allowed list or introspecting the
relationship referred to, to see if it's a jsonb type.
'''
def _json_path_to_contains(self, k, v):

View File

@ -36,7 +36,7 @@ def safe_dump(x, safe_dict=None):
_unless_ they've been deemed "trusted" (meaning, they likely were set/added
by a user with a high level of privilege).
This function allows you to pass in a trusted `safe_dict` to whitelist
This function allows you to pass in a trusted `safe_dict` to allow
certain extra vars so that they are _not_ marked as `!unsafe` in the
resulting YAML. Anything _not_ in this dict will automatically be
`!unsafe`.

View File

@ -163,13 +163,13 @@ ALLOWED_HOSTS = []
REMOTE_HOST_HEADERS = ['REMOTE_ADDR', 'REMOTE_HOST']
# If Tower is behind a reverse proxy/load balancer, use this setting to
# whitelist the proxy IP addresses from which Tower should trust custom
# allow the proxy IP addresses from which Tower should trust custom
# REMOTE_HOST_HEADERS header values
# REMOTE_HOST_HEADERS = ['HTTP_X_FORWARDED_FOR', 'REMOTE_ADDR', 'REMOTE_HOST']
# PROXY_IP_WHITELIST = ['10.0.1.100', '10.0.1.101']
# PROXY_IP_ALLOWED_LIST = ['10.0.1.100', '10.0.1.101']
# If this setting is an empty list (the default), the headers specified by
# REMOTE_HOST_HEADERS will be trusted unconditionally')
PROXY_IP_WHITELIST = []
PROXY_IP_ALLOWED_LIST = []
CUSTOM_VENV_PATHS = []
@ -666,7 +666,7 @@ AD_HOC_COMMANDS = [
'win_user',
]
INV_ENV_VARIABLE_BLACKLIST = ("HOME", "USER", "_", "TERM")
INV_ENV_VARIABLE_BLOCKED = ("HOME", "USER", "_", "TERM")
# ----------------
# -- Amazon EC2 --
@ -694,11 +694,6 @@ EC2_REGION_NAMES = {
'cn-north-1': _('China (Beijing)'),
}
EC2_REGIONS_BLACKLIST = [
'us-gov-west-1',
'cn-north-1',
]
# Inventory variable name/values for determining if host is active/enabled.
EC2_ENABLED_VAR = 'ec2_state'
EC2_ENABLED_VALUE = 'running'
@ -715,8 +710,6 @@ EC2_EXCLUDE_EMPTY_GROUPS = True
# ------------
# -- VMware --
# ------------
VMWARE_REGIONS_BLACKLIST = []
# Inventory variable name/values for determining whether a host is
# active in vSphere.
VMWARE_ENABLED_VAR = 'guest.gueststate'
@ -771,8 +764,6 @@ GCE_REGION_CHOICES = [
('australia-southeast1-b', _('Australia Southeast (B)')),
('australia-southeast1-c', _('Australia Southeast (C)')),
]
GCE_REGIONS_BLACKLIST = []
# Inventory variable name/value for determining whether a host is active
# in Google Compute Engine.
GCE_ENABLED_VAR = 'status'
@ -817,8 +808,6 @@ AZURE_RM_REGION_CHOICES = [
('koreacentral', _('Korea Central')),
('koreasouth', _('Korea South')),
]
AZURE_RM_REGIONS_BLACKLIST = []
AZURE_RM_GROUP_FILTER = r'^.+$'
AZURE_RM_HOST_FILTER = r'^.+$'
AZURE_RM_ENABLED_VAR = 'powerstate'

View File

@ -183,5 +183,4 @@ else:
except Exception:
pass
WEBSOCKET_ORIGIN_WHITELIST = ['https://localhost:8043', 'https://localhost:3000']
AWX_CALLBACK_PROFILE = True

View File

@ -11,7 +11,7 @@ This IP address can be found by making a GET to any page on the browsable API an
awx_1 | 14:42:08 uwsgi.1 | 172.18.0.1 GET /api/v2/tokens/ - HTTP/1.1 200
```
Whitelist this IP address by adding it to the `INTERNAL_IPS` variable in `local_settings`, then navigate to the API and you should see DDT on the
Allow this IP address by adding it to the `INTERNAL_IPS` variable in `local_settings`, then navigate to the API and you should see DDT on the
right side. If you don't see it, make sure to set `DEBUG=True`.
> Note that enabling DDT is detrimental to the performance of AWX and adds overhead to every API request. It is
recommended to keep this turned off when you are not using it.

View File

@ -32,7 +32,7 @@ The standard pattern applies to the following fields:
- `ask_inventory_on_launch` allows use of `inventory`
Surveys are a special-case of prompting for variables - applying a survey to
a template whitelists variable names in the survey spec (requires the survey
a template allows variable names in the survey spec (requires the survey
spec to exist and `survey_enabled` to be true). On the other hand,
if `ask_variables_on_launch` is true, users can provide any variables in
`extra_vars`.

View File

@ -132,7 +132,7 @@ user performing the copy, and `new_objs`, a list of all sub objects of the creat
`user`'s permission against these new sub objects and unlink related objects or send
warning logs as necessary. `deep_copy_permission_check_func` should not return anything.
Lastly, macro `REENCRYPTION_BLACKLIST_AT_COPY` is available as part of a model definition. It is a
Lastly, macro `REENCRYPTION_BLOCKLIST_AT_COPY` is available as part of a model definition. It is a
list of field names which will escape re-encryption during copy. For example, the `extra_data` field
of workflow job template nodes.