Mirror of https://github.com/ansible/awx.git (synced 2026-01-11 01:57:35 -03:30)

Merge pull request #7485 from ryanpetrello/words-matter

replace certain terms with more inclusive language

Reviewed-by: https://github.com/apps/softwarefactory-project-zuul

This commit: d8f6ea3fe8
@@ -146,7 +146,7 @@ class FieldLookupBackend(BaseFilterBackend):

 # A list of fields that we know can be filtered on without the possiblity
 # of introducing duplicates
-NO_DUPLICATES_WHITELIST = (CharField, IntegerField, BooleanField)
+NO_DUPLICATES_ALLOW_LIST = (CharField, IntegerField, BooleanField)

 def get_fields_from_lookup(self, model, lookup):

@@ -205,7 +205,7 @@ class FieldLookupBackend(BaseFilterBackend):
 field_list, new_lookup = self.get_fields_from_lookup(model, lookup)
 field = field_list[-1]

-needs_distinct = (not all(isinstance(f, self.NO_DUPLICATES_WHITELIST) for f in field_list))
+needs_distinct = (not all(isinstance(f, self.NO_DUPLICATES_ALLOW_LIST) for f in field_list))

 # Type names are stored without underscores internally, but are presented and
 # and serialized over the API containing underscores so we remove `_`
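As an aside on the `NO_DUPLICATES_ALLOW_LIST` check above, here is a minimal, standalone sketch of why it exists: filtering across a to-many relation can return the same parent row once per matching related row, so such querysets need `.distinct()`, while plain character/integer/boolean columns cannot fan out rows. The `ManyToManyField` stand-in below is hypothetical; only the tuple name and the `needs_distinct` expression mirror the diff.

```python
class CharField: pass
class IntegerField: pass
class BooleanField: pass
class ManyToManyField: pass  # hypothetical stand-in for a to-many relation


NO_DUPLICATES_ALLOW_LIST = (CharField, IntegerField, BooleanField)


def needs_distinct(field_list):
    """True if any traversed field could duplicate result rows."""
    return not all(isinstance(f, NO_DUPLICATES_ALLOW_LIST) for f in field_list)


print(needs_distinct([CharField(), IntegerField()]))     # False
print(needs_distinct([ManyToManyField(), CharField()]))  # True
```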
@@ -159,11 +159,11 @@ class APIView(views.APIView):
 self.queries_before = len(connection.queries)

 # If there are any custom headers in REMOTE_HOST_HEADERS, make sure
-# they respect the proxy whitelist
+# they respect the allowed proxy list
 if all([
-settings.PROXY_IP_WHITELIST,
-request.environ.get('REMOTE_ADDR') not in settings.PROXY_IP_WHITELIST,
-request.environ.get('REMOTE_HOST') not in settings.PROXY_IP_WHITELIST
+settings.PROXY_IP_ALLOWED_LIST,
+request.environ.get('REMOTE_ADDR') not in settings.PROXY_IP_ALLOWED_LIST,
+request.environ.get('REMOTE_HOST') not in settings.PROXY_IP_ALLOWED_LIST
 ]):
 for custom_header in settings.REMOTE_HOST_HEADERS:
 if custom_header.startswith('HTTP_'):
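A standalone sketch of the rule this hunk implements: when a proxy allowed list is configured and the request did not arrive from one of those addresses, custom `HTTP_*` headers named in `REMOTE_HOST_HEADERS` are dropped so a client cannot spoof them. The setting names and the `all([...])` condition mirror the diff; the helper function itself is illustrative, not the AWX implementation.

```python
PROXY_IP_ALLOWED_LIST = ['10.0.1.100']
REMOTE_HOST_HEADERS = ['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST']


def strip_untrusted_headers(environ):
    """Remove spoofable HTTP_* headers unless the request came from a trusted proxy."""
    if all([
        PROXY_IP_ALLOWED_LIST,
        environ.get('REMOTE_ADDR') not in PROXY_IP_ALLOWED_LIST,
        environ.get('REMOTE_HOST') not in PROXY_IP_ALLOWED_LIST,
    ]):
        for header in REMOTE_HOST_HEADERS:
            if header.startswith('HTTP_'):
                environ.pop(header, None)
    return environ


env = {'REMOTE_ADDR': '8.9.10.11', 'HTTP_X_FROM_THE_LOAD_BALANCER': 'some-actual-ip'}
print(strip_untrusted_headers(env))  # custom header removed, REMOTE_ADDR kept
```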
@@ -837,7 +837,7 @@ class CopyAPIView(GenericAPIView):

 @staticmethod
 def _decrypt_model_field_if_needed(obj, field_name, field_val):
-if field_name in getattr(type(obj), 'REENCRYPTION_BLACKLIST_AT_COPY', []):
+if field_name in getattr(type(obj), 'REENCRYPTION_BLOCKLIST_AT_COPY', []):
 return field_val
 if isinstance(obj, Credential) and field_name == 'inputs':
 for secret in obj.credential_type.secret_fields:

@@ -883,7 +883,7 @@ class CopyAPIView(GenericAPIView):
 field_val = getattr(obj, field.name)
 except AttributeError:
 continue
-# Adjust copy blacklist fields here.
+# Adjust copy blocked fields here.
 if field.name in fields_to_discard or field.name in [
 'id', 'pk', 'polymorphic_ctype', 'unifiedjobtemplate_ptr', 'created_by', 'modified_by'
 ] or field.name.endswith('_role'):

@@ -1946,7 +1946,7 @@ class InventorySourceOptionsSerializer(BaseSerializer):
 def validate_source_vars(self, value):
 ret = vars_validate_or_raise(value)
 for env_k in parse_yaml_or_json(value):
-if env_k in settings.INV_ENV_VARIABLE_BLACKLIST:
+if env_k in settings.INV_ENV_VARIABLE_BLOCKED:
 raise serializers.ValidationError(_("`{}` is a prohibited environment variable".format(env_k)))
 return ret
awx/conf/migrations/0007_v380_rename_more_settings.py (new file, 19 lines)
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+from __future__ import unicode_literals
+from django.db import migrations
+from awx.conf.migrations import _rename_setting
+
+
+def copy_allowed_ips(apps, schema_editor):
+    _rename_setting.rename_setting(apps, schema_editor, old_key='PROXY_IP_WHITELIST', new_key='PROXY_IP_ALLOWED_LIST')
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('conf', '0006_v331_ldap_group_type'),
+    ]
+
+    operations = [
+        migrations.RunPython(copy_allowed_ips),
+    ]
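Conceptually, this data migration carries whatever value was saved under the old setting key over to the new key. The real helper is `awx.conf.migrations._rename_setting.rename_setting`; the standalone sketch below only illustrates the idea, with a plain dict standing in for the stored settings.

```python
def rename_setting(settings_store, old_key, new_key):
    """Move a stored value from old_key to new_key, if one exists."""
    if old_key in settings_store:
        settings_store[new_key] = settings_store.pop(old_key)
    return settings_store


store = {'PROXY_IP_WHITELIST': ['10.0.1.100']}
print(rename_setting(store, 'PROXY_IP_WHITELIST', 'PROXY_IP_ALLOWED_LIST'))
# {'PROXY_IP_ALLOWED_LIST': ['10.0.1.100']}
```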
@@ -1513,8 +1513,7 @@ class JobTemplateAccess(NotificationAttachMixin, BaseAccess):
 thus can be made by a job template administrator which may not have access
 to the any inventory, project, or credentials associated with the template.
 '''
-# We are white listing fields that can
-field_whitelist = [
+allowed_fields = [
 'name', 'description', 'forks', 'limit', 'verbosity', 'extra_vars',
 'job_tags', 'force_handlers', 'skip_tags', 'ask_variables_on_launch',
 'ask_tags_on_launch', 'ask_job_type_on_launch', 'ask_skip_tags_on_launch',

@@ -1529,7 +1528,7 @@ class JobTemplateAccess(NotificationAttachMixin, BaseAccess):
 if k not in [x.name for x in obj._meta.concrete_fields]:
 continue
 if hasattr(obj, k) and getattr(obj, k) != v:
-if k not in field_whitelist and v != getattr(obj, '%s_id' % k, None) \
+if k not in allowed_fields and v != getattr(obj, '%s_id' % k, None) \
 and not (hasattr(obj, '%s_id' % k) and getattr(obj, '%s_id' % k) is None and v == ''): # Equate '' to None in the case of foreign keys
 return False
 return True
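A simplified, standalone sketch of the permission rule in this hunk: a job template admin's change is only acceptable if every attribute being modified appears in `allowed_fields`. Plain dicts stand in for model instances and the foreign-key id comparison is omitted, so this is illustrative only.

```python
allowed_fields = ['name', 'description', 'forks', 'limit', 'verbosity', 'extra_vars']


def changes_are_allowed(current, submitted):
    """Reject any modification to a field outside the allowed list."""
    for key, value in submitted.items():
        if key in current and current[key] != value and key not in allowed_fields:
            return False
    return True


current = {'name': 'Deploy', 'inventory': 1, 'forks': 0}
print(changes_are_allowed(current, {'forks': 5}))      # True
print(changes_are_allowed(current, {'inventory': 2}))  # False: needs broader access
```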
@@ -80,11 +80,11 @@ register(
 )

 register(
-'PROXY_IP_WHITELIST',
+'PROXY_IP_ALLOWED_LIST',
 field_class=fields.StringListField,
-label=_('Proxy IP Whitelist'),
+label=_('Proxy IP Allowed List'),
 help_text=_("If Tower is behind a reverse proxy/load balancer, use this setting "
-"to whitelist the proxy IP addresses from which Tower should trust "
+"to configure the proxy IP addresses from which Tower should trust "
 "custom REMOTE_HOST_HEADERS header values. "
 "If this setting is an empty list (the default), the headers specified by "
 "REMOTE_HOST_HEADERS will be trusted unconditionally')"),

@@ -241,7 +241,7 @@ register(
 field_class=fields.StringListField,
 required=False,
 label=_('Paths to expose to isolated jobs'),
-help_text=_('Whitelist of paths that would otherwise be hidden to expose to isolated jobs. Enter one path per line.'),
+help_text=_('List of paths that would otherwise be hidden to expose to isolated jobs. Enter one path per line.'),
 category=_('Jobs'),
 category_slug='jobs',
 )
@@ -31,7 +31,7 @@ STANDARD_INVENTORY_UPDATE_ENV = {
 CAN_CANCEL = ('new', 'pending', 'waiting', 'running')
 ACTIVE_STATES = CAN_CANCEL
 CENSOR_VALUE = '************'
-ENV_BLACKLIST = frozenset((
+ENV_BLOCKLIST = frozenset((
 'VIRTUAL_ENV', 'PATH', 'PYTHONPATH', 'PROOT_TMP_DIR', 'JOB_ID',
 'INVENTORY_ID', 'INVENTORY_SOURCE_ID', 'INVENTORY_UPDATE_ID',
 'AD_HOC_COMMAND_ID', 'REST_API_URL', 'REST_API_TOKEN', 'MAX_EVENT_RES',

@@ -41,7 +41,7 @@ ENV_BLACKLIST = frozenset((
 ))

 # loggers that may be called in process of emitting a log
-LOGGER_BLACKLIST = (
+LOGGER_BLOCKLIST = (
 'awx.main.utils.handlers',
 'awx.main.utils.formatters',
 'awx.main.utils.filters',

@@ -50,7 +50,7 @@ from awx.main.models.rbac import (
 batch_role_ancestor_rebuilding, Role,
 ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, ROLE_SINGLETON_SYSTEM_AUDITOR
 )
-from awx.main.constants import ENV_BLACKLIST
+from awx.main.constants import ENV_BLOCKLIST
 from awx.main import utils
@@ -870,9 +870,9 @@ class CredentialTypeInjectorField(JSONSchemaField):
 'use is not allowed in credentials.').format(env_var),
 code='invalid', params={'value': env_var},
 )
-if env_var in ENV_BLACKLIST:
+if env_var in ENV_BLOCKLIST:
 raise django_exceptions.ValidationError(
-_('Environment variable {} is blacklisted from use in credentials.').format(env_var),
+_('Environment variable {} is not allowed to be used in credentials.').format(env_var),
 code='invalid', params={'value': env_var},
 )

@@ -407,7 +407,7 @@ def prevent_search(relation):
 sensitive_data = prevent_search(models.CharField(...))

 The flag set by this function is used by
-`awx.api.filters.FieldLookupBackend` to blacklist fields and relations that
+`awx.api.filters.FieldLookupBackend` to block fields and relations that
 should not be searchable/filterable via search query params
 """
 setattr(relation, '__prevent_search__', True)
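To make the `prevent_search` docstring concrete, here is a standalone sketch of how the `__prevent_search__` flag can be consumed by the filtering layer: any lookup traversing a marked field is refused. The `FakeField` class and `assert_searchable` helper are hypothetical stand-ins; only the flag name and the `setattr` call mirror the diff.

```python
def prevent_search(field):
    setattr(field, '__prevent_search__', True)
    return field


class FakeField:
    """Hypothetical stand-in for a Django model field."""


password = prevent_search(FakeField())
name = FakeField()


def assert_searchable(fields):
    for f in fields:
        if getattr(f, '__prevent_search__', False):
            raise ValueError('Filtering on this field is not allowed')


assert_searchable([name])              # fine
# assert_searchable([name, password])  # would raise ValueError
```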
@@ -1910,7 +1910,7 @@ class ec2(PluginFileInjector):
 # Compatibility content
 legacy_regex = {
 True: r"[^A-Za-z0-9\_]",
-False: r"[^A-Za-z0-9\_\-]" # do not replace dash, dash is whitelisted
+False: r"[^A-Za-z0-9\_\-]" # do not replace dash, dash is allowed
 }[replace_dash]
 list_replacer = 'map("regex_replace", "{rx}", "_") | list'.format(rx=legacy_regex)
 # this option, a plugin option, will allow dashes, but not unicode

@@ -1943,7 +1943,7 @@ class ec2(PluginFileInjector):
 ret['boto_profile'] = source_vars['boto_profile']

 elif not replace_dash:
-# Using the plugin, but still want dashes whitelisted
+# Using the plugin, but still want dashes allowed
 ret['use_contrib_script_compatible_sanitization'] = True

 if source_vars.get('nested_groups') is False:
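A short, standalone illustration of the legacy group-name sanitization these comments describe, using the same two regexes: with `replace_dash=True` a dash is replaced like any other disallowed character; with `replace_dash=False` dashes pass through (the "contrib script compatible" behavior). The helper function is illustrative only.

```python
import re


def legacy_sanitize(name, replace_dash=True):
    pattern = r"[^A-Za-z0-9\_]" if replace_dash else r"[^A-Za-z0-9\_\-]"
    return re.sub(pattern, "_", name)


print(legacy_sanitize("web-servers.us-east", replace_dash=True))   # web_servers_us_east
print(legacy_sanitize("web-servers.us-east", replace_dash=False))  # web-servers_us-east
```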
@@ -262,25 +262,25 @@ class JobNotificationMixin(object):
 'running': 'started',
 'failed': 'error'}
 # Tree of fields that can be safely referenced in a notification message
-JOB_FIELDS_WHITELIST = ['id', 'type', 'url', 'created', 'modified', 'name', 'description', 'job_type', 'playbook',
-'forks', 'limit', 'verbosity', 'job_tags', 'force_handlers', 'skip_tags', 'start_at_task',
-'timeout', 'use_fact_cache', 'launch_type', 'status', 'failed', 'started', 'finished',
-'elapsed', 'job_explanation', 'execution_node', 'controller_node', 'allow_simultaneous',
-'scm_revision', 'diff_mode', 'job_slice_number', 'job_slice_count', 'custom_virtualenv',
-'approval_status', 'approval_node_name', 'workflow_url', 'scm_branch',
-{'host_status_counts': ['skipped', 'ok', 'changed', 'failed', 'failures', 'dark'
-'processed', 'rescued', 'ignored']},
-{'summary_fields': [{'inventory': ['id', 'name', 'description', 'has_active_failures',
-'total_hosts', 'hosts_with_active_failures', 'total_groups',
-'has_inventory_sources',
-'total_inventory_sources', 'inventory_sources_with_failures',
-'organization_id', 'kind']},
-{'project': ['id', 'name', 'description', 'status', 'scm_type']},
-{'job_template': ['id', 'name', 'description']},
-{'unified_job_template': ['id', 'name', 'description', 'unified_job_type']},
-{'instance_group': ['name', 'id']},
-{'created_by': ['id', 'username', 'first_name', 'last_name']},
-{'labels': ['count', 'results']}]}]
+JOB_FIELDS_ALLOWED_LIST = ['id', 'type', 'url', 'created', 'modified', 'name', 'description', 'job_type', 'playbook',
+'forks', 'limit', 'verbosity', 'job_tags', 'force_handlers', 'skip_tags', 'start_at_task',
+'timeout', 'use_fact_cache', 'launch_type', 'status', 'failed', 'started', 'finished',
+'elapsed', 'job_explanation', 'execution_node', 'controller_node', 'allow_simultaneous',
+'scm_revision', 'diff_mode', 'job_slice_number', 'job_slice_count', 'custom_virtualenv',
+'approval_status', 'approval_node_name', 'workflow_url', 'scm_branch',
+{'host_status_counts': ['skipped', 'ok', 'changed', 'failed', 'failures', 'dark'
+'processed', 'rescued', 'ignored']},
+{'summary_fields': [{'inventory': ['id', 'name', 'description', 'has_active_failures',
+'total_hosts', 'hosts_with_active_failures', 'total_groups',
+'has_inventory_sources',
+'total_inventory_sources', 'inventory_sources_with_failures',
+'organization_id', 'kind']},
+{'project': ['id', 'name', 'description', 'status', 'scm_type']},
+{'job_template': ['id', 'name', 'description']},
+{'unified_job_template': ['id', 'name', 'description', 'unified_job_type']},
+{'instance_group': ['name', 'id']},
+{'created_by': ['id', 'username', 'first_name', 'last_name']},
+{'labels': ['count', 'results']}]}]

 @classmethod
 def context_stub(cls):

@@ -377,8 +377,8 @@ class JobNotificationMixin(object):

 def context(self, serialized_job):
 """Returns a dictionary that can be used for rendering notification messages.
-The context will contain whitelisted content retrieved from a serialized job object
-(see JobNotificationMixin.JOB_FIELDS_WHITELIST), the job's friendly name,
+The context will contain allowed content retrieved from a serialized job object
+(see JobNotificationMixin.JOB_FIELDS_ALLOWED_LIST the job's friendly name,
 and a url to the job run."""
 job_context = {'host_status_counts': {}}
 summary = None

@@ -395,22 +395,22 @@ class JobNotificationMixin(object):
 'job_metadata': json.dumps(self.notification_data(), indent=4)
 }

-def build_context(node, fields, whitelisted_fields):
-for safe_field in whitelisted_fields:
+def build_context(node, fields, allowed_fields):
+for safe_field in allowed_fields:
 if type(safe_field) is dict:
-field, whitelist_subnode = safe_field.copy().popitem()
+field, allowed_subnode = safe_field.copy().popitem()
 # ensure content present in job serialization
 if field not in fields:
 continue
 subnode = fields[field]
 node[field] = {}
-build_context(node[field], subnode, whitelist_subnode)
+build_context(node[field], subnode, allowed_subnode)
 else:
 # ensure content present in job serialization
 if safe_field not in fields:
 continue
 node[safe_field] = fields[safe_field]
-build_context(context['job'], serialized_job, self.JOB_FIELDS_WHITELIST)
+build_context(context['job'], serialized_job, self.JOB_FIELDS_ALLOWED_LIST)

 return context
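A standalone walk-through of `build_context` as it appears in this hunk: the allowed-fields tree is a list whose entries are either plain field names or single-key dicts describing a nested sub-tree, and only listed keys are copied from the serialized job into the notification context. The sample data below is made up; the function body is the one from the diff.

```python
def build_context(node, fields, allowed_fields):
    for safe_field in allowed_fields:
        if type(safe_field) is dict:
            field, allowed_subnode = safe_field.copy().popitem()
            # ensure content present in job serialization
            if field not in fields:
                continue
            subnode = fields[field]
            node[field] = {}
            build_context(node[field], subnode, allowed_subnode)
        else:
            # ensure content present in job serialization
            if safe_field not in fields:
                continue
            node[safe_field] = fields[safe_field]


serialized = {'id': 42, 'secret': 'hide-me',
              'summary_fields': {'project': {'name': 'demo', 'scm_type': 'git', 'token': 'x'}}}
allowed = ['id', {'summary_fields': [{'project': ['name', 'scm_type']}]}]
ctx = {}
build_context(ctx, serialized, allowed)
print(ctx)
# {'id': 42, 'summary_fields': {'project': {'name': 'demo', 'scm_type': 'git'}}}
```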
@@ -139,7 +139,7 @@ class WorkflowJobTemplateNode(WorkflowNodeBase):
 'always_nodes', 'credentials', 'inventory', 'extra_data', 'survey_passwords',
 'char_prompts', 'all_parents_must_converge', 'identifier'
 ]
-REENCRYPTION_BLACKLIST_AT_COPY = ['extra_data', 'survey_passwords']
+REENCRYPTION_BLOCKLIST_AT_COPY = ['extra_data', 'survey_passwords']

 workflow_job_template = models.ForeignKey(
 'WorkflowJobTemplate',

@@ -1802,7 +1802,7 @@ class RunJob(BaseTask):

 # By default, all extra vars disallow Jinja2 template usage for
 # security reasons; top level key-values defined in JT.extra_vars, however,
-# are whitelisted as "safe" (because they can only be set by users with
+# are allowed as "safe" (because they can only be set by users with
 # higher levels of privilege - those that have the ability create and
 # edit Job Templates)
 safe_dict = {}

@@ -2472,7 +2472,7 @@ class RunInventoryUpdate(BaseTask):

 if inventory_update.source in ['scm', 'custom']:
 for env_k in inventory_update.source_vars_dict:
-if str(env_k) not in env and str(env_k) not in settings.INV_ENV_VARIABLE_BLACKLIST:
+if str(env_k) not in env and str(env_k) not in settings.INV_ENV_VARIABLE_BLOCKED:
 env[str(env_k)] = str(inventory_update.source_vars_dict[env_k])
 elif inventory_update.source == 'file':
 raise NotImplementedError('Cannot update file sources through the task system.')
@@ -4,7 +4,7 @@ from awx.api.versioning import reverse


 @pytest.mark.django_db
-def test_proxy_ip_whitelist(get, patch, admin):
+def test_proxy_ip_allowed(get, patch, admin):
 url = reverse('api:setting_singleton_detail', kwargs={'category_slug': 'system'})
 patch(url, user=admin, data={
 'REMOTE_HOST_HEADERS': [

@@ -23,37 +23,37 @@ def test_proxy_ip_whitelist(get, patch, admin):
 def process_response(self, request, response):
 self.environ = request.environ

-# By default, `PROXY_IP_WHITELIST` is disabled, so custom `REMOTE_HOST_HEADERS`
+# By default, `PROXY_IP_ALLOWED_LIST` is disabled, so custom `REMOTE_HOST_HEADERS`
 # should just pass through
 middleware = HeaderTrackingMiddleware()
 get(url, user=admin, middleware=middleware,
 HTTP_X_FROM_THE_LOAD_BALANCER='some-actual-ip')
 assert middleware.environ['HTTP_X_FROM_THE_LOAD_BALANCER'] == 'some-actual-ip'

-# If `PROXY_IP_WHITELIST` is restricted to 10.0.1.100 and we make a request
+# If `PROXY_IP_ALLOWED_LIST` is restricted to 10.0.1.100 and we make a request
 # from 8.9.10.11, the custom `HTTP_X_FROM_THE_LOAD_BALANCER` header should
 # be stripped
 patch(url, user=admin, data={
-'PROXY_IP_WHITELIST': ['10.0.1.100']
+'PROXY_IP_ALLOWED_LIST': ['10.0.1.100']
 })
 middleware = HeaderTrackingMiddleware()
 get(url, user=admin, middleware=middleware, REMOTE_ADDR='8.9.10.11',
 HTTP_X_FROM_THE_LOAD_BALANCER='some-actual-ip')
 assert 'HTTP_X_FROM_THE_LOAD_BALANCER' not in middleware.environ

-# If 8.9.10.11 is added to `PROXY_IP_WHITELIST` the
+# If 8.9.10.11 is added to `PROXY_IP_ALLOWED_LIST` the
 # `HTTP_X_FROM_THE_LOAD_BALANCER` header should be passed through again
 patch(url, user=admin, data={
-'PROXY_IP_WHITELIST': ['10.0.1.100', '8.9.10.11']
+'PROXY_IP_ALLOWED_LIST': ['10.0.1.100', '8.9.10.11']
 })
 middleware = HeaderTrackingMiddleware()
 get(url, user=admin, middleware=middleware, REMOTE_ADDR='8.9.10.11',
 HTTP_X_FROM_THE_LOAD_BALANCER='some-actual-ip')
 assert middleware.environ['HTTP_X_FROM_THE_LOAD_BALANCER'] == 'some-actual-ip'

-# Allow whitelisting of proxy hostnames in addition to IP addresses
+# Allow allowed list of proxy hostnames in addition to IP addresses
 patch(url, user=admin, data={
-'PROXY_IP_WHITELIST': ['my.proxy.example.org']
+'PROXY_IP_ALLOWED_LIST': ['my.proxy.example.org']
 })
 middleware = HeaderTrackingMiddleware()
 get(url, user=admin, middleware=middleware, REMOTE_ADDR='8.9.10.11',

@@ -449,7 +449,7 @@ def test_inventory_update_access_called(post, inventory_source, alice, mock_acce
 @pytest.mark.django_db
 def test_inventory_source_vars_prohibition(post, inventory, admin_user):
 with mock.patch('awx.api.serializers.settings') as mock_settings:
-mock_settings.INV_ENV_VARIABLE_BLACKLIST = ('FOOBAR',)
+mock_settings.INV_ENV_VARIABLE_BLOCKED = ('FOOBAR',)
 r = post(reverse('api:inventory_source_list'),
 {'name': 'new inv src', 'source_vars': '{\"FOOBAR\": \"val\"}', 'inventory': inventory.pk},
 admin_user, expect=400)

@@ -347,7 +347,7 @@ class TestExtraVarSanitation(TestJobExecution):
 assert extra_vars['msg'] == {'a': [self.UNSAFE]}
 assert hasattr(extra_vars['msg']['a'][0], '__UNSAFE__')

-def test_whitelisted_jt_extra_vars(self, job, private_data_dir):
+def test_allowed_jt_extra_vars(self, job, private_data_dir):
 job.job_template.extra_vars = job.extra_vars = json.dumps({'msg': self.UNSAFE})
 task = tasks.RunJob()

@@ -358,7 +358,7 @@
 assert extra_vars['msg'] == self.UNSAFE
 assert not hasattr(extra_vars['msg'], '__UNSAFE__')

-def test_nested_whitelisted_vars(self, job, private_data_dir):
+def test_nested_allowed_vars(self, job, private_data_dir):
 job.extra_vars = json.dumps({'msg': {'a': {'b': [self.UNSAFE]}}})
 job.job_template.extra_vars = job.extra_vars
 task = tasks.RunJob()
@@ -367,13 +367,13 @@ def get_allowed_fields(obj, serializer_mapping):
 'oauth2application': ['client_secret']
 }
 model_name = obj._meta.model_name
-field_blacklist = ACTIVITY_STREAM_FIELD_EXCLUSIONS.get(model_name, [])
+fields_excluded = ACTIVITY_STREAM_FIELD_EXCLUSIONS.get(model_name, [])
 # see definition of from_db for CredentialType
 # injection logic of any managed types are incompatible with activity stream
 if model_name == 'credentialtype' and obj.managed_by_tower and obj.namespace:
-field_blacklist.extend(['inputs', 'injectors'])
-if field_blacklist:
-allowed_fields = [f for f in allowed_fields if f not in field_blacklist]
+fields_excluded.extend(['inputs', 'injectors'])
+if fields_excluded:
+allowed_fields = [f for f in allowed_fields if f not in fields_excluded]
 return allowed_fields
@@ -15,7 +15,7 @@ from django.apps import apps
 from django.db import models
 from django.conf import settings

-from awx.main.constants import LOGGER_BLACKLIST
+from awx.main.constants import LOGGER_BLOCKLIST
 from awx.main.utils.common import get_search_fields

 __all__ = ['SmartFilter', 'ExternalLoggerEnabled', 'DynamicLevelFilter']

@@ -48,11 +48,11 @@ class FieldFromSettings(object):
 instance.settings_override[self.setting_name] = value


-def record_is_blacklisted(record):
-"""Given a log record, return True if it is considered to be in
-the logging blacklist, return False if not
+def record_is_blocked(record):
+"""Given a log record, return True if it is considered to be
+blocked, return False if not
 """
-for logger_name in LOGGER_BLACKLIST:
+for logger_name in LOGGER_BLOCKLIST:
 if record.name.startswith(logger_name):
 return True
 return False

@@ -81,7 +81,7 @@ class ExternalLoggerEnabled(Filter):
 True - should be logged
 """
 # Do not send exceptions to external logger
-if record_is_blacklisted(record):
+if record_is_blocked(record):
 return False
 # General enablement
 if not self.enabled_flag:

@@ -108,8 +108,8 @@ class DynamicLevelFilter(Filter):
 """Filters out logs that have a level below the threshold defined
 by the databse setting LOG_AGGREGATOR_LEVEL
 """
-if record_is_blacklisted(record):
-# Fine to write blacklisted loggers to file, apply default filtering level
+if record_is_blocked(record):
+# Fine to write denied loggers to file, apply default filtering level
 cutoff_level = logging.WARNING
 else:
 try:
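A quick standalone check of the prefix rule `record_is_blocked` relies on: any logger whose dotted name starts with an entry in `LOGGER_BLOCKLIST` is kept out of the external log handler (avoiding recursive logging from the logging machinery itself). The tuple values come from the constants hunk earlier in this diff; the helper below is illustrative only.

```python
LOGGER_BLOCKLIST = (
    'awx.main.utils.handlers',
    'awx.main.utils.formatters',
    'awx.main.utils.filters',
)


def is_blocked(logger_name):
    return any(logger_name.startswith(prefix) for prefix in LOGGER_BLOCKLIST)


print(is_blocked('awx.main.utils.handlers.proxy'))  # True
print(is_blocked('awx.main.tasks'))                 # False
```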
@@ -179,7 +179,7 @@ class SmartFilter(object):
 pyparsing do the heavy lifting.
 TODO: separate django filter requests from our custom json filter
 request so we don't process the key any. This could be
-accomplished using a whitelist or introspecting the
+accomplished using an allowed list or introspecting the
 relationship refered to to see if it's a jsonb type.
 '''
 def _json_path_to_contains(self, k, v):

@@ -36,7 +36,7 @@ def safe_dump(x, safe_dict=None):
 _unless_ they've been deemed "trusted" (meaning, they likely were set/added
 by a user with a high level of privilege).

-This function allows you to pass in a trusted `safe_dict` to whitelist
+This function allows you to pass in a trusted `safe_dict` to allow
 certain extra vars so that they are _not_ marked as `!unsafe` in the
 resulting YAML. Anything _not_ in this dict will automatically be
 `!unsafe`.
@@ -163,13 +163,13 @@ ALLOWED_HOSTS = []
 REMOTE_HOST_HEADERS = ['REMOTE_ADDR', 'REMOTE_HOST']

 # If Tower is behind a reverse proxy/load balancer, use this setting to
-# whitelist the proxy IP addresses from which Tower should trust custom
+# allow the proxy IP addresses from which Tower should trust custom
 # REMOTE_HOST_HEADERS header values
 # REMOTE_HOST_HEADERS = ['HTTP_X_FORWARDED_FOR', ''REMOTE_ADDR', 'REMOTE_HOST']
-# PROXY_IP_WHITELIST = ['10.0.1.100', '10.0.1.101']
+# PROXY_IP_ALLOWED_LIST = ['10.0.1.100', '10.0.1.101']
 # If this setting is an empty list (the default), the headers specified by
 # REMOTE_HOST_HEADERS will be trusted unconditionally')
-PROXY_IP_WHITELIST = []
+PROXY_IP_ALLOWED_LIST = []

 CUSTOM_VENV_PATHS = []

@@ -666,7 +666,7 @@ AD_HOC_COMMANDS = [
 'win_user',
 ]

-INV_ENV_VARIABLE_BLACKLIST = ("HOME", "USER", "_", "TERM")
+INV_ENV_VARIABLE_BLOCKED = ("HOME", "USER", "_", "TERM")

 # ----------------
 # -- Amazon EC2 --
@@ -694,11 +694,6 @@ EC2_REGION_NAMES = {
 'cn-north-1': _('China (Beijing)'),
 }

-EC2_REGIONS_BLACKLIST = [
-'us-gov-west-1',
-'cn-north-1',
-]
-
 # Inventory variable name/values for determining if host is active/enabled.
 EC2_ENABLED_VAR = 'ec2_state'
 EC2_ENABLED_VALUE = 'running'

@@ -715,8 +710,6 @@ EC2_EXCLUDE_EMPTY_GROUPS = True
 # ------------
 # -- VMware --
 # ------------
-VMWARE_REGIONS_BLACKLIST = []
-
 # Inventory variable name/values for determining whether a host is
 # active in vSphere.
 VMWARE_ENABLED_VAR = 'guest.gueststate'

@@ -771,8 +764,6 @@ GCE_REGION_CHOICES = [
 ('australia-southeast1-b', _('Australia Southeast (B)')),
 ('australia-southeast1-c', _('Australia Southeast (C)')),
 ]
-GCE_REGIONS_BLACKLIST = []
-
 # Inventory variable name/value for determining whether a host is active
 # in Google Compute Engine.
 GCE_ENABLED_VAR = 'status'

@@ -817,8 +808,6 @@ AZURE_RM_REGION_CHOICES = [
 ('koreacentral', _('Korea Central')),
 ('koreasouth', _('Korea South')),
 ]
-AZURE_RM_REGIONS_BLACKLIST = []
-
 AZURE_RM_GROUP_FILTER = r'^.+$'
 AZURE_RM_HOST_FILTER = r'^.+$'
 AZURE_RM_ENABLED_VAR = 'powerstate'

@@ -183,5 +183,4 @@ else:
 except Exception:
 pass

-WEBSOCKET_ORIGIN_WHITELIST = ['https://localhost:8043', 'https://localhost:3000']
 AWX_CALLBACK_PROFILE = True
@@ -18,7 +18,7 @@ export default ['Rest', 'Wait',
 data = credential_typeData,
 id = credential_typeData.id,
 form = CredentialTypesForm,
-master = {},
+main = {},
 url = GetBasePath('credential_types');

 init();

@@ -123,7 +123,7 @@ export default ['Rest', 'Wait',
 for (fld in form.fields) {
 if (data[fld] && fld !== 'inputs' || fld !== 'injectors') {
 $scope[fld] = data[fld];
-master[fld] = data[fld];
+main[fld] = data[fld];
 }

 if (fld === "kind") {

@@ -131,7 +131,7 @@ export default ['Rest', 'Wait',
 for (i = 0; i < $scope.credential_kind_options.length; i++) {
 if ($scope.kind === $scope.credential_kind_options[i].value) {
 $scope.kind = $scope.credential_kind_options[i];
-master[fld] = $scope.credential_kind_options[i];
+main[fld] = $scope.credential_kind_options[i];
 break;
 }
 }

@@ -17,7 +17,7 @@ export default ['Rest', 'Wait',
 data = inventory_scriptData,
 id = inventory_scriptData.id,
 form = InventoryScriptsForm,
-master = {},
+main = {},
 url = GetBasePath('inventory_scripts');

 init();

@@ -35,14 +35,14 @@
 for (fld in form.fields) {
 if (data[fld]) {
 $scope[fld] = data[fld];
-master[fld] = data[fld];
+main[fld] = data[fld];
 }

 if (form.fields[fld].sourceModel && data.summary_fields &&
 data.summary_fields[form.fields[fld].sourceModel]) {
 $scope[form.fields[fld].sourceModel + '_' + form.fields[fld].sourceField] =
 data.summary_fields[form.fields[fld].sourceModel][form.fields[fld].sourceField];
-master[form.fields[fld].sourceModel + '_' + form.fields[fld].sourceField] =
+main[form.fields[fld].sourceModel + '_' + form.fields[fld].sourceField] =
 data.summary_fields[form.fields[fld].sourceModel][form.fields[fld].sourceField];
 }
 }
@@ -23,7 +23,7 @@ export default ['Rest', 'Wait',
 var generator = GenerateForm,
 id = notification_template.id,
 form = NotificationsFormObject,
-master = {},
+main = {},
 url = GetBasePath('notification_templates'),
 defaultMessages = {};

@@ -73,25 +73,25 @@
 for (fld in form.fields) {
 if (data[fld]) {
 $scope[fld] = data[fld];
-master[fld] = data[fld];
+main[fld] = data[fld];
 }

 if(data.notification_configuration.use_ssl === true){
 $scope.email_options = "use_ssl";
-master.email_options = "use_ssl";
+main.email_options = "use_ssl";
 $scope.use_ssl = true;
-master.use_ssl = true;
+main.use_ssl = true;
 $scope.use_tls = false;
-master.use_tls = false;
+main.use_tls = false;
 }

 if(data.notification_configuration.use_tls === true){
 $scope.email_options = "use_tls";
-master.email_options = "use_tls";
+main.email_options = "use_tls";
 $scope.use_ssl = false;
-master.use_ssl = false;
+main.use_ssl = false;
 $scope.use_tls = true;
-master.use_tls = true;
+main.use_tls = true;
 }

 if (data.notification_configuration.timeout === null ||

@@ -101,7 +101,7 @@

 if (data.notification_configuration[fld]) {
 $scope[fld] = data.notification_configuration[fld];
-master[fld] = data.notification_configuration[fld];
+main[fld] = data.notification_configuration[fld];

 if (form.fields[fld].type === 'textarea') {
 if (form.fields[fld].name === 'headers') {

@@ -116,7 +116,7 @@
 data.summary_fields[form.fields[fld].sourceModel]) {
 $scope[form.fields[fld].sourceModel + '_' + form.fields[fld].sourceField] =
 data.summary_fields[form.fields[fld].sourceModel][form.fields[fld].sourceField];
-master[form.fields[fld].sourceModel + '_' + form.fields[fld].sourceField] =
+main[form.fields[fld].sourceModel + '_' + form.fields[fld].sourceField] =
 data.summary_fields[form.fields[fld].sourceModel][form.fields[fld].sourceField];
 }
 }

@@ -128,7 +128,7 @@
 }
 }

-master.notification_type = $scope.notification_type;
+main.notification_type = $scope.notification_type;
 CreateSelect2({
 element: '#notification_template_notification_type',
 multiple: false
@@ -14,7 +14,7 @@ export default ['$scope', '$location', '$stateParams', 'isOrgAdmin', 'isNotifica
 let form = OrganizationForm(),
 defaultUrl = GetBasePath('organizations'),
 base = $location.path().replace(/^\//, '').split('/')[0],
-master = {},
+main = {},
 id = $stateParams.organization_id,
 instance_group_url = defaultUrl + id + '/instance_groups/';

@@ -55,7 +55,7 @@
 for (fld in form.fields) {
 if (typeof data[fld] !== 'undefined') {
 $scope[fld] = data[fld];
-master[fld] = data[fld];
+main[fld] = data[fld];
 }
 }

@@ -112,7 +112,7 @@
 });
 });
 $scope.organization_name = $scope.name;
-master = params;
+main = params;
 })
 .catch(({data, status}) => {
 ProcessErrors($scope, data, status, OrganizationForm, {

@@ -166,4 +166,4 @@

 };
 }
-];
+];
@@ -15,7 +15,7 @@ export default ['$scope', '$location', '$stateParams', 'GenerateForm',
 let form = ProjectsForm(),
 base = $location.path().replace(/^\//, '').split('/')[0],
 defaultUrl = GetBasePath('projects'),
-master = {};
+main = {};

 init();

@@ -46,7 +46,7 @@
 GenerateForm.applyDefaults(form, $scope);
 }

-GetProjectPath({ scope: $scope, master: master });
+GetProjectPath({ scope: $scope, main: main });

 if ($scope.removeChoicesReady) {
 $scope.removeChoicesReady();

@@ -67,7 +67,7 @@
 });

 $scope.scmRequired = false;
-master.scm_type = $scope.scm_type;
+main.scm_type = $scope.scm_type;
 });

 // Load the list of options for Kind
@@ -17,7 +17,7 @@ export default ['$scope', '$rootScope', '$stateParams', 'ProjectsForm', 'Rest',

 let form = ProjectsForm(),
 defaultUrl = GetBasePath('projects') + $stateParams.project_id + '/',
-master = {},
+main = {},
 id = $stateParams.project_id;

 $scope.project_local_paths = [];

@@ -46,7 +46,7 @@
 $scope.projectLoadedRemove();
 }
 $scope.projectLoadedRemove = $scope.$on('projectLoaded', function() {
-GetProjectPath({ scope: $scope, master: master });
+GetProjectPath({ scope: $scope, main: main });

 $scope.pathRequired = ($scope.scm_type.value === 'manual') ? true : false;
 $scope.scmRequired = ($scope.scm_type.value !== 'manual') ? true : false;

@@ -76,19 +76,19 @@
 if (form.fields[fld].type === 'checkbox_group') {
 for (i = 0; i < form.fields[fld].fields.length; i++) {
 $scope[form.fields[fld].fields[i].name] = data[form.fields[fld].fields[i].name];
-master[form.fields[fld].fields[i].name] = data[form.fields[fld].fields[i].name];
+main[form.fields[fld].fields[i].name] = data[form.fields[fld].fields[i].name];
 }
 } else {
 if (data[fld] !== undefined) {
 $scope[fld] = data[fld];
-master[fld] = data[fld];
+main[fld] = data[fld];
 }
 }
 if (form.fields[fld].sourceModel && data.summary_fields &&
 data.summary_fields[form.fields[fld].sourceModel]) {
 $scope[form.fields[fld].sourceModel + '_' + form.fields[fld].sourceField] =
 data.summary_fields[form.fields[fld].sourceModel][form.fields[fld].sourceField];
-master[form.fields[fld].sourceModel + '_' + form.fields[fld].sourceField] =
+main[form.fields[fld].sourceModel + '_' + form.fields[fld].sourceField] =
 data.summary_fields[form.fields[fld].sourceModel][form.fields[fld].sourceField];
 }
 }

@@ -109,7 +109,7 @@
 $scope.scmRequired = false;
 }

-master.scm_type = $scope.scm_type;
+main.scm_type = $scope.scm_type;
 CreateSelect2({
 element: '#project_scm_type',
 multiple: false
@@ -2,7 +2,7 @@ export default
 function GetProjectPath(i18n, Rest, GetBasePath, ProcessErrors) {
 return function(params) {
 var scope = params.scope,
-master = params.master;
+main = params.main;

 function arraySort(data) {
 //Sort nodes by name

@@ -54,8 +54,8 @@ export default
 }
 }
 scope.base_dir = data.project_base_dir || i18n._('You do not have access to view this property');
-master.local_path = scope.local_path;
-master.base_dir = scope.base_dir; // Keep in master object so that it doesn't get
+main.local_path = scope.local_path;
+main.base_dir = scope.base_dir; // Keep in main object so that it doesn't get
 // wiped out on form reset.
 if (opts.length === 0) {
 // trigger display of alert block when scm_type == manual
@@ -24,7 +24,7 @@
 let defaultUrl = GetBasePath('job_templates'),
 form = JobTemplateForm(),
 generator = GenerateForm,
-master = {},
+main = {},
 selectPlaybook, checkSCMStatus,
 callback;

@@ -157,14 +157,14 @@

 hashSetup({
 scope: $scope,
-master: master,
+main: main,
 check_field: 'allow_callbacks',
 default_val: false
 });
 CallbackHelpInit({ scope: $scope });
 // set initial vals for webhook checkbox
 $scope.enable_webhook = false;
-master.enable_webhook = false;
+main.enable_webhook = false;

 $scope.surveyTooltip = i18n._('Please save before adding a survey to this job template.');
@@ -41,7 +41,7 @@ export default
 let defaultUrl = GetBasePath('job_templates'),
 generator = GenerateForm,
 form = JobTemplateForm(),
-master = {},
+main = {},
 id = $stateParams.job_template_id,
 callback,
 choicesCount = 0,

@@ -470,15 +470,15 @@ export default
 if ($scope.jobTemplateLoadedRemove) {
 $scope.jobTemplateLoadedRemove();
 }
-$scope.jobTemplateLoadedRemove = $scope.$on('jobTemplateLoaded', function (e, masterObject) {
+$scope.jobTemplateLoadedRemove = $scope.$on('jobTemplateLoaded', function (e, mainObject) {
 var dft;

-master = masterObject;
+main = mainObject;

 dft = ($scope.host_config_key === "" || $scope.host_config_key === null) ? false : true;
 hashSetup({
 scope: $scope,
-master: master,
+main: main,
 check_field: 'allow_callbacks',
 default_val: dft
 });

@@ -486,10 +486,10 @@
 // set initial vals for webhook checkbox
 if (jobTemplateData.webhook_service) {
 $scope.enable_webhook = true;
-master.enable_webhook = true;
+main.enable_webhook = true;
 } else {
 $scope.enable_webhook = false;
-master.enable_webhook = false;
+main.enable_webhook = false;
 }

 ParseTypeChange({

@@ -540,17 +540,17 @@
 $scope.survey_exists = true;
 }
 }
-master[fld] = $scope[fld];
+main[fld] = $scope[fld];
 }
 if (fld === 'extra_vars') {
 // Parse extra_vars, converting to YAML.
 $scope.extra_vars = ParseVariableString(jobTemplateData.extra_vars);
-master.extra_vars = $scope.extra_vars;
+main.extra_vars = $scope.extra_vars;
 }
 if (form.fields[fld].type === 'lookup' && jobTemplateData.summary_fields[form.fields[fld].sourceModel]) {
 $scope[form.fields[fld].sourceModel + '_' + form.fields[fld].sourceField] =
 jobTemplateData.summary_fields[form.fields[fld].sourceModel][form.fields[fld].sourceField];
-master[form.fields[fld].sourceModel + '_' + form.fields[fld].sourceField] =
+main[form.fields[fld].sourceModel + '_' + form.fields[fld].sourceField] =
 $scope[form.fields[fld].sourceModel + '_' + form.fields[fld].sourceField];
 }
 if (form.fields[fld].type === 'checkbox_group') {

@@ -565,44 +565,44 @@
 $scope.survey_enabled = jobTemplateData.survey_enabled;

 $scope.ask_variables_on_launch = (jobTemplateData.ask_variables_on_launch) ? true : false;
-master.ask_variables_on_launch = $scope.ask_variables_on_launch;
+main.ask_variables_on_launch = $scope.ask_variables_on_launch;

 $scope.ask_verbosity_on_launch = (jobTemplateData.ask_verbosity_on_launch) ? true : false;
-master.ask_verbosity_on_launch = $scope.ask_verbosity_on_launch;
+main.ask_verbosity_on_launch = $scope.ask_verbosity_on_launch;

 $scope.ask_limit_on_launch = (jobTemplateData.ask_limit_on_launch) ? true : false;
-master.ask_limit_on_launch = $scope.ask_limit_on_launch;
+main.ask_limit_on_launch = $scope.ask_limit_on_launch;

 $scope.ask_tags_on_launch = (jobTemplateData.ask_tags_on_launch) ? true : false;
-master.ask_tags_on_launch = $scope.ask_tags_on_launch;
+main.ask_tags_on_launch = $scope.ask_tags_on_launch;

 $scope.ask_skip_tags_on_launch = (jobTemplateData.ask_skip_tags_on_launch) ? true : false;
-master.ask_skip_tags_on_launch = $scope.ask_skip_tags_on_launch;
+main.ask_skip_tags_on_launch = $scope.ask_skip_tags_on_launch;

 $scope.ask_diff_mode_on_launch = (jobTemplateData.ask_diff_mode_on_launch) ? true : false;
-master.ask_diff_mode_on_launch = $scope.ask_diff_mode_on_launch;
+main.ask_diff_mode_on_launch = $scope.ask_diff_mode_on_launch;

 $scope.ask_scm_branch_on_launch = (jobTemplateData.ask_scm_branch_on_launch) ? true : false;
-master.ask_scm_branch_on_launch = $scope.ask_scm_branch_on_launch;
+main.ask_scm_branch_on_launch = $scope.ask_scm_branch_on_launch;

 $scope.job_tag_options = (jobTemplateData.job_tags) ? jobTemplateData.job_tags.split(',')
 .map((i) => ({name: i, label: i, value: i})) : [];
 $scope.job_tags = $scope.job_tag_options;
-master.job_tags = $scope.job_tags;
+main.job_tags = $scope.job_tags;

 $scope.skip_tag_options = (jobTemplateData.skip_tags) ? jobTemplateData.skip_tags.split(',')
 .map((i) => ({name: i, label: i, value: i})) : [];
 $scope.skip_tags = $scope.skip_tag_options;
-master.skip_tags = $scope.skip_tags;
+main.skip_tags = $scope.skip_tags;

 $scope.ask_job_type_on_launch = (jobTemplateData.ask_job_type_on_launch) ? true : false;
-master.ask_job_type_on_launch = $scope.ask_job_type_on_launch;
+main.ask_job_type_on_launch = $scope.ask_job_type_on_launch;

 $scope.ask_inventory_on_launch = (jobTemplateData.ask_inventory_on_launch) ? true : false;
-master.ask_inventory_on_launch = $scope.ask_inventory_on_launch;
+main.ask_inventory_on_launch = $scope.ask_inventory_on_launch;

 $scope.ask_credential_on_launch = (jobTemplateData.ask_credential_on_launch) ? true : false;
-master.ask_credential_on_launch = $scope.ask_credential_on_launch;
+main.ask_credential_on_launch = $scope.ask_credential_on_launch;

 if (jobTemplateData.host_config_key) {
 $scope.example_config_key = jobTemplateData.host_config_key;

@@ -612,7 +612,7 @@

 $scope.callback_url = $scope.callback_server_path + ((jobTemplateData.related.callback) ? jobTemplateData.related.callback :
 GetBasePath('job_templates') + id + '/callback/');
-master.callback_url = $scope.callback_url;
+main.callback_url = $scope.callback_url;

 $scope.can_edit = jobTemplateData.summary_fields.user_capabilities.edit;

@@ -645,7 +645,7 @@
 $q.all(multiCredentialPromises)
 .then(() => {
 $scope.multiCredential = multiCredential;
-$scope.$emit('jobTemplateLoaded', master);
+$scope.$emit('jobTemplateLoaded', main);
 });
 });
@@ -2,12 +2,12 @@ export default
 function hashSetup() {
 return function(params) {
 var scope = params.scope,
-master = params.master,
+main = params.main,
 check_field = params.check_field,
 default_val = params.default_val;

 scope[check_field] = default_val;
-master[check_field] = default_val;
+main[check_field] = default_val;

 // Original gist here: https://gist.github.com/jed/982883
 scope.genHash = function (fld) {
@@ -24,7 +24,7 @@ export default ['$scope', '$rootScope', '$stateParams', 'UserForm', 'Rest',

 const { me } = models;
 var form = UserForm,
-master = {},
+main = {},
 id = $stateParams.user_id,
 defaultUrl = GetBasePath('users') + id,
 user_obj = resourceData.data;

@@ -50,7 +50,7 @@
 $scope.user_id = id;
 $scope.ldap_user = (user_obj.ldap_dn !== null && user_obj.ldap_dn !== undefined && user_obj.ldap_dn !== '') ? true : false;
 $scope.not_ldap_user = !$scope.ldap_user;
-master.ldap_user = $scope.ldap_user;
+main.ldap_user = $scope.ldap_user;
 $scope.socialAuthUser = (user_obj.auth.length > 0) ? true : false;
 $scope.last_login = user_obj.last_login;
 $scope.external_account = user_obj.external_account;
@@ -10,7 +10,7 @@ const initializeData = () => {
 data.ORGANIZATION_NAME = `organization-${id}`;
 data.PROJECT_NAME = `project-${id}`;
 data.PROJECT_URL = 'https://github.com/ansible/test-playbooks';
-data.PROJECT_BRANCH = 'master';
+data.PROJECT_BRANCH = 'devel';
 data.PLAYBOOK_NAME = 'multivault.yml';
 data.TEMPLATE_NAME = `template-${id}`;
 data.VAULT_CREDENTIAL_NAME_1 = `credential-vault-${id}-1`;
@@ -11,7 +11,7 @@ This IP address can be found by making a GET to any page on the browsable API an
 awx_1 | 14:42:08 uwsgi.1 | 172.18.0.1 GET /api/v2/tokens/ - HTTP/1.1 200
 ```

-Whitelist this IP address by adding it to the `INTERNAL_IPS` variable in `local_settings`, then navigate to the API and you should see DDT on the
+Allow this IP address by adding it to the `INTERNAL_IPS` variable in `local_settings`, then navigate to the API and you should see DDT on the
 right side. If you don't see it, make sure to set `DEBUG=True`.
 > Note that enabling DDT is detrimental to the performance of AWX and adds overhead to every API request. It is
 recommended to keep this turned off when you are not using it.
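For example, with the `172.18.0.1` address from the uwsgi log line above, a `local_settings` sketch along these lines should be enough (the exact value depends on your container network):

```python
# local_settings.py (sketch)
INTERNAL_IPS = ['172.18.0.1']
DEBUG = True
```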
@@ -32,7 +32,7 @@ The standard pattern applies to the following fields:
 - `ask_inventory_on_launch` allows use of `inventory`

 Surveys are a special-case of prompting for variables - applying a survey to
-a template whitelists variable names in the survey spec (requires the survey
+a template allows variable names in the survey spec (requires the survey
 spec to exist and `survey_enabled` to be true). On the other hand,
 if `ask_variables_on_launch` is true, users can provide any variables in
 `extra_vars`.
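As an illustration of that special case, a survey spec roughly like the one below (shown as a Python dict; the field names are typical survey-spec keys and may differ from any particular example) means only the variable `environment` is accepted at launch, whereas `ask_variables_on_launch` would accept arbitrary keys in `extra_vars`:

```python
survey_spec = {
    "name": "deploy options",
    "description": "",
    "spec": [
        {
            "question_name": "Target environment",
            "variable": "environment",          # the only launch-time variable allowed
            "type": "multiplechoice",
            "choices": ["staging", "production"],
            "required": True,
        }
    ],
}
```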
@@ -132,7 +132,7 @@ user performing the copy, and `new_objs`, a list of all sub objects of the creat
 `user`'s permission against these new sub objects and unlink related objects or send
 warning logs as necessary. `deep_copy_permission_check_func` should not return anything.

-Lastly, macro `REENCRYPTION_BLACKLIST_AT_COPY` is available as part of a model definition. It is a
+Lastly, macro `REENCRYPTION_BLOCKLIST_AT_COPY` is available as part of a model definition. It is a
 list of field names which will escape re-encryption during copy. For example, the `extra_data` field
 of workflow job template nodes.
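Tying this back to the `WorkflowJobTemplateNode` and `CopyAPIView` hunks earlier in the diff, a minimal standalone sketch of how the attribute is consumed (the class and helper below are illustrative only; the list values come from the diff):

```python
class NodeSketch:
    # mirrors the WorkflowJobTemplateNode hunk earlier in this diff
    REENCRYPTION_BLOCKLIST_AT_COPY = ['extra_data', 'survey_passwords']


def should_reencrypt(model_cls, field_name):
    """Fields on the blocklist are copied verbatim instead of decrypt/re-encrypt."""
    return field_name not in getattr(model_cls, 'REENCRYPTION_BLOCKLIST_AT_COPY', [])


print(should_reencrypt(NodeSketch, 'extra_data'))  # False: copied as-is
print(should_reencrypt(NodeSketch, 'limit'))       # True
```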