Merge branch 'hardening' into devel

This commit is contained in:
Ryan Petrello 2019-10-16 13:15:20 -04:00
commit c7be94c2f2
No known key found for this signature in database
GPG Key ID: F2AA5F2122351777
34 changed files with 422 additions and 134 deletions

View File

@ -400,7 +400,7 @@ flake8_collection:
test_collection_all: prepare_collection_venv test_collection flake8_collection
build_collection:
ansible-playbook -i localhost, awx_collection/template_galaxy.yml -e collection_package=$(COLLECTION_PACKAGE) -e namespace_name=$(COLLECTION_NAMESPACE) -e package_version=$(VERSION)
ansible-playbook -i localhost, awx_collection/template_galaxy.yml -e collection_package=$(COLLECTION_PACKAGE) -e collection_namespace=$(COLLECTION_NAMESPACE) -e collection_version=$(VERSION)
ansible-galaxy collection build awx_collection --output-path=awx_collection
test_unit:

View File

@ -82,6 +82,16 @@ def find_commands(management_dir):
return commands
def oauth2_getattribute(self, attr):
    """Replacement for OAuth2ProviderSettings.__getattribute__.

    Looks the attribute up in django.conf settings' OAUTH2_PROVIDER dict on
    every access (instead of only once at import time), falling back to the
    value stored on the settings object itself when no override is present.
    """
    from django.conf import settings

    configured = settings.OAUTH2_PROVIDER.get(attr)
    if configured is not None:
        return configured
    # No override in OAUTH2_PROVIDER -- use the attribute baked into the
    # oauth2_provider settings object.
    return object.__getattribute__(self, attr)
def prepare_env():
# Update the default settings environment variable based on current mode.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'awx.settings.%s' % MODE)
@ -93,6 +103,12 @@ def prepare_env():
# Monkeypatch Django find_commands to also work with .pyc files.
import django.core.management
django.core.management.find_commands = find_commands
# Monkeypatch Oauth2 toolkit settings class to check for settings
# in django.conf settings each time, not just once during import
import oauth2_provider.settings
oauth2_provider.settings.OAuth2ProviderSettings.__getattribute__ = oauth2_getattribute
# Use the AWX_TEST_DATABASE_* environment variables to specify the test
# database settings to use when management command is run as an external
# program via unit tests.

View File

@ -126,7 +126,7 @@ class FieldLookupBackend(BaseFilterBackend):
'''
RESERVED_NAMES = ('page', 'page_size', 'format', 'order', 'order_by',
'search', 'type', 'host_filter', 'count_disabled',)
'search', 'type', 'host_filter', 'count_disabled', 'no_truncate')
SUPPORTED_LOOKUPS = ('exact', 'iexact', 'contains', 'icontains',
'startswith', 'istartswith', 'endswith', 'iendswith',

View File

@ -45,7 +45,6 @@ from polymorphic.models import PolymorphicModel
from awx.main.access import get_user_capabilities
from awx.main.constants import (
SCHEDULEABLE_PROVIDERS,
ANSI_SGR_PATTERN,
ACTIVE_STATES,
CENSOR_VALUE,
)
@ -70,7 +69,8 @@ from awx.main.utils import (
get_type_for_model, get_model_for_type,
camelcase_to_underscore, getattrd, parse_yaml_or_json,
has_model_field_prefetched, extract_ansible_vars, encrypt_dict,
prefetch_page_capabilities, get_external_account)
prefetch_page_capabilities, get_external_account, truncate_stdout,
)
from awx.main.utils.filters import SmartFilter
from awx.main.redact import UriCleaner, REPLACE_STR
@ -140,6 +140,7 @@ SUMMARIZABLE_FK_FIELDS = {
'source_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'credential_type_id'),
'target_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'credential_type_id'),
'webhook_credential': DEFAULT_SUMMARY_FIELDS,
'approved_or_denied_by': ('id', 'username', 'first_name', 'last_name'),
}
@ -3501,6 +3502,8 @@ class WorkflowApprovalSerializer(UnifiedJobSerializer):
kwargs={'pk': obj.workflow_approval_template.pk})
res['approve'] = self.reverse('api:workflow_approval_approve', kwargs={'pk': obj.pk})
res['deny'] = self.reverse('api:workflow_approval_deny', kwargs={'pk': obj.pk})
if obj.approved_or_denied_by:
res['approved_or_denied_by'] = self.reverse('api:user_detail', kwargs={'pk': obj.approved_or_denied_by.pk})
return res
@ -3851,25 +3854,17 @@ class JobEventSerializer(BaseSerializer):
return d
def to_representation(self, obj):
ret = super(JobEventSerializer, self).to_representation(obj)
# Show full stdout for event detail view, truncate only for list view.
if hasattr(self.context.get('view', None), 'retrieve'):
return ret
data = super(JobEventSerializer, self).to_representation(obj)
# Show full stdout for playbook_on_* events.
if obj and obj.event.startswith('playbook_on'):
return ret
return data
# If the view logic says to not truncate (request was to the detail view or a param was used)
if self.context.get('no_truncate', False):
return data
max_bytes = settings.EVENT_STDOUT_MAX_BYTES_DISPLAY
if max_bytes > 0 and 'stdout' in ret and len(ret['stdout']) >= max_bytes:
ret['stdout'] = ret['stdout'][:(max_bytes - 1)] + u'\u2026'
set_count = 0
reset_count = 0
for m in ANSI_SGR_PATTERN.finditer(ret['stdout']):
if m.string[m.start():m.end()] == u'\u001b[0m':
reset_count += 1
else:
set_count += 1
ret['stdout'] += u'\u001b[0m' * (set_count - reset_count)
return ret
if 'stdout' in data:
data['stdout'] = truncate_stdout(data['stdout'], max_bytes)
return data
class JobEventWebSocketSerializer(JobEventSerializer):
@ -3964,22 +3959,14 @@ class AdHocCommandEventSerializer(BaseSerializer):
return res
def to_representation(self, obj):
ret = super(AdHocCommandEventSerializer, self).to_representation(obj)
# Show full stdout for event detail view, truncate only for list view.
if hasattr(self.context.get('view', None), 'retrieve'):
return ret
data = super(AdHocCommandEventSerializer, self).to_representation(obj)
# If the view logic says to not truncate (request was to the detail view or a param was used)
if self.context.get('no_truncate', False):
return data
max_bytes = settings.EVENT_STDOUT_MAX_BYTES_DISPLAY
if max_bytes > 0 and 'stdout' in ret and len(ret['stdout']) >= max_bytes:
ret['stdout'] = ret['stdout'][:(max_bytes - 1)] + u'\u2026'
set_count = 0
reset_count = 0
for m in ANSI_SGR_PATTERN.finditer(ret['stdout']):
if m.string[m.start():m.end()] == u'\u001b[0m':
reset_count += 1
else:
set_count += 1
ret['stdout'] += u'\u001b[0m' * (set_count - reset_count)
return ret
if 'stdout' in data:
data['stdout'] = truncate_stdout(data['stdout'], max_bytes)
return data
class AdHocCommandEventWebSocketSerializer(AdHocCommandEventSerializer):

View File

@ -3768,12 +3768,23 @@ class JobEventList(ListAPIView):
serializer_class = serializers.JobEventSerializer
search_fields = ('stdout',)
def get_serializer_context(self):
    """Pass a no_truncate flag to the serializer when requested.

    Any non-empty ``?no_truncate=...`` query parameter value disables
    stdout truncation for the event list.
    """
    ctx = super().get_serializer_context()
    if self.request.query_params.get('no_truncate'):
        ctx['no_truncate'] = True
    return ctx
class JobEventDetail(RetrieveAPIView):
    """Detail view for a single job event; stdout is never truncated here."""

    model = models.JobEvent
    serializer_class = serializers.JobEventSerializer

    def get_serializer_context(self):
        # The detail endpoint always returns the full stdout payload.
        ctx = super().get_serializer_context()
        ctx['no_truncate'] = True
        return ctx
class JobEventChildrenList(SubListAPIView):
@ -4002,12 +4013,23 @@ class AdHocCommandEventList(ListAPIView):
serializer_class = serializers.AdHocCommandEventSerializer
search_fields = ('stdout',)
def get_serializer_context(self):
    """Pass a no_truncate flag to the serializer when requested.

    Any non-empty ``?no_truncate=...`` query parameter value disables
    stdout truncation for the event list.
    """
    ctx = super().get_serializer_context()
    if self.request.query_params.get('no_truncate'):
        ctx['no_truncate'] = True
    return ctx
class AdHocCommandEventDetail(RetrieveAPIView):
    """Detail view for a single ad hoc command event; stdout is never truncated here."""

    model = models.AdHocCommandEvent
    serializer_class = serializers.AdHocCommandEventSerializer

    def get_serializer_context(self):
        # The detail endpoint always returns the full stdout payload.
        ctx = super().get_serializer_context()
        ctx['no_truncate'] = True
        return ctx
class BaseAdHocCommandEventsList(SubListAPIView):

View File

@ -70,12 +70,16 @@ class InventoryUpdateEventsList(SubListAPIView):
class InventoryScriptList(ListCreateAPIView):
    # Flagged deprecated; list/create of CustomInventoryScript is still served.
    deprecated = True
    model = CustomInventoryScript
    serializer_class = CustomInventoryScriptSerializer
class InventoryScriptDetail(RetrieveUpdateDestroyAPIView):
    # Flagged deprecated; retrieve/update/destroy of CustomInventoryScript is still served.
    deprecated = True
    model = CustomInventoryScript
    serializer_class = CustomInventoryScriptSerializer
@ -92,6 +96,8 @@ class InventoryScriptDetail(RetrieveUpdateDestroyAPIView):
class InventoryScriptObjectRolesList(SubListAPIView):
    # Flagged deprecated; sub-list of Role objects under a CustomInventoryScript.
    deprecated = True
    model = Role
    serializer_class = RoleSerializer
    parent_model = CustomInventoryScript
@ -105,6 +111,8 @@ class InventoryScriptObjectRolesList(SubListAPIView):
class InventoryScriptCopy(CopyAPIView):
    # Flagged deprecated; copy endpoint for CustomInventoryScript.
    deprecated = True
    model = CustomInventoryScript
    copy_return_serializer_class = CustomInventoryScriptSerializer

View File

@ -755,7 +755,7 @@ register(
allow_null=True,
default=False,
label=_('Enabled external log aggregation auditing'),
help_text=_('When enabled, all external logs emitted by Tower will also be written to /var/log/tower/external.log'),
help_text=_('When enabled, all external logs emitted by Tower will also be written to /var/log/tower/external.log. This is an experimental setting intended to be used for debugging external log aggregation issues (and may be subject to change in the future).'), # noqa
category=_('Logging'),
category_slug='logging',
)

View File

@ -0,0 +1,21 @@
# Generated by Django 2.2.4 on 2019-10-11 15:40
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Adds the audit field WorkflowApproval.approved_or_denied_by, recording
    # which user acted on an approval node (nulled if that user is deleted).

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('main', '0096_v360_container_groups'),
    ]
    operations = [
        migrations.AddField(
            model_name='workflowapproval',
            name='approved_or_denied_by',
            # NOTE(review): this related_name looks like a mis-expanded
            # '%(class)s' template (the model declares '%s(class)s_approved+',
            # which itself appears to be a typo for '%(class)s_approved+') --
            # verify against the model field before regenerating migrations.
            field=models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'workflowapproval', 'model_name': 'workflowapproval', 'app_label': 'main'}(class)s_approved+", to=settings.AUTH_USER_MODEL),
        ),
    ]

View File

@ -119,10 +119,11 @@ class Schedule(PrimordialModel, LaunchTimeConfig):
tzinfo = r._dtstart.tzinfo
if tzinfo is utc:
return 'UTC'
fname = tzinfo._filename
for zone in all_zones:
if fname.endswith(zone):
return zone
fname = getattr(tzinfo, '_filename', None)
if fname:
for zone in all_zones:
if fname.endswith(zone):
return zone
logger.warn('Could not detect valid zoneinfo for {}'.format(self.rrule))
return ''

View File

@ -13,6 +13,9 @@ from django.utils.translation import ugettext_lazy as _
from django.core.exceptions import ObjectDoesNotExist
#from django import settings as tower_settings
# Django-CRUM
from crum import get_current_user
# AWX
from awx.api.versioning import reverse
from awx.main.models import (prevent_search, accepts_json, UnifiedJobTemplate,
@ -690,6 +693,14 @@ class WorkflowApproval(UnifiedJob, JobNotificationMixin):
default=False,
help_text=_("Shows when an approval node (with a timeout assigned to it) has timed out.")
)
approved_or_denied_by = models.ForeignKey(
'auth.User',
related_name='%s(class)s_approved+',
default=None,
null=True,
editable=False,
on_delete=models.SET_NULL,
)
@classmethod
@ -711,6 +722,7 @@ class WorkflowApproval(UnifiedJob, JobNotificationMixin):
def approve(self, request=None):
self.status = 'successful'
self.approved_or_denied_by = get_current_user()
self.save()
self.send_approval_notification('approved')
self.websocket_emit_status(self.status)
@ -719,6 +731,7 @@ class WorkflowApproval(UnifiedJob, JobNotificationMixin):
def deny(self, request=None):
self.status = 'failed'
self.approved_or_denied_by = get_current_user()
self.save()
self.send_approval_notification('denied')
self.websocket_emit_status(self.status)

View File

@ -3,6 +3,7 @@ import stat
import time
import yaml
import tempfile
import logging
from base64 import b64encode
from django.conf import settings
@ -11,6 +12,8 @@ from django.utils.functional import cached_property
from awx.main.utils.common import parse_yaml_or_json
logger = logging.getLogger('awx.main.scheduler')
class PodManager(object):
@ -21,32 +24,33 @@ class PodManager(object):
if not self.credential.kubernetes:
raise RuntimeError('Pod deployment cannot occur without a Kubernetes credential')
self.kube_api.create_namespaced_pod(body=self.pod_definition,
namespace=self.namespace,
_request_timeout=settings.AWX_CONTAINER_GROUP_DEFAULT_LAUNCH_TIMEOUT)
_request_timeout=settings.AWX_CONTAINER_GROUP_K8S_API_TIMEOUT)
# We don't do any fancy timeout logic here because it is handled
# at a higher level in the job spawning process. See
# settings.AWX_ISOLATED_LAUNCH_TIMEOUT and settings.AWX_ISOLATED_CONNECTION_TIMEOUT
while True:
num_retries = settings.AWX_CONTAINER_GROUP_POD_LAUNCH_RETRIES
for retry_attempt in range(num_retries - 1):
logger.debug(f"Checking for pod {self.pod_name}. Attempt {retry_attempt + 1} of {num_retries}")
pod = self.kube_api.read_namespaced_pod(name=self.pod_name,
namespace=self.namespace,
_request_timeout=settings.AWX_CONTAINER_GROUP_DEFAULT_LAUNCH_TIMEOUT)
_request_timeout=settings.AWX_CONTAINER_GROUP_K8S_API_TIMEOUT)
if pod.status.phase != 'Pending':
break
time.sleep(1)
else:
logger.debug(f"Pod {self.pod_name} is Pending.")
time.sleep(settings.AWX_CONTAINER_GROUP_POD_LAUNCH_RETRY_DELAY)
continue
if pod.status.phase == 'Running':
logger.debug(f"Pod {self.pod_name} is online.")
return pod
else:
raise RuntimeError(f"Unhandled Pod phase: {pod.status.phase}")
logger.warn(f"Pod {self.pod_name} did not start. Status is {pod.status.phase}.")
def delete(self):
return self.kube_api.delete_namespaced_pod(name=self.pod_name,
namespace=self.namespace,
_request_timeout=settings.AWX_CONTAINER_GROUP_DEFAULT_LAUNCH_TIMEOUT)
_request_timeout=settings.AWX_CONTAINER_GROUP_K8S_API_TIMEOUT)
@property
def namespace(self):

View File

@ -40,6 +40,9 @@ from django.utils.translation import ugettext_lazy as _
from django.core.cache import cache
from django.core.exceptions import ObjectDoesNotExist
# Kubernetes
from kubernetes.client.rest import ApiException
# Django-CRUM
from crum import impersonate
@ -73,6 +76,7 @@ from awx.main.utils import (get_ssh_version, update_scm_url,
ignore_inventory_computed_fields,
ignore_inventory_group_removal, extract_ansible_vars, schedule_task_manager,
get_awx_version)
from awx.main.utils.ansible import read_ansible_config
from awx.main.utils.common import get_ansible_version, _get_ansible_version, get_custom_venv_choices
from awx.main.utils.safe_yaml import safe_dump, sanitize_jinja
from awx.main.utils.reload import stop_local_services
@ -1183,6 +1187,18 @@ class BaseTask(object):
'''
Run the job/task and capture its output.
'''
self.instance = self.model.objects.get(pk=pk)
containerized = self.instance.is_containerized
pod_manager = None
if containerized:
# Here we are trying to launch a pod before transitioning the job into a running
# state. For some scenarios (like waiting for resources to become available) we do this
# rather than marking the job as error or failed. This is not always desirable. Cases
# such as invalid authentication should surface as an error.
pod_manager = self.deploy_container_group_pod(self.instance)
if not pod_manager:
return
# self.instance because of the update_model pattern and when it's used in callback handlers
self.instance = self.update_model(pk, status='running',
start_args='') # blank field to remove encrypted passwords
@ -1208,7 +1224,6 @@ class BaseTask(object):
try:
isolated = self.instance.is_isolated()
containerized = self.instance.is_containerized
self.instance.send_notification_templates("running")
private_data_dir = self.build_private_data_dir(self.instance)
self.pre_run_hook(self.instance, private_data_dir)
@ -1287,6 +1302,10 @@ class BaseTask(object):
},
}
if containerized:
# We don't want HOME passed through to container groups.
params['envvars'].pop('HOME')
if isinstance(self.instance, AdHocCommand):
params['module'] = self.build_module_name(self.instance)
params['module_args'] = self.build_module_args(self.instance)
@ -1316,16 +1335,6 @@ class BaseTask(object):
params.pop('inventory'),
os.path.join(private_data_dir, 'inventory')
)
pod_manager = None
if containerized:
from awx.main.scheduler.kubernetes import PodManager # Avoid circular import
params['envvars'].pop('HOME')
pod_manager = PodManager(self.instance)
self.cleanup_paths.append(pod_manager.kube_config)
pod_manager.deploy()
self.instance.execution_node = pod_manager.pod_name
self.instance.save(update_fields=['execution_node'])
ansible_runner.utils.dump_artifacts(params)
isolated_manager_instance = isolated_manager.IsolatedManager(
@ -1385,6 +1394,42 @@ class BaseTask(object):
raise AwxTaskError.TaskError(self.instance, rc)
def deploy_container_group_pod(self, task):
    """Launch the container-group pod for ``task``.

    Returns the PodManager on success. On failure, moves the job either
    back to 'pending' (Kubernetes quota exhausted -- retry on the next
    task-manager run) or to 'error', and returns None so the caller can
    bail out of job startup.
    """
    from awx.main.scheduler.kubernetes import PodManager  # Avoid circular import
    pod_manager = PodManager(self.instance)
    self.cleanup_paths.append(pod_manager.kube_config)
    log_name = task.log_format
    try:
        logger.debug(f"Launching pod for {log_name}.")
        pod_manager.deploy()
    except Exception as exc:
        # ApiException subclasses Exception, so a single handler covers both;
        # inspect the concrete type to special-case quota errors.
        if isinstance(exc, ApiException) and exc.status == 403:
            try:
                if 'exceeded quota' in json.loads(exc.body)['message']:
                    # If the k8s cluster does not have capacity, we move the
                    # job back into pending and wait until the next run of
                    # the task manager. This does not exactly play well with
                    # our current instance group precedence logic, since it
                    # will just sit here forever if kubernetes returns this
                    # error.
                    logger.warning(exc.body)
                    logger.warning(f"Could not launch pod for {log_name}. Exceeded quota.")
                    self.update_model(task.pk, status='pending')
                    return
            except Exception:
                logger.exception(f"Unable to handle response from Kubernetes API for {log_name}.")

        logger.exception(f"Error when launching pod for {log_name}")
        self.update_model(task.pk, status='error', result_traceback=traceback.format_exc())
        return

    self.update_model(task.pk, execution_node=pod_manager.pod_name)
    return pod_manager
@task()
class RunJob(BaseTask):
'''
@ -1529,14 +1574,22 @@ class RunJob(BaseTask):
if authorize:
env['ANSIBLE_NET_AUTH_PASS'] = network_cred.get_input('authorize_password', default='')
for env_key, folder, default in (
('ANSIBLE_COLLECTIONS_PATHS', 'requirements_collections', '~/.ansible/collections:/usr/share/ansible/collections'),
('ANSIBLE_ROLES_PATH', 'requirements_roles', '~/.ansible/roles:/usr/share/ansible/roles:/etc/ansible/roles')):
path_vars = (
('ANSIBLE_COLLECTIONS_PATHS', 'collections_paths', 'requirements_collections', '~/.ansible/collections:/usr/share/ansible/collections'),
('ANSIBLE_ROLES_PATH', 'roles_path', 'requirements_roles', '~/.ansible/roles:/usr/share/ansible/roles:/etc/ansible/roles'))
config_values = read_ansible_config(job.project.get_project_path(), list(map(lambda x: x[1], path_vars)))
for env_key, config_setting, folder, default in path_vars:
paths = default.split(':')
if env_key in env:
for path in env[env_key].split(':'):
if path not in paths:
paths = [env[env_key]] + paths
elif config_setting in config_values:
for path in config_values[config_setting].split(':'):
if path not in paths:
paths = [config_values[config_setting]] + paths
paths = [os.path.join(private_data_dir, folder)] + paths
env[env_key] = os.pathsep.join(paths)
@ -1790,7 +1843,10 @@ class RunJob(BaseTask):
if job.is_containerized:
from awx.main.scheduler.kubernetes import PodManager # prevent circular import
PodManager(job).delete()
pm = PodManager(job)
logger.debug(f"Deleting pod {pm.pod_name}")
pm.delete()
try:
inventory = job.inventory

View File

@ -368,6 +368,7 @@ class TestGenericRun():
task = tasks.RunJob()
task.update_model = mock.Mock(return_value=job)
task.model.objects.get = mock.Mock(return_value=job)
task.build_private_data_files = mock.Mock(side_effect=OSError())
with mock.patch('awx.main.tasks.copy_tree'):
@ -387,6 +388,7 @@ class TestGenericRun():
task = tasks.RunJob()
task.update_model = mock.Mock(wraps=update_model_wrapper)
task.model.objects.get = mock.Mock(return_value=job)
task.build_private_data_files = mock.Mock()
with mock.patch('awx.main.tasks.copy_tree'):
@ -578,6 +580,7 @@ class TestAdhocRun(TestJobExecution):
task = tasks.RunAdHocCommand()
task.update_model = mock.Mock(wraps=adhoc_update_model_wrapper)
task.model.objects.get = mock.Mock(return_value=adhoc_job)
task.build_inventory = mock.Mock()
with pytest.raises(Exception):

View File

@ -5,11 +5,15 @@
import codecs
import re
import os
import logging
from itertools import islice
from configparser import ConfigParser
# Django
from django.utils.encoding import smart_str
logger = logging.getLogger('awx.main.utils.ansible')
__all__ = ['skip_directory', 'could_be_playbook', 'could_be_inventory']
@ -97,3 +101,20 @@ def could_be_inventory(project_path, dir_path, filename):
except IOError:
return None
return inventory_rel_path
def read_ansible_config(project_path, variables_of_interest):
    """Collect selected [defaults] settings from ansible.cfg files.

    Reads /etc/ansible/ansible.cfg and, when a project path is given, the
    project's own ansible.cfg, returning a dict containing only the
    requested variable names that were found. Missing files are skipped
    silently; parse failures are logged and yield whatever was gathered.
    """
    config_files = ['/etc/ansible/ansible.cfg']
    if project_path:
        config_files = [os.path.join(project_path, 'ansible.cfg')] + config_files
    found = {}
    try:
        parser = ConfigParser()
        # ConfigParser.read() ignores files that do not exist.
        # NOTE(review): with ConfigParser.read, files later in the list win,
        # so /etc/ansible/ansible.cfg overrides the project file here --
        # confirm that ordering is intended.
        parser.read(config_files)
        if 'defaults' in parser:
            section = parser['defaults']
            for name in variables_of_interest:
                if name in section:
                    found[name] = section[name]
    except Exception:
        logger.exception('Failed to read ansible configuration(s) {}'.format(config_files))
    return found

View File

@ -38,18 +38,22 @@ from django.apps import apps
logger = logging.getLogger('awx.main.utils')
__all__ = ['get_object_or_400', 'camelcase_to_underscore', 'underscore_to_camelcase', 'memoize', 'memoize_delete',
'get_ansible_version', 'get_ssh_version', 'get_licenser', 'get_awx_version', 'update_scm_url',
'get_type_for_model', 'get_model_for_type', 'copy_model_by_class', 'region_sorting',
'copy_m2m_relationships', 'prefetch_page_capabilities', 'to_python_boolean',
'ignore_inventory_computed_fields', 'ignore_inventory_group_removal',
'_inventory_updates', 'get_pk_from_dict', 'getattrd', 'getattr_dne', 'NoDefaultProvided',
'get_current_apps', 'set_current_apps',
'extract_ansible_vars', 'get_search_fields', 'get_system_task_capacity', 'get_cpu_capacity', 'get_mem_capacity',
'wrap_args_with_proot', 'build_proot_temp_dir', 'check_proot_installed', 'model_to_dict',
'NullablePromptPseudoField', 'model_instance_diff', 'parse_yaml_or_json', 'RequireDebugTrueOrTest',
'has_model_field_prefetched', 'set_environ', 'IllegalArgumentError', 'get_custom_venv_choices', 'get_external_account',
'task_manager_bulk_reschedule', 'schedule_task_manager', 'classproperty', 'create_temporary_fifo']
__all__ = [
'get_object_or_400', 'camelcase_to_underscore', 'underscore_to_camelcase', 'memoize',
'memoize_delete', 'get_ansible_version', 'get_ssh_version', 'get_licenser',
'get_awx_version', 'update_scm_url', 'get_type_for_model', 'get_model_for_type',
'copy_model_by_class', 'region_sorting', 'copy_m2m_relationships',
'prefetch_page_capabilities', 'to_python_boolean', 'ignore_inventory_computed_fields',
'ignore_inventory_group_removal', '_inventory_updates', 'get_pk_from_dict', 'getattrd',
'getattr_dne', 'NoDefaultProvided', 'get_current_apps', 'set_current_apps',
'extract_ansible_vars', 'get_search_fields', 'get_system_task_capacity',
'get_cpu_capacity', 'get_mem_capacity', 'wrap_args_with_proot', 'build_proot_temp_dir',
'check_proot_installed', 'model_to_dict', 'NullablePromptPseudoField',
'model_instance_diff', 'parse_yaml_or_json', 'RequireDebugTrueOrTest',
'has_model_field_prefetched', 'set_environ', 'IllegalArgumentError',
'get_custom_venv_choices', 'get_external_account', 'task_manager_bulk_reschedule',
'schedule_task_manager', 'classproperty', 'create_temporary_fifo', 'truncate_stdout',
]
def get_object_or_400(klass, *args, **kwargs):
@ -1088,3 +1092,19 @@ def create_temporary_fifo(data):
).start()
return path
def truncate_stdout(stdout, size):
    """Truncate ``stdout`` to at most ``size`` characters.

    A size of zero or less disables truncation. When the text is cut, the
    last kept character is replaced by an ellipsis, and enough ANSI SGR
    reset codes are appended to close any color sequences left open by the
    truncation, so terminal styling does not bleed past the output.
    """
    from awx.main.constants import ANSI_SGR_PATTERN

    if size <= 0 or len(stdout) <= size:
        return stdout
    truncated = stdout[:(size - 1)] + u'\u2026'
    # Track how many SGR "set" sequences remain unmatched by a reset.
    open_sequences = 0
    for match in ANSI_SGR_PATTERN.finditer(truncated):
        if match.group() == u'\u001b[0m':
            open_sequences -= 1
        else:
            open_sequences += 1
    return truncated + u'\u001b[0m' * open_sequences

View File

@ -39,8 +39,9 @@ import uuid
from time import time
from jinja2 import Environment
from six import integer_types, PY3
from six.moves import configparser
from ansible.module_utils.six import integer_types, PY3
from ansible.module_utils.six.moves import configparser
try:
import argparse
@ -152,7 +153,7 @@ class VMWareInventory(object):
try:
text = str(text)
except UnicodeEncodeError:
text = text.encode('ascii', 'ignore')
text = text.encode('utf-8')
print('%s %s' % (datetime.datetime.now(), text))
def show(self):
@ -186,14 +187,14 @@ class VMWareInventory(object):
def write_to_cache(self, data):
''' Dump inventory to json file '''
with open(self.cache_path_cache, 'wb') as f:
f.write(json.dumps(data))
with open(self.cache_path_cache, 'w') as f:
f.write(json.dumps(data, indent=2))
def get_inventory_from_cache(self):
''' Read in jsonified inventory '''
jdata = None
with open(self.cache_path_cache, 'rb') as f:
with open(self.cache_path_cache, 'r') as f:
jdata = f.read()
return json.loads(jdata)
@ -343,10 +344,22 @@ class VMWareInventory(object):
'pwd': self.password,
'port': int(self.port)}
if hasattr(ssl, 'SSLContext') and not self.validate_certs:
if self.validate_certs and hasattr(ssl, 'SSLContext'):
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
context.verify_mode = ssl.CERT_REQUIRED
context.check_hostname = True
kwargs['sslContext'] = context
elif self.validate_certs and not hasattr(ssl, 'SSLContext'):
sys.exit('pyVim does not support changing verification mode with python < 2.7.9. Either update '
'python or use validate_certs=false.')
elif not self.validate_certs and hasattr(ssl, 'SSLContext'):
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
context.verify_mode = ssl.CERT_NONE
context.check_hostname = False
kwargs['sslContext'] = context
elif not self.validate_certs and not hasattr(ssl, 'SSLContext'):
# Python 2.7.9 < or RHEL/CentOS 7.4 <
pass
return self._get_instances(kwargs)
@ -390,7 +403,7 @@ class VMWareInventory(object):
instances = [x for x in instances if x.name == self.args.host]
instance_tuples = []
for instance in sorted(instances):
for instance in instances:
if self.guest_props:
ifacts = self.facts_from_proplist(instance)
else:
@ -614,7 +627,14 @@ class VMWareInventory(object):
lastref = lastref[x]
else:
lastref[x] = val
if self.args.debug:
self.debugl("For %s" % vm.name)
for key in list(rdata.keys()):
if isinstance(rdata[key], dict):
for ikey in list(rdata[key].keys()):
self.debugl("Property '%s.%s' has value '%s'" % (key, ikey, rdata[key][ikey]))
else:
self.debugl("Property '%s' has value '%s'" % (key, rdata[key]))
return rdata
def facts_from_vobj(self, vobj, level=0):
@ -685,7 +705,7 @@ class VMWareInventory(object):
if vobj.isalnum():
rdata = vobj
else:
rdata = vobj.decode('ascii', 'ignore')
rdata = vobj.encode('utf-8').decode('utf-8')
elif issubclass(type(vobj), bool) or isinstance(vobj, bool):
rdata = vobj
elif issubclass(type(vobj), integer_types) or isinstance(vobj, integer_types):

View File

@ -67,7 +67,9 @@ DATABASES = {
}
}
AWX_CONTAINER_GROUP_DEFAULT_LAUNCH_TIMEOUT = 10
AWX_CONTAINER_GROUP_K8S_API_TIMEOUT = 10
AWX_CONTAINER_GROUP_POD_LAUNCH_RETRIES = 100
AWX_CONTAINER_GROUP_POD_LAUNCH_RETRY_DELAY = 5
AWX_CONTAINER_GROUP_DEFAULT_NAMESPACE = 'default'
AWX_CONTAINER_GROUP_DEFAULT_IMAGE = 'ansible/ansible-runner'

View File

@ -372,9 +372,7 @@ table, tbody {
.List-noItems {
margin-top: 52px;
display: flex;
align-items: center;
justify-content: center;
display: inline-block;
width: 100%;
height: 200px;
border-radius: 5px;
@ -383,7 +381,7 @@ table, tbody {
color: @list-no-items-txt;
text-transform: uppercase;
text-align: center;
padding: 10px;
padding: 80px 10px;
}
.modal-body > .List-noItems {

View File

@ -52,7 +52,6 @@
height: calc(~"100vh - 80px");
}
@media screen and (min-width: 768px){
.NetworkingExtraVars .modal-dialog{
width: 700px;

View File

@ -20,16 +20,18 @@ function AtTabController ($state) {
group.register(scope);
};
vm.go = () => {
vm.handleClick = () => {
if (scope.state._disabled || scope.state._active) {
return;
}
if (scope.state._go) {
$state.go(scope.state._go, scope.state._params, { reload: true });
} else {
group.clearActive();
scope.state._active = true;
return;
}
group.clearActive();
scope.state._active = true;
if (scope.state._onClickActivate) {
scope.state._onClickActivate();
}
};
}

View File

@ -2,6 +2,6 @@
ng-attr-disabled="{{ state._disabled || undefined }}"
ng-class="{ 'at-Tab--active': state._active, 'at-Tab--disabled': state._disabled }"
ng-hide="{{ state._hide }}"
ng-click="state._go && vm.go();">
ng-click="vm.handleClick();">
<ng-transclude></ng-transclude>
</button>

View File

@ -1,5 +1,5 @@
<div ui-view="credentials"></div>
<a class="containerGroups-messageBar-link"href="https://docs.ansible.com/ansible-tower/latest/html/userguide/instance_groups.html" target="_blank" style="color: white">
<a class="containerGroups-messageBar-link" href="https://docs.ansible.com/ansible-tower/latest/html/administration/external_execution_envs.html#container-group-considerations" target="_blank" style="color: white">
<div class="Section-messageBar">
<i class="Section-messageBar-warning fa fa-warning"></i>
<span class="Section-messageBar-text">This feature is tech preview, and is subject to change in a future release. Click here for documentation.</span>
@ -21,13 +21,15 @@
{{ vm.form.extraVars.toggleLabel }}
</span>
</label>
<div ng-class="{'ContainerGroups-codeMirror': vm.form.extraVars.isOpen }">
<at-switch on-toggle="vm.toggle(instance)" switch-on="vm.form.extraVars.isOpen"
switch-disabled="vm.rowAction.toggle._disabled"></at-switch>
<div ng-disabled="{{vm.form.extraVars.disabled}}" ng-class="{'ContainerGroups-codeMirror': vm.form.extraVars.isOpen }">
<at-switch on-toggle="vm.toggle(instance)" switch-on="vm.form.extraVars.isOpen"
switch-disabled="vm.switchDisabled"></at-switch>
</div>
<at-code-mirror
ng-disabled="{{vm.form.extraVars.disabled}}"
ng-if="vm.form.extraVars.isOpen"
col="4" tab="3"
ng-class="{'containerGroups-codeMirror-disabled': vm.form.extraVars.disabled}"
class="Form-formGroup--fullWidth"
variables="vm.form.extraVars.value"
label="{{ vm.form.extraVars.label }}"

View File

@ -4,7 +4,10 @@ function EditContainerGroupController($rootScope, $scope, $state, models, string
instanceGroup,
credential
} = models;
let canEdit = false;
if (instanceGroup.has('options', 'actions.PUT')) {
canEdit = instanceGroup.model.OPTIONS.actions.PUT;
}
if (!instanceGroup.get('is_containerized')) {
return $state.go(
'instanceGroups.edit',
@ -21,6 +24,8 @@ function EditContainerGroupController($rootScope, $scope, $state, models, string
vm.lookUpTitle = strings.get('container.LOOK_UP_TITLE');
vm.form = instanceGroup.createFormSchema('post');
vm.switchDisabled = false;
vm.form.disabled = !instanceGroup.has('options', 'actions.PUT');
vm.form.name.required = true;
vm.form.credential = {
type: 'field',
@ -48,14 +53,23 @@ function EditContainerGroupController($rootScope, $scope, $state, models, string
_go: 'instanceGroups.containerGroupJobs',
_params: { instance_group_id: instanceGroup.get('id') }
}
};
vm.form.extraVars = {
label: strings.get('container.POD_SPEC_LABEL'),
value: EditContainerGroupDataset.data.pod_spec_override || instanceGroup.model.OPTIONS.actions.PUT.pod_spec_override.default,
name: 'extraVars',
toggleLabel: strings.get('container.POD_SPEC_TOGGLE')
};
if (!canEdit) {
vm.form.extraVars = {
label: strings.get('container.POD_SPEC_LABEL'),
value: EditContainerGroupDataset.data.pod_spec_override || "---",
name: 'extraVars',
disabled: true
};
vm.switchDisabled = true;
} else {
vm.form.extraVars = {
label: strings.get('container.POD_SPEC_LABEL'),
value: EditContainerGroupDataset.data.pod_spec_override || instanceGroup.model.OPTIONS.actions.PUT.pod_spec_override.default,
name: 'extraVars',
toggleLabel: strings.get('container.POD_SPEC_TOGGLE')
};
}
function sanitizeVars (str) {
// Quick function to test if the host vars are a json-object-string,
@ -90,7 +104,7 @@ function EditContainerGroupController($rootScope, $scope, $state, models, string
}
const podSpecValue = sanitizeVars(EditContainerGroupDataset.data.pod_spec_override);
const defaultPodSpecValue = sanitizeVars(instanceGroup.model.OPTIONS.actions.PUT.pod_spec_override.default);
const defaultPodSpecValue = canEdit ? sanitizeVars(instanceGroup.model.OPTIONS.actions.PUT.pod_spec_override.default) : '---';
if ((podSpecValue !== '---') && podSpecValue && podSpecValue.trim() !== defaultPodSpecValue.trim()) {
vm.form.extraVars.isOpen = true;

View File

@ -100,6 +100,7 @@
.at-Row-container{
flex-wrap: wrap;
}
.containerGroups-messageBar-link:hover{
text-decoration: underline;
}

View File

@ -101,7 +101,7 @@
</div>
<div class="at-Row-actions" >
<capacity-bar ng-show="!instance_group.credential" label-value="{{:: vm.strings.get('list.ROW_ITEM_LABEL_USED_CAPACITY') }}" capacity="instance_group.consumed_capacity" total-capacity="instance_group.capacity"></capacity-bar>
<at-row-action ng-class="{'at-Row-actions-noCredential': !instance_group.credential}" icon="fa-trash" ng-click="vm.deleteInstanceGroup(instance_group)" ng-if="vm.rowAction.trash(instance_group)">
<at-row-action icon="fa-trash" ng-click="vm.deleteInstanceGroup(instance_group)" ng-if="vm.rowAction.trash(instance_group)">
</at-row-action>
</div>
</div>

View File

@ -20,10 +20,11 @@
flex: 1 0;
height: @height;
width: 100%;
margin-right: 20px;
@media screen and (max-width: @breakpoint){
margin-right: 0px;
height: inherit;
margin-right: 20px;
@media screen and (max-width: @breakpoint){
height: inherit;
margin-right: 0px;
max-width: none;
}
}

View File

@ -189,6 +189,36 @@ export default [ 'ProcessErrors', 'CredentialTypeModel', 'TemplatesStrings', '$f
modal.show($filter('sanitize')(vm.promptDataClone.templateName));
vm.promptData.triggerModalOpen = false;
vm._savedPromptData = {
1: _.cloneDeep(vm.promptDataClone)
};
Object.keys(vm.steps).forEach(step => {
if (!vm.steps[step].tab) {
return;
}
vm.steps[step].tab._onClickActivate = () => {
if (vm._savedPromptData[vm.steps[step].tab.order]) {
vm.promptDataClone = vm._savedPromptData[vm.steps[step].tab.order];
}
Object.keys(vm.steps).forEach(tabStep => {
if (!vm.steps[tabStep].tab) {
return;
}
if (vm.steps[tabStep].tab.order < vm.steps[step].tab.order) {
vm.steps[tabStep].tab._disabled = false;
vm.steps[tabStep].tab._active = false;
} else if (vm.steps[tabStep].tab.order === vm.steps[step].tab.order) {
vm.steps[tabStep].tab._disabled = false;
vm.steps[tabStep].tab._active = true;
} else {
vm.steps[tabStep].tab._disabled = true;
vm.steps[tabStep].tab._active = false;
}
});
scope.$broadcast('promptTabChange', { step });
};
});
modal.onClose = () => {
scope.$emit('launchModalOpen', false);
};
@ -214,19 +244,39 @@ export default [ 'ProcessErrors', 'CredentialTypeModel', 'TemplatesStrings', '$f
return;
}
}
let nextStep;
Object.keys(vm.steps).forEach(step => {
if(vm.steps[step].tab) {
if(vm.steps[step].tab.order === currentTab.order) {
vm.steps[step].tab._active = false;
vm.steps[step].tab._disabled = true;
} else if(vm.steps[step].tab.order === currentTab.order + 1) {
activeTab = currentTab;
vm.steps[step].tab._active = true;
vm.steps[step].tab._disabled = false;
scope.$broadcast('promptTabChange', { step });
}
if (!vm.steps[step].tab) {
return;
}
if (vm.steps[step].tab.order === currentTab.order + 1) {
nextStep = step;
}
});
if (!nextStep) {
return;
}
// Save the current promptData state in case we need to revert
vm._savedPromptData[currentTab.order] = _.cloneDeep(vm.promptDataClone);
Object.keys(vm.steps).forEach(tabStep => {
if (!vm.steps[tabStep].tab) {
return;
}
if (vm.steps[tabStep].tab.order < vm.steps[nextStep].tab.order) {
vm.steps[tabStep].tab._disabled = false;
vm.steps[tabStep].tab._active = false;
} else if (vm.steps[tabStep].tab.order === vm.steps[nextStep].tab.order) {
vm.steps[tabStep].tab._disabled = false;
vm.steps[tabStep].tab._active = true;
} else {
vm.steps[tabStep].tab._disabled = true;
vm.steps[tabStep].tab._active = false;
}
});
scope.$broadcast('promptTabChange', { step: nextStep });
};
vm.keypress = (event) => {

View File

@ -22,7 +22,7 @@
read-only-prompts="vm.readOnlyPrompts">
</prompt-credential>
</div>
<div ng-if="vm.steps.other_prompts.includeStep" ng-show="vm.steps.other_prompts.tab._active" id="prompt_other_prompts_step">
<div ng-if="vm.steps.other_prompts.includeStep && vm.steps.other_prompts.tab._active" id="prompt_other_prompts_step">
<prompt-other-prompts
prompt-data="vm.promptDataClone"
other-prompts-form="vm.forms.otherPrompts"

View File

@ -77,7 +77,7 @@ EXAMPLES = '''
# Example for using tower_inventory.yml file
plugin: tower
plugin: awx.awx.tower
host: your_ansible_tower_server_network_address
username: your_ansible_tower_username
password: your_ansible_tower_password
@ -116,7 +116,7 @@ except ImportError:
class InventoryModule(BaseInventoryPlugin):
NAME = 'tower'
NAME = 'awx.awx.tower' # REPLACE
# Stays backward compatible with tower inventory script.
# If the user supplies '@tower_inventory' as path, the plugin will read from environment variables.
no_config_file_supplied = False

View File

@ -21,6 +21,12 @@
regexp: '^extends_documentation_fragment: awx.awx.auth$'
replace: 'extends_documentation_fragment: {{ collection_namespace }}.{{ collection_package }}.auth'
with_items: "{{ module_files.files }}"
- name: Change files to support desired namespace and package names
replace:
path: "{{ playbook_dir }}/plugins/inventory/tower.py"
regexp: "^ NAME = 'awx.awx.tower' # REPLACE$"
replace: " NAME = '{{ collection_namespace }}.{{ collection_package }}.tower' # REPLACE"
when:
- (collection_package != 'awx') or (collection_namespace != 'awx')

View File

@ -225,6 +225,7 @@ class AssociationMixin(object):
def __init__(self, connection, resource):
self.conn = connection
self.resource = {
'approval_notification': 'notification_templates',
'start_notification': 'notification_templates',
'success_notification': 'notification_templates',
'failure_notification': 'notification_templates',
@ -299,11 +300,21 @@ JobTemplateNotificationDisAssociation.targets.update({
class WorkflowJobTemplateNotificationAssociation(NotificationAssociateMixin, CustomAction):
    # Associate a notification template with a workflow job template.
    resource = 'workflow_job_templates'
    action = 'associate'
    # Take a per-class copy: the 'approval_notification' target added below
    # must not mutate the targets mapping shared by every other
    # NotificationAssociateMixin subclass.
    targets = NotificationAssociateMixin.targets.copy()


class WorkflowJobTemplateNotificationDisAssociation(NotificationAssociateMixin, CustomAction):
    # Disassociate a notification template from a workflow job template.
    resource = 'workflow_job_templates'
    action = 'disassociate'
    # Per-class copy, for the same reason as the association class above.
    targets = NotificationAssociateMixin.targets.copy()


# Workflow job templates additionally expose an 'approval_notification'
# target (mapped to the notification_templates_approvals related endpoint).
WorkflowJobTemplateNotificationAssociation.targets.update({
    'approval_notification': ['notification_templates_approvals', 'notification_template'],
})
WorkflowJobTemplateNotificationDisAssociation.targets.update({
    'approval_notification': ['notification_templates_approvals', 'notification_template'],
})
class ProjectNotificationAssociation(NotificationAssociateMixin, CustomAction):
@ -329,11 +340,21 @@ class InventorySourceNotificationDisAssociation(NotificationAssociateMixin, Cust
class OrganizationNotificationAssociation(NotificationAssociateMixin, CustomAction):
    # Associate a notification template with an organization.
    resource = 'organizations'
    action = 'associate'
    # Take a per-class copy: the 'approval_notification' target added below
    # must stay local to the organization classes instead of leaking into
    # the shared NotificationAssociateMixin.targets mapping.
    targets = NotificationAssociateMixin.targets.copy()


class OrganizationNotificationDisAssociation(NotificationAssociateMixin, CustomAction):
    # Disassociate a notification template from an organization.
    resource = 'organizations'
    action = 'disassociate'
    # Per-class copy, for the same reason as the association class above.
    targets = NotificationAssociateMixin.targets.copy()


# Organizations additionally expose an 'approval_notification' target
# (mapped to the notification_templates_approvals related endpoint).
OrganizationNotificationAssociation.targets.update({
    'approval_notification': ['notification_templates_approvals', 'notification_template'],
})
OrganizationNotificationDisAssociation.targets.update({
    'approval_notification': ['notification_templates_approvals', 'notification_template'],
})
class SettingsList(CustomAction):

View File

@ -1,7 +1,7 @@
AWX Command Line Interface
==========================
`awx` is the official command-line client for AWX. It:
* Uses naming and structure consistent with the AWX HTTP API
* Provides consistent output formats with optional machine-parsable formats

View File

@ -95,7 +95,7 @@ class ResourceOptionsParser(object):
def get_allowed_options(self):
    """Populate ``self.allowed_options`` with the HTTP methods allowed on a
    detail view of this endpoint.

    Sends an OPTIONS request to a detail URL built from the endpoint plus a
    literal pk of ``1`` and splits the ``Allow`` response header into a list
    of method names (e.g. ``['GET', 'PUT', 'DELETE', 'OPTIONS']``).
    """
    # The trailing slash makes this a well-formed detail URL
    # (e.g. /api/v2/jobs/1/); presumably without it the API would
    # redirect or 404 instead of answering OPTIONS — confirm against
    # the server's URL routing.
    self.allowed_options = self.page.connection.options(
        self.page.endpoint + '1/'
    ).headers.get('Allow', '').split(', ')
def build_list_actions(self):

View File

@ -73,7 +73,7 @@ def monitor_workflow(response, session, print_stdout=True, timeout=None,
def monitor(response, session, print_stdout=True, timeout=None, interval=.25):
get = response.url.get
payload = {'order_by': 'start_line'}
payload = {'order_by': 'start_line', 'no_truncate': True}
if response.type == 'job':
events = response.related.job_events.get
else: