Merge branch 'downstream' into devel

This commit is contained in:
Ryan Petrello
2020-05-08 16:48:48 -04:00
23 changed files with 11276 additions and 9788 deletions

View File

@@ -30,6 +30,7 @@ except ImportError:
HAS_DJANGO = False HAS_DJANGO = False
else: else:
from django.db.backends.base import schema from django.db.backends.base import schema
from django.db.models import indexes
from django.db.backends.utils import names_digest from django.db.backends.utils import names_digest
@@ -50,6 +51,7 @@ if HAS_DJANGO is True:
return h.hexdigest()[:length] return h.hexdigest()[:length]
schema.names_digest = names_digest schema.names_digest = names_digest
indexes.names_digest = names_digest
def find_commands(management_dir): def find_commands(management_dir):

View File

@@ -10,6 +10,7 @@ import socket
from socket import SHUT_RDWR from socket import SHUT_RDWR
# Django # Django
from django.db import connection
from django.conf import settings from django.conf import settings
from django.http import Http404 from django.http import Http404
from django.utils.translation import ugettext_lazy as _ from django.utils.translation import ugettext_lazy as _
@@ -130,7 +131,8 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
setting.save(update_fields=['value']) setting.save(update_fields=['value'])
settings_change_list.append(key) settings_change_list.append(key)
if settings_change_list: if settings_change_list:
-            handle_setting_changes.delay(settings_change_list)
+            connection.on_commit(lambda: handle_setting_changes.delay(settings_change_list))
def destroy(self, request, *args, **kwargs): def destroy(self, request, *args, **kwargs):
instance = self.get_object() instance = self.get_object()
@@ -145,7 +147,7 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
setting.delete() setting.delete()
settings_change_list.append(setting.key) settings_change_list.append(setting.key)
if settings_change_list: if settings_change_list:
-            handle_setting_changes.delay(settings_change_list)
+            connection.on_commit(lambda: handle_setting_changes.delay(settings_change_list))
# When TOWER_URL_BASE is deleted from the API, reset it to the hostname # When TOWER_URL_BASE is deleted from the API, reset it to the hostname
# used to make the request as a default. # used to make the request as a default.

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -4,6 +4,7 @@
# Python # Python
import datetime import datetime
import time import time
import json
import logging import logging
import re import re
import copy import copy
@@ -2578,11 +2579,26 @@ class satellite6(PluginFileInjector):
def inventory_as_dict(self, inventory_update, private_data_dir): def inventory_as_dict(self, inventory_update, private_data_dir):
ret = super(satellite6, self).inventory_as_dict(inventory_update, private_data_dir) ret = super(satellite6, self).inventory_as_dict(inventory_update, private_data_dir)
group_patterns = '[]'
group_prefix = 'foreman_'
want_hostcollections = False
want_ansible_ssh_host = False want_ansible_ssh_host = False
want_facts = True
foreman_opts = inventory_update.source_vars_dict.copy() foreman_opts = inventory_update.source_vars_dict.copy()
for k, v in foreman_opts.items(): for k, v in foreman_opts.items():
-            if k == 'satellite6_want_ansible_ssh_host' and isinstance(v, bool):
+            if k == 'satellite6_group_patterns' and isinstance(v, str):
group_patterns = v
elif k == 'satellite6_group_prefix' and isinstance(v, str):
group_prefix = v
elif k == 'satellite6_want_hostcollections' and isinstance(v, bool):
want_hostcollections = v
elif k == 'satellite6_want_ansible_ssh_host' and isinstance(v, bool):
want_ansible_ssh_host = v want_ansible_ssh_host = v
elif k == 'satellite6_want_facts' and isinstance(v, bool):
want_facts = v
else:
ret[k] = str(v)
# Compatibility content # Compatibility content
group_by_hostvar = { group_by_hostvar = {
@@ -2605,13 +2621,61 @@ class satellite6(PluginFileInjector):
"key": "foreman['content_facet_attributes']['content_view_name'] | " "key": "foreman['content_facet_attributes']['content_view_name'] | "
"lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9\_]', '_')"} "lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9\_]', '_')"}
} }
ret['keyed_groups'] = [group_by_hostvar[grouping_name] for grouping_name in group_by_hostvar]
-        ret['legacy_hostvars'] = True
+        ret['legacy_hostvars'] = True  # convert hostvar structure to the form used by the script
ret['want_facts'] = True
ret['want_params'] = True ret['want_params'] = True
ret['group_prefix'] = group_prefix
ret['want_hostcollections'] = want_hostcollections
ret['want_facts'] = want_facts
if want_ansible_ssh_host: if want_ansible_ssh_host:
ret['compose'] = {'ansible_ssh_host': "foreman['ip6'] | default(foreman['ip'], true)"} ret['compose'] = {'ansible_ssh_host': "foreman['ip6'] | default(foreman['ip'], true)"}
ret['keyed_groups'] = [group_by_hostvar[grouping_name] for grouping_name in group_by_hostvar]
def form_keyed_group(group_pattern):
"""
Converts foreman group_pattern to
inventory plugin keyed_group
e.g. {app_param}-{tier_param}-{dc_param}
becomes
"%s-%s-%s" | format(app_param, tier_param, dc_param)
"""
if type(group_pattern) is not str:
return None
params = re.findall('{[^}]*}', group_pattern)
if len(params) == 0:
return None
param_names = []
for p in params:
param_names.append(p[1:-1].strip()) # strip braces and space
# form keyed_group key by
# replacing curly braces with '%s'
# (for use with jinja's format filter)
key = group_pattern
for p in params:
key = key.replace(p, '%s', 1)
# apply jinja filter to key
key = '"{}" | format({})'.format(key, ', '.join(param_names))
keyed_group = {'key': key,
'separator': ''}
return keyed_group
try:
group_patterns = json.loads(group_patterns)
if type(group_patterns) is list:
for group_pattern in group_patterns:
keyed_group = form_keyed_group(group_pattern)
if keyed_group:
ret['keyed_groups'].append(keyed_group)
except json.JSONDecodeError:
logger.warning('Could not parse group_patterns. Expected JSON-formatted string, found: {}'
.format(group_patterns))
return ret return ret

View File

@@ -67,7 +67,7 @@ from awx.main.queue import CallbackQueueDispatcher
from awx.main.isolated import manager as isolated_manager from awx.main.isolated import manager as isolated_manager
from awx.main.dispatch.publish import task from awx.main.dispatch.publish import task
from awx.main.dispatch import get_local_queuename, reaper from awx.main.dispatch import get_local_queuename, reaper
-from awx.main.utils import (get_ssh_version, update_scm_url,
+from awx.main.utils import (update_scm_url,
ignore_inventory_computed_fields, ignore_inventory_computed_fields,
ignore_inventory_group_removal, extract_ansible_vars, schedule_task_manager, ignore_inventory_group_removal, extract_ansible_vars, schedule_task_manager,
get_awx_version) get_awx_version)
@@ -288,7 +288,7 @@ def handle_setting_changes(setting_keys):
setting.startswith('LOG_AGGREGATOR') setting.startswith('LOG_AGGREGATOR')
for setting in setting_keys for setting in setting_keys
]): ]):
-        connection.on_commit(reconfigure_rsyslog)
+        reconfigure_rsyslog()
@task(queue='tower_broadcast_all') @task(queue='tower_broadcast_all')
@@ -897,21 +897,14 @@ class BaseTask(object):
private_data = self.build_private_data(instance, private_data_dir) private_data = self.build_private_data(instance, private_data_dir)
private_data_files = {'credentials': {}} private_data_files = {'credentials': {}}
if private_data is not None: if private_data is not None:
ssh_ver = get_ssh_version()
ssh_too_old = True if ssh_ver == "unknown" else Version(ssh_ver) < Version("6.0")
openssh_keys_supported = ssh_ver != "unknown" and Version(ssh_ver) >= Version("6.5")
for credential, data in private_data.get('credentials', {}).items(): for credential, data in private_data.get('credentials', {}).items():
# Bail out now if a private key was provided in OpenSSH format
# and we're running an earlier version (<6.5).
if 'OPENSSH PRIVATE KEY' in data and not openssh_keys_supported:
raise RuntimeError(OPENSSH_KEY_ERROR)
# OpenSSH formatted keys must have a trailing newline to be # OpenSSH formatted keys must have a trailing newline to be
# accepted by ssh-add. # accepted by ssh-add.
if 'OPENSSH PRIVATE KEY' in data and not data.endswith('\n'): if 'OPENSSH PRIVATE KEY' in data and not data.endswith('\n'):
data += '\n' data += '\n'
# For credentials used with ssh-add, write to a named pipe which # For credentials used with ssh-add, write to a named pipe which
# will be read then closed, instead of leaving the SSH key on disk. # will be read then closed, instead of leaving the SSH key on disk.
-                if credential and credential.credential_type.namespace in ('ssh', 'scm') and not ssh_too_old:
+                if credential and credential.credential_type.namespace in ('ssh', 'scm'):
try: try:
os.mkdir(os.path.join(private_data_dir, 'env')) os.mkdir(os.path.join(private_data_dir, 'env'))
except OSError as e: except OSError as e:

View File

@@ -107,11 +107,6 @@ def workflow_job_template_factory():
return create_workflow_job_template return create_workflow_job_template
@pytest.fixture
def get_ssh_version(mocker):
return mocker.patch('awx.main.tasks.get_ssh_version', return_value='OpenSSH_6.9p1, LibreSSL 2.1.8')
@pytest.fixture @pytest.fixture
def job_template_with_survey_passwords_unit(job_template_with_survey_passwords_factory): def job_template_with_survey_passwords_unit(job_template_with_survey_passwords_factory):
return job_template_with_survey_passwords_factory(persisted=False) return job_template_with_survey_passwords_factory(persisted=False)

View File

@@ -1,5 +1,7 @@
base_source_var: value_of_var
compose: compose:
ansible_ssh_host: foreman['ip6'] | default(foreman['ip'], true) ansible_ssh_host: foreman['ip6'] | default(foreman['ip'], true)
group_prefix: foo_group_prefix
keyed_groups: keyed_groups:
- key: foreman['environment_name'] | lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9\_]', '_') | regex_replace('none', '') - key: foreman['environment_name'] | lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9\_]', '_') | regex_replace('none', '')
prefix: foreman_environment_ prefix: foreman_environment_
@@ -16,7 +18,12 @@ keyed_groups:
- key: foreman['content_facet_attributes']['content_view_name'] | lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9\_]', '_') - key: foreman['content_facet_attributes']['content_view_name'] | lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9\_]', '_')
prefix: foreman_content_view_ prefix: foreman_content_view_
separator: '' separator: ''
- key: '"%s-%s-%s" | format(app, tier, color)'
separator: ''
- key: '"%s-%s" | format(app, color)'
separator: ''
legacy_hostvars: true legacy_hostvars: true
plugin: theforeman.foreman.foreman plugin: theforeman.foreman.foreman
want_facts: true want_facts: true
want_hostcollections: true
want_params: true want_params: true

View File

@@ -6,12 +6,12 @@ user = fooo
password = fooo password = fooo
[ansible] [ansible]
-group_patterns = foo_group_patterns
+group_patterns = ["{app}-{tier}-{color}", "{app}-{color}"]
want_facts = True want_facts = True
want_hostcollections = True want_hostcollections = True
group_prefix = foo_group_prefix group_prefix = foo_group_prefix
want_ansible_ssh_host = True want_ansible_ssh_host = True
-rich_params = True
+rich_params = False
[cache] [cache]
path = /tmp path = /tmp

View File

@@ -64,11 +64,10 @@ INI_TEST_VARS = {
'tags': 'Creator:jmarshall, peanutbutter:jelly' 'tags': 'Creator:jmarshall, peanutbutter:jelly'
}, },
'satellite6': { 'satellite6': {
-        'satellite6_group_patterns': 'foo_group_patterns',
+        'satellite6_group_patterns': '["{app}-{tier}-{color}", "{app}-{color}"]',
'satellite6_group_prefix': 'foo_group_prefix', 'satellite6_group_prefix': 'foo_group_prefix',
'satellite6_want_hostcollections': True, 'satellite6_want_hostcollections': True,
'satellite6_want_ansible_ssh_host': True, 'satellite6_want_ansible_ssh_host': True,
'satellite6_rich_params': True,
'satellite6_want_facts': True 'satellite6_want_facts': True
}, },

View File

@@ -43,7 +43,7 @@ logger = logging.getLogger('awx.main.utils')
__all__ = [ __all__ = [
'get_object_or_400', 'camelcase_to_underscore', 'underscore_to_camelcase', 'memoize', 'get_object_or_400', 'camelcase_to_underscore', 'underscore_to_camelcase', 'memoize',
'memoize_delete', 'get_ansible_version', 'get_ssh_version', 'get_licenser', 'get_awx_http_client_headers', 'memoize_delete', 'get_ansible_version', 'get_licenser', 'get_awx_http_client_headers',
'get_awx_version', 'update_scm_url', 'get_type_for_model', 'get_model_for_type', 'get_awx_version', 'update_scm_url', 'get_type_for_model', 'get_model_for_type',
'copy_model_by_class', 'region_sorting', 'copy_m2m_relationships', 'copy_model_by_class', 'region_sorting', 'copy_m2m_relationships',
'prefetch_page_capabilities', 'to_python_boolean', 'ignore_inventory_computed_fields', 'prefetch_page_capabilities', 'to_python_boolean', 'ignore_inventory_computed_fields',
@@ -190,20 +190,6 @@ def get_ansible_version():
return _get_ansible_version('ansible') return _get_ansible_version('ansible')
@memoize()
def get_ssh_version():
'''
Return SSH version installed.
'''
try:
proc = subprocess.Popen(['ssh', '-V'],
stderr=subprocess.PIPE)
result = smart_str(proc.communicate()[1])
return result.split(" ")[0].split("_")[1]
except Exception:
return 'unknown'
def get_awx_version(): def get_awx_version():
''' '''
Return AWX version as reported by setuptools. Return AWX version as reported by setuptools.

View File

@@ -1,5 +1,6 @@
import os import os
import shutil
import tempfile
import urllib.parse as urlparse import urllib.parse as urlparse
from django.conf import settings from django.conf import settings
@@ -112,6 +113,10 @@ def construct_rsyslog_conf_template(settings=settings):
def reconfigure_rsyslog(): def reconfigure_rsyslog():
tmpl = construct_rsyslog_conf_template() tmpl = construct_rsyslog_conf_template()
with open('/var/lib/awx/rsyslog/rsyslog.conf', 'w') as f: # Write config to a temp file then move it to preserve atomicity
f.write(tmpl + '\n') with tempfile.TemporaryDirectory(prefix='rsyslog-conf-') as temp_dir:
path = temp_dir + '/rsyslog.conf.temp'
with open(path, 'w') as f:
f.write(tmpl + '\n')
shutil.move(path, '/var/lib/awx/rsyslog/rsyslog.conf')
supervisor_service_command(command='restart', service='awx-rsyslogd') supervisor_service_command(command='restart', service='awx-rsyslogd')

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -51,7 +51,7 @@ command = rsyslogd -n -i /var/run/awx-rsyslog/rsyslog.pid -f /var/lib/awx/rsyslo
autostart = true autostart = true
autorestart = true autorestart = true
stopwaitsecs = 5 stopwaitsecs = 5
-stopsignal=KILL
+stopsignal=TERM
stopasgroup=true stopasgroup=true
killasgroup=true killasgroup=true
redirect_stderr=true redirect_stderr=true

View File

@@ -58,7 +58,7 @@ data:
autostart = true autostart = true
autorestart = true autorestart = true
stopwaitsecs = 5 stopwaitsecs = 5
-stopsignal=KILL
+stopsignal=TERM
stopasgroup=true stopasgroup=true
killasgroup=true killasgroup=true
redirect_stderr=true redirect_stderr=true

View File

@@ -76,7 +76,7 @@ command = rsyslogd -n -i /var/run/awx-rsyslog/rsyslog.pid -f /var/lib/awx/rsyslo
autostart = true autostart = true
autorestart = true autorestart = true
stopwaitsecs = 5 stopwaitsecs = 5
-stopsignal=KILL
+stopsignal=TERM
stopasgroup=true stopasgroup=true
killasgroup=true killasgroup=true
redirect_stderr=true redirect_stderr=true