Merge branch 'downstream' into devel

Ryan Petrello 2020-04-28 12:18:13 -04:00
commit aad371d224
GPG Key ID: F2AA5F2122351777
34 changed files with 291 additions and 194 deletions

View File

@ -44,8 +44,8 @@ class FixedSlidingWindow():
def cleanup(self, now_bucket=None):
now_bucket = now_bucket or now_seconds()
if self.start_time + 60 <= now_bucket:
self.start_time = now_bucket + 60 + 1
if self.start_time + 60 < now_bucket:
self.start_time = now_bucket - 60
# Delete old entries
for k in list(self.buckets.keys()):
@ -53,16 +53,15 @@ class FixedSlidingWindow():
del self.buckets[k]
def record(self, ts=None):
ts = ts or datetime.datetime.now()
now_bucket = int((ts - datetime.datetime(1970,1,1)).total_seconds())
now_bucket = ts or dt_to_seconds(datetime.datetime.now())
val = self.buckets.get(now_bucket, 0)
self.buckets[now_bucket] = val + 1
self.cleanup(now_bucket)
def render(self):
self.cleanup()
def render(self, ts=None):
self.cleanup(now_bucket=ts)
return sum(self.buckets.values()) or 0
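
As a reference for the behavior change above, here is a minimal, self-contained sketch of the same fixed-sliding-window idea: events are counted into per-second buckets and render() sums the last 60 seconds. It is illustrative only, not the AWX class; the epoch-seconds helper mirrors the arithmetic removed from record() above.

```python
# Illustrative sketch only -- not the AWX FixedSlidingWindow implementation.
import datetime


def epoch_seconds(dt):
    # same epoch arithmetic as the line removed from record() above
    return int((dt - datetime.datetime(1970, 1, 1)).total_seconds())


class SlidingWindowSketch:
    def __init__(self, start_time):
        self.start_time = start_time
        self.buckets = {}

    def cleanup(self, now_bucket):
        # only slide once the window start is more than 60s in the past
        if self.start_time + 60 < now_bucket:
            self.start_time = now_bucket - 60
        for k in list(self.buckets.keys()):
            if k < self.start_time:
                del self.buckets[k]

    def record(self, ts):
        self.buckets[ts] = self.buckets.get(ts, 0) + 1
        self.cleanup(ts)

    def render(self, ts):
        self.cleanup(ts)
        return sum(self.buckets.values())


w = SlidingWindowSketch(epoch_seconds(datetime.datetime(1985, 1, 1, 1, 0, 0)))
for i in range(20):
    w.record(epoch_seconds(datetime.datetime(1985, 1, 1, 1, 0, i)))
assert w.render(epoch_seconds(datetime.datetime(1985, 1, 1, 1, 0, 59))) == 20
assert w.render(epoch_seconds(datetime.datetime(1985, 1, 1, 1, 1, 10))) == 10  # 10 records slid out
```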

View File

@ -121,6 +121,17 @@ class InstanceManager(models.Manager):
if not hostname:
hostname = settings.CLUSTER_HOST_ID
with advisory_lock('instance_registration_%s' % hostname):
if settings.AWX_AUTO_DEPROVISION_INSTANCES:
# detect any instances with the same IP address.
# if one exists, set it to None
inst_conflicting_ip = self.filter(ip_address=ip_address).exclude(hostname=hostname)
if inst_conflicting_ip.exists():
for other_inst in inst_conflicting_ip:
other_hostname = other_inst.hostname
other_inst.ip_address = None
other_inst.save(update_fields=['ip_address'])
logger.warning("IP address {0} conflict detected, ip address unset for host {1}.".format(ip_address, other_hostname))
instance = self.filter(hostname=hostname)
if instance.exists():
instance = instance.get()

View File

@ -2577,6 +2577,12 @@ class satellite6(PluginFileInjector):
def inventory_as_dict(self, inventory_update, private_data_dir):
ret = super(satellite6, self).inventory_as_dict(inventory_update, private_data_dir)
want_ansible_ssh_host = False
foreman_opts = inventory_update.source_vars_dict.copy()
for k, v in foreman_opts.items():
if k == 'satellite6_want_ansible_ssh_host' and isinstance(v, bool):
want_ansible_ssh_host = v
# Compatibility content
group_by_hostvar = {
"environment": {"prefix": "foreman_environment_",
@ -2603,6 +2609,9 @@ class satellite6(PluginFileInjector):
ret['want_facts'] = True
ret['want_params'] = True
if want_ansible_ssh_host:
ret['compose'] = {'ansible_ssh_host': "foreman['ip6'] | default(foreman['ip'], true)"}
return ret
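
In short, the hunk above makes the compose entry opt-in through a source variable. A hedged sketch of the trigger (the variable name and expression come from the diff; the dict below only shows where the flag would live):

```python
# Illustrative only: source_vars on the inventory source that enable the new behavior.
source_vars = {'satellite6_want_ansible_ssh_host': True}

# With that flag set to a boolean true, inventory_as_dict() adds the compose
# entry that also appears in the test fixture further down:
expected_compose = {'ansible_ssh_host': "foreman['ip6'] | default(foreman['ip'], true)"}
```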

View File

@ -2074,29 +2074,34 @@ class RunProjectUpdate(BaseTask):
if settings.GALAXY_IGNORE_CERTS:
env['ANSIBLE_GALAXY_IGNORE'] = True
# Set up the public Galaxy server, if enabled
galaxy_configured = False
if settings.PUBLIC_GALAXY_ENABLED:
galaxy_servers = [settings.PUBLIC_GALAXY_SERVER]
galaxy_servers = [settings.PUBLIC_GALAXY_SERVER] # static setting
else:
galaxy_configured = True
galaxy_servers = []
# Set up fallback Galaxy servers, if configured
if settings.FALLBACK_GALAXY_SERVERS:
galaxy_configured = True
galaxy_servers = settings.FALLBACK_GALAXY_SERVERS + galaxy_servers
# Set up the primary Galaxy server, if configured
if settings.PRIMARY_GALAXY_URL:
galaxy_configured = True
galaxy_servers = [{'id': 'primary_galaxy'}] + galaxy_servers
for key in GALAXY_SERVER_FIELDS:
value = getattr(settings, 'PRIMARY_GALAXY_{}'.format(key.upper()))
if value:
galaxy_servers[0][key] = value
for server in galaxy_servers:
for key in GALAXY_SERVER_FIELDS:
if not server.get(key):
continue
env_key = ('ANSIBLE_GALAXY_SERVER_{}_{}'.format(server.get('id', 'unnamed'), key)).upper()
env[env_key] = server[key]
if galaxy_servers:
# now set the precedence of galaxy servers
env['ANSIBLE_GALAXY_SERVER_LIST'] = ','.join([server.get('id', 'unnamed') for server in galaxy_servers])
if galaxy_configured:
for server in galaxy_servers:
for key in GALAXY_SERVER_FIELDS:
if not server.get(key):
continue
env_key = ('ANSIBLE_GALAXY_SERVER_{}_{}'.format(server.get('id', 'unnamed'), key)).upper()
env[env_key] = server[key]
if galaxy_servers:
# now set the precedence of galaxy servers
env['ANSIBLE_GALAXY_SERVER_LIST'] = ','.join([server.get('id', 'unnamed') for server in galaxy_servers])
return env
def _build_scm_url_extra_vars(self, project_update):
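
The new galaxy_configured flag means the ANSIBLE_GALAXY_SERVER_* variables are only written when the defaults are actually overridden (a primary or fallback server is configured, or the public server is disabled). A hedged sketch of the resulting environment when a primary server is set alongside the public one; the public server's id and URL, and GALAXY_SERVER_FIELDS containing 'url', are assumptions:

```python
# Illustrative sketch, not the task code itself. Assumed values are marked.
GALAXY_SERVER_FIELDS = ('url',)                                  # assumption
PRIMARY_GALAXY_URL = 'https://galaxy.example.com/'               # hypothetical setting
PUBLIC_GALAXY_SERVER = {'id': 'galaxy',                          # assumed id/shape
                        'url': 'https://galaxy.ansible.com/'}

galaxy_servers = [{'id': 'primary_galaxy', 'url': PRIMARY_GALAXY_URL}, PUBLIC_GALAXY_SERVER]
galaxy_configured = True  # set because PRIMARY_GALAXY_URL is configured

env = {}
if galaxy_configured:
    for server in galaxy_servers:
        for key in GALAXY_SERVER_FIELDS:
            if server.get(key):
                env_key = 'ANSIBLE_GALAXY_SERVER_{}_{}'.format(server['id'], key).upper()
                env[env_key] = server[key]
    env['ANSIBLE_GALAXY_SERVER_LIST'] = ','.join(s['id'] for s in galaxy_servers)

# Precedence: primary_galaxy is consulted before the public galaxy server.
assert env == {
    'ANSIBLE_GALAXY_SERVER_PRIMARY_GALAXY_URL': 'https://galaxy.example.com/',
    'ANSIBLE_GALAXY_SERVER_GALAXY_URL': 'https://galaxy.ansible.com/',
    'ANSIBLE_GALAXY_SERVER_LIST': 'primary_galaxy,galaxy',
}
```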

View File

@ -1,3 +1,5 @@
compose:
ansible_ssh_host: foreman['ip6'] | default(foreman['ip'], true)
keyed_groups:
- key: foreman['environment_name'] | lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9\_]', '_') | regex_replace('none', '')
prefix: foreman_environment_

View File

@ -0,0 +1,69 @@
import datetime
from awx.main.analytics.broadcast_websocket import FixedSlidingWindow
from awx.main.analytics.broadcast_websocket import dt_to_seconds
class TestFixedSlidingWindow():
def ts(self, **kwargs):
e = {
'year': 1985,
'month': 1,
'day': 1,
'hour': 1,
}
return dt_to_seconds(datetime.datetime(**kwargs, **e))
def test_record_same_minute(self):
"""
Legend:
- = record()
^ = render()
|---| = 1 minute, 60 seconds
....................
|------------------------------------------------------------|
^^^^^^^^^^^^^^^^^^^^
"""
fsw = FixedSlidingWindow(self.ts(minute=0, second=0, microsecond=0))
for i in range(20):
fsw.record(self.ts(minute=0, second=i, microsecond=0))
assert (i + 1) == fsw.render(self.ts(minute=0, second=i, microsecond=0))
def test_record_same_minute_render_diff_minute(self):
"""
Legend:
- = record()
^ = render()
|---| = 1 minute, 60 seconds
....................
|------------------------------------------------------------|
^^ ^
AB C
|------------------------------------------------------------|
^^^^^^^^^^^^^^^^^^^^^
DEEEEEEEEEEEEEEEEEEEF
"""
fsw = FixedSlidingWindow(self.ts(minute=0, second=0, microsecond=0))
for i in range(20):
fsw.record(self.ts(minute=0, second=i, microsecond=0))
assert 20 == fsw.render(self.ts(minute=0, second=19, microsecond=0)), \
"A. The second of the last record() call"
assert 20 == fsw.render(self.ts(minute=0, second=20, microsecond=0)), \
"B. The second after the last record() call"
assert 20 == fsw.render(self.ts(minute=0, second=59, microsecond=0)), \
"C. Last second in the same minute that all record() called in"
assert 20 == fsw.render(self.ts(minute=1, second=0, microsecond=0)), \
"D. First second of the minute following the minute that all record() calls in"
for i in range(20):
assert 20 - i == fsw.render(self.ts(minute=1, second=i, microsecond=0)), \
"E. Sliding window where 1 record() should drop from the results each time"
assert 0 == fsw.render(self.ts(minute=1, second=20, microsecond=0)), \
"F. First second one minute after all record() calls"

View File

@ -44,7 +44,7 @@ data_loggly = {
'https',
'\n'.join([
'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
'action(type="omhttp" server="logs-01.loggly.com" serverport="80" usehttps="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" errorfile="/var/log/tower/rsyslog.err" action.resumeInterval="5" restpath="inputs/1fd38090-2af1-4e1e-8d80-492899da0f71/tag/http/")', # noqa
'action(type="omhttp" server="logs-01.loggly.com" serverport="80" usehttps="off" allowunsignedcerts="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" errorfile="/var/log/tower/rsyslog.err" action.resumeInterval="5" restpath="inputs/1fd38090-2af1-4e1e-8d80-492899da0f71/tag/http/")', # noqa
])
),
(
@ -77,7 +77,7 @@ data_loggly = {
None,
'\n'.join([
'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
'action(type="omhttp" server="yoursplunk" serverport="443" usehttps="on" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" errorfile="/var/log/tower/rsyslog.err" action.resumeInterval="5" restpath="services/collector/event")', # noqa
'action(type="omhttp" server="yoursplunk" serverport="443" usehttps="on" allowunsignedcerts="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" errorfile="/var/log/tower/rsyslog.err" action.resumeInterval="5" restpath="services/collector/event")', # noqa
])
),
(
@ -88,7 +88,7 @@ data_loggly = {
None,
'\n'.join([
'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
'action(type="omhttp" server="yoursplunk" serverport="80" usehttps="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" errorfile="/var/log/tower/rsyslog.err" action.resumeInterval="5" restpath="services/collector/event")', # noqa
'action(type="omhttp" server="yoursplunk" serverport="80" usehttps="off" allowunsignedcerts="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" errorfile="/var/log/tower/rsyslog.err" action.resumeInterval="5" restpath="services/collector/event")', # noqa
])
),
(
@ -99,7 +99,7 @@ data_loggly = {
None,
'\n'.join([
'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
'action(type="omhttp" server="yoursplunk" serverport="8088" usehttps="on" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" errorfile="/var/log/tower/rsyslog.err" action.resumeInterval="5" restpath="services/collector/event")', # noqa
'action(type="omhttp" server="yoursplunk" serverport="8088" usehttps="on" allowunsignedcerts="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" errorfile="/var/log/tower/rsyslog.err" action.resumeInterval="5" restpath="services/collector/event")', # noqa
])
),
(
@ -110,7 +110,7 @@ data_loggly = {
None,
'\n'.join([
'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
'action(type="omhttp" server="yoursplunk" serverport="8088" usehttps="on" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" errorfile="/var/log/tower/rsyslog.err" action.resumeInterval="5" restpath="services/collector/event")', # noqa
'action(type="omhttp" server="yoursplunk" serverport="8088" usehttps="on" allowunsignedcerts="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" errorfile="/var/log/tower/rsyslog.err" action.resumeInterval="5" restpath="services/collector/event")', # noqa
])
),
(
@ -121,7 +121,7 @@ data_loggly = {
'https',
'\n'.join([
'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
'action(type="omhttp" server="yoursplunk.org" serverport="8088" usehttps="on" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" errorfile="/var/log/tower/rsyslog.err" action.resumeInterval="5" restpath="services/collector/event")', # noqa
'action(type="omhttp" server="yoursplunk.org" serverport="8088" usehttps="on" allowunsignedcerts="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" errorfile="/var/log/tower/rsyslog.err" action.resumeInterval="5" restpath="services/collector/event")', # noqa
])
),
(
@ -132,7 +132,7 @@ data_loggly = {
None,
'\n'.join([
'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
'action(type="omhttp" server="yoursplunk.org" serverport="8088" usehttps="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" errorfile="/var/log/tower/rsyslog.err" action.resumeInterval="5" restpath="services/collector/event")', # noqa
'action(type="omhttp" server="yoursplunk.org" serverport="8088" usehttps="off" allowunsignedcerts="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" errorfile="/var/log/tower/rsyslog.err" action.resumeInterval="5" restpath="services/collector/event")', # noqa
])
),
]

View File

@ -60,6 +60,7 @@ def construct_rsyslog_conf_template(settings=settings):
# https://github.com/rsyslog/rsyslog-doc/blob/master/source/configuration/modules/omhttp.rst
ssl = 'on' if parsed.scheme == 'https' else 'off'
skip_verify = 'off' if settings.LOG_AGGREGATOR_VERIFY_CERT else 'on'
allow_unsigned = 'off' if settings.LOG_AGGREGATOR_VERIFY_CERT else 'on'
if not port:
port = 443 if parsed.scheme == 'https' else 80
@ -68,6 +69,7 @@ def construct_rsyslog_conf_template(settings=settings):
f'server="{host}"',
f'serverport="{port}"',
f'usehttps="{ssl}"',
f'allowunsignedcerts="{allow_unsigned}"',
f'skipverifyhost="{skip_verify}"',
'action.resumeRetryCount="-1"',
'template="awx"',

View File

@ -136,9 +136,9 @@
register: doesRequirementsExist
- name: fetch galaxy roles from requirements.yml
command: ansible-galaxy install -r requirements.yml -p {{roles_destination|quote}}{{ ' -' + 'v' * ansible_verbosity if ansible_verbosity else '' }}
command: ansible-galaxy install -r roles/requirements.yml -p {{roles_destination|quote}}{{ ' -' + 'v' * ansible_verbosity if ansible_verbosity else '' }}
args:
chdir: "{{project_path|quote}}/roles"
chdir: "{{project_path|quote}}"
register: galaxy_result
when: doesRequirementsExist.stat.exists
changed_when: "'was installed successfully' in galaxy_result.stdout"
@ -157,9 +157,9 @@
register: doesCollectionRequirementsExist
- name: fetch galaxy collections from collections/requirements.yml
command: ansible-galaxy collection install -r requirements.yml -p {{collections_destination|quote}}{{ ' -' + 'v' * ansible_verbosity if ansible_verbosity else '' }}
command: ansible-galaxy collection install -r collections/requirements.yml -p {{collections_destination|quote}}{{ ' -' + 'v' * ansible_verbosity if ansible_verbosity else '' }}
args:
chdir: "{{project_path|quote}}/collections"
chdir: "{{project_path|quote}}"
register: galaxy_collection_result
when: doesCollectionRequirementsExist.stat.exists
changed_when: "'Installing ' in galaxy_collection_result.stdout"

View File

@ -80,7 +80,7 @@
collection="vm.approvals"
dataset="vm.dataset"
iterator="template"
base-path="unified_job_templates"
base-path="workflow_approvals"
query-set="vm.queryset"
hide-view-per-page="true">
</paginate>

View File

@ -116,7 +116,7 @@
</div>
</div>
</div>
<div class="License-file--middle License-helperText" translate>
<div class="License-file--middle License-helperText">
<div class="License-separator"></div>
<div translate>OR</div>
<div class="License-separator"></div>

View File

@ -60,15 +60,6 @@ msgstr "<b>我同意最终用户许可证协议</b>"
msgid "<b>User analytics</b>: This data is used to enhance future releases of the Tower Software and help streamline customer experience and success."
msgstr "<b>用户分析</b>:这些数据用于增强未来的 Tower 软件发行版本,并帮助简化客户体验和成功。"
#: client/src/license/license.partial.html:119
msgid ""
"<div class=\"License-separator\"></div>\n"
"\t\t\t\t\t\t<div translate=\"\">OR</div>\n"
"\t\t\t\t\t\t<div class=\"License-separator\"></div>"
msgstr "<div class=\"License-separator\"></div>\n"
"\\t\\t\\t\\t\\t\\t<div translate=\"\">或</div>\n"
"\\t\\t\\t\\t\\t\\t<div class=\"License-separator\"></div>"
#: client/src/login/loginModal/loginModal.partial.html:26
msgid ""
"<i class=\"LoginModal-alertIcon fa fa-exclamation-triangle\"></i>\n"

View File

@ -364,18 +364,16 @@ def main():
# End backwards compatibility
state = module.params.get('state')
# Attempt to look up the related items the user specified (these will fail the module if not found)
if organization:
org_id = module.resolve_name_to_id('organizations', organization)
if user:
user_id = module.resolve_name_to_id('users', user)
if team:
team_id = module.resolve_name_to_id('teams', team)
# Deprecation warnings
for legacy_input in OLD_INPUT_NAMES:
if module.params.get(legacy_input) is not None:
module.deprecate(msg='{0} parameter has been deprecated, please use inputs instead'.format(legacy_input), version="3.6")
if kind:
module.deprecate(msg='The kind parameter has been deprecated, please use credential_type instead', version="3.6")
cred_type_id = module.resolve_name_to_id('credential_types', credential_type if credential_type else KIND_CHOICES[kind])
if organization:
org_id = module.resolve_name_to_id('organizations', organization)
# Attempt to look up the object based on the provided name, credential type and optional organization
lookup_data = {
@ -384,13 +382,23 @@ def main():
}
if organization:
lookup_data['organization'] = org_id
credential = module.get_one('credentials', **{'data': lookup_data})
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
module.delete_if_needed(credential)
# Attempt to look up the related items the user specified (these will fail the module if not found)
if user:
user_id = module.resolve_name_to_id('users', user)
if team:
team_id = module.resolve_name_to_id('teams', team)
# Create credential input from legacy inputs
credential_inputs = {}
for legacy_input in OLD_INPUT_NAMES:
if module.params.get(legacy_input) is not None:
module.deprecate(msg='{0} parameter has been deprecated, please use inputs instead'.format(legacy_input), version="3.6")
credential_inputs[legacy_input] = module.params.get(legacy_input)
if inputs:
credential_inputs.update(inputs)
@ -415,14 +423,10 @@ def main():
if team:
credential_fields['team'] = team_id
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
module.delete_if_needed(credential)
elif state == 'present':
# If the state was present we can let the module build or update the existing group, this will return on its own
module.create_or_update_if_needed(
credential, credential_fields, endpoint='credentials', item_type='credential'
)
# If the state was present we can let the module build or update the existing group, this will return on its own
module.create_or_update_if_needed(
credential, credential_fields, endpoint='credentials', item_type='credential'
)
if __name__ == '__main__':
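
This restructuring, repeated across the modules that follow, moves the state == 'absent' branch up so an object can be deleted before any related names (users, teams, projects, inventories, and so on) are resolved; a delete therefore no longer fails just because a referenced object is gone. A sketch of the reordered flow, with placeholder callables standing in for the actual module utilities:

```python
# Sketch of the reordered control flow; lookup_id, get_one, delete_if_needed and
# create_or_update_if_needed are placeholders, not the real TowerModule API.
def run(params, lookup_id, get_one, delete_if_needed, create_or_update_if_needed):
    # 1. Find the existing object using only what is needed to identify it.
    existing = get_one('credentials', name=params['name'])

    # 2. Handle deletion before resolving related objects, so 'absent' works
    #    even if a referenced user or team no longer exists.
    if params['state'] == 'absent':
        return delete_if_needed(existing)

    # 3. Only the 'present' path resolves related items and builds the payload.
    fields = {'name': params['name']}
    if params.get('user'):
        fields['user'] = lookup_id('users', params['user'])
    if params.get('team'):
        fields['team'] = lookup_id('teams', params['team'])
    return create_or_update_if_needed(existing, fields)
```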

View File

@ -144,9 +144,9 @@ def main():
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
module.delete_if_needed(credential_type)
elif state == 'present':
# If the state was present and we can let the module build or update the existing credential type, this will return on its own
module.create_or_update_if_needed(credential_type, credential_type_params, endpoint='credential_types', item_type='credential type')
# If the state was present and we can let the module build or update the existing credential type, this will return on its own
module.create_or_update_if_needed(credential_type, credential_type_params, endpoint='credential_types', item_type='credential type')
if __name__ == '__main__':

View File

@ -123,6 +123,10 @@ def main():
}
})
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
module.delete_if_needed(group)
# Create the data that gets sent for create and update
group_fields = {
'name': new_name if new_name else name,
@ -149,15 +153,11 @@ def main():
if id_list:
association_fields[relationship] = id_list
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
module.delete_if_needed(group)
elif state == 'present':
# If the state was present we can let the module build or update the existing group, this will return on its own
module.create_or_update_if_needed(
group, group_fields, endpoint='groups', item_type='group',
associations=association_fields
)
# If the state was present we can let the module build or update the existing group, this will return on its own
module.create_or_update_if_needed(
group, group_fields, endpoint='groups', item_type='group',
associations=association_fields
)
if __name__ == '__main__':

View File

@ -119,6 +119,10 @@ def main():
}
})
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
module.delete_if_needed(host)
# Create the data that gets sent for create and update
host_fields = {
'name': new_name if new_name else name,
@ -130,12 +134,8 @@ def main():
if variables is not None:
host_fields['variables'] = json.dumps(variables)
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
module.delete_if_needed(host)
elif state == 'present':
# If the state was present and we can let the module build or update the existing host, this will return on its own
module.create_or_update_if_needed(host, host_fields, endpoint='hosts', item_type='host')
# If the state was present and we can let the module build or update the existing host, this will return on its own
module.create_or_update_if_needed(host, host_fields, endpoint='hosts', item_type='host')
if __name__ == '__main__':

View File

@ -119,6 +119,10 @@ def main():
}
})
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
module.delete_if_needed(inventory)
# Create the data that gets sent for create and update
inventory_fields = {
'name': name,
@ -131,16 +135,12 @@ def main():
if variables is not None:
inventory_fields['variables'] = json.dumps(variables)
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
module.delete_if_needed(inventory)
elif state == 'present':
# We need to perform a check to make sure you are not trying to convert a regular inventory into a smart one.
if inventory and inventory['kind'] == '' and inventory_fields['kind'] == 'smart':
module.fail_json(msg='You cannot turn a regular inventory into a "smart" inventory.')
# We need to perform a check to make sure you are not trying to convert a regular inventory into a smart one.
if inventory and inventory['kind'] == '' and inventory_fields['kind'] == 'smart':
module.fail_json(msg='You cannot turn a regular inventory into a "smart" inventory.')
# If the state was present and we can let the module build or update the existing inventory, this will return on its own
module.create_or_update_if_needed(inventory, inventory_fields, endpoint='inventories', item_type='inventory')
# If the state was present and we can let the module build or update the existing inventory, this will return on its own
module.create_or_update_if_needed(inventory, inventory_fields, endpoint='inventories', item_type='inventory')
if __name__ == '__main__':

View File

@ -198,6 +198,10 @@ def main():
}
})
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
module.delete_if_needed(inventory_source)
# Create the data that gets sent for create and update
inventory_source_fields = {
'name': new_name if new_name else name,
@ -234,12 +238,8 @@ def main():
if state == 'present' and not inventory_source and not inventory_source_fields['source']:
module.fail_json(msg="If creating a new inventory source, the source param must be present")
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
module.delete_if_needed(inventory_source)
elif state == 'present':
# If the state was present we can let the module build or update the existing inventory_source, this will return on its own
module.create_or_update_if_needed(inventory_source, inventory_source_fields, endpoint='inventory_sources', item_type='inventory source')
# If the state was present we can let the module build or update the existing inventory_source, this will return on its own
module.create_or_update_if_needed(inventory_source, inventory_source_fields, endpoint='inventory_sources', item_type='inventory source')
if __name__ == '__main__':

View File

@ -414,6 +414,10 @@ def main():
}
})
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
module.delete_if_needed(existing_item)
# Create the data that gets sent for create and update
new_fields = {}
new_fields['name'] = new_name if new_name else name
@ -490,23 +494,19 @@ def main():
module._encrypted_changed_warning('survey_spec', existing_item, warning=True)
on_change = update_survey
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
module.delete_if_needed(existing_item)
elif state == 'present':
# If the state was present and we can let the module build or update the existing item, this will return on its own
module.create_or_update_if_needed(
existing_item, new_fields,
endpoint='job_templates', item_type='job_template',
associations={
'credentials': credentials_ids,
'labels': labels_ids,
'notification_templates_success': notification_success_ids,
'notification_templates_started': notification_start_ids,
'notification_templates_error': notification_error_ids
},
on_create=on_change, on_update=on_change,
)
# If the state was present and we can let the module build or update the existing item, this will return on its own
module.create_or_update_if_needed(
existing_item, new_fields,
endpoint='job_templates', item_type='job_template',
associations={
'credentials': credentials_ids,
'labels': labels_ids,
'notification_templates_success': notification_success_ids,
'notification_templates_started': notification_start_ids,
'notification_templates_error': notification_error_ids
},
on_create=on_change, on_update=on_change,
)
if __name__ == '__main__':

View File

@ -403,6 +403,11 @@ def main():
messages = module.params.get('messages')
state = module.params.get('state')
# Deprecation warnings
for legacy_input in OLD_INPUT_NAMES:
if module.params.get(legacy_input) is not None:
module.deprecate(msg='{0} parameter has been deprecated, please use notification_configuration instead.'.format(legacy_input), version="3.6")
# Attempt to look up the related items the user specified (these will fail the module if not found)
organization_id = None
if organization:
@ -416,11 +421,14 @@ def main():
}
})
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
module.delete_if_needed(existing_item)
# Create notification_configuration from legacy inputs
final_notification_configuration = {}
for legacy_input in OLD_INPUT_NAMES:
if module.params.get(legacy_input) is not None:
module.deprecate(msg='{0} parameter has been deprecated, please use notification_configuration instead.'.format(legacy_input), version="3.6")
final_notification_configuration[legacy_input] = module.params.get(legacy_input)
# Give anything in notification_configuration precedence over the individual inputs
if notification_configuration is not None:
@ -440,17 +448,13 @@ def main():
if messages is not None:
new_fields['messages'] = messages
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
module.delete_if_needed(existing_item)
elif state == 'present':
# If the state was present and we can let the module build or update the existing item, this will return on its own
module.create_or_update_if_needed(
existing_item, new_fields,
endpoint='notification_templates', item_type='notification_template',
associations={
}
)
# If the state was present and we can let the module build or update the existing item, this will return on its own
module.create_or_update_if_needed(
existing_item, new_fields,
endpoint='notification_templates', item_type='notification_template',
associations={
}
)
if __name__ == '__main__':

View File

@ -108,6 +108,10 @@ def main():
}
})
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
module.delete_if_needed(organization)
# Create the data that gets sent for create and update
org_fields = {'name': name}
if description is not None:
@ -117,12 +121,8 @@ def main():
if max_hosts is not None:
org_fields['max_hosts'] = max_hosts
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
module.delete_if_needed(organization)
elif state == 'present':
# If the state was present and we can let the module build or update the existing organization, this will return on its own
module.create_or_update_if_needed(organization, org_fields, endpoint='organizations', item_type='organization')
# If the state was present and we can let the module build or update the existing organization, this will return on its own
module.create_or_update_if_needed(organization, org_fields, endpoint='organizations', item_type='organization')
if __name__ == '__main__':

View File

@ -236,6 +236,10 @@ def main():
}
})
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
module.delete_if_needed(project)
# Create the data that gets sent for create and update
project_fields = {
'name': name,
@ -260,7 +264,7 @@ def main():
if scm_type == '':
project_fields['local_path'] = local_path
if state != 'absent' and (scm_update_cache_timeout != 0 and scm_update_on_launch is not True):
if scm_update_cache_timeout != 0 and scm_update_on_launch is not True:
module.warn('scm_update_cache_timeout will be ignored since scm_update_on_launch was not set to true')
# If the project is not a manual one, register our on_change method
@ -269,12 +273,8 @@ def main():
if wait and scm_type != '':
on_change = wait_for_project_update
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
module.delete_if_needed(project)
elif state == 'present':
# If the state was present and we can let the module build or update the existing project, this will return on its own
module.create_or_update_if_needed(project, project_fields, endpoint='projects', item_type='project', on_create=on_change, on_update=on_change)
# If the state was present and we can let the module build or update the existing project, this will return on its own
module.create_or_update_if_needed(project, project_fields, endpoint='projects', item_type='project', on_create=on_change, on_update=on_change)
if __name__ == '__main__':

View File

@ -102,6 +102,10 @@ def main():
}
})
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
module.delete_if_needed(team)
# Create the data that gets sent for create and update
team_fields = {
'name': new_name if new_name else name,
@ -110,12 +114,8 @@ def main():
if description is not None:
team_fields['description'] = description
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
module.delete_if_needed(team)
elif state == 'present':
# If the state was present and we can let the module build or update the existing team, this will return on its own
module.create_or_update_if_needed(team, team_fields, endpoint='teams', item_type='team')
# If the state was present and we can let the module build or update the existing team, this will return on its own
module.create_or_update_if_needed(team, team_fields, endpoint='teams', item_type='team')
if __name__ == '__main__':

View File

@ -147,6 +147,10 @@ def main():
}
})
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
module.delete_if_needed(existing_item)
# Create the data that gets sent for create and update
new_fields = {}
if username:
@ -164,12 +168,8 @@ def main():
if password:
new_fields['password'] = password
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
module.delete_if_needed(existing_item)
elif state == 'present':
# If the state was present and we can let the module build or update the existing item, this will return on its own
module.create_or_update_if_needed(existing_item, new_fields, endpoint='users', item_type='user')
# If the state was present and we can let the module build or update the existing item, this will return on its own
module.create_or_update_if_needed(existing_item, new_fields, endpoint='users', item_type='user')
if __name__ == '__main__':

View File

@ -175,6 +175,13 @@ def main():
organization_id = module.resolve_name_to_id('organizations', organization)
search_fields['organization'] = new_fields['organization'] = organization_id
# Attempt to look up an existing item based on the provided data
existing_item = module.get_one('workflow_job_templates', **{'data': search_fields})
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
module.delete_if_needed(existing_item)
inventory = module.params.get('inventory')
if inventory:
new_fields['inventory'] = module.resolve_name_to_id('inventories', inventory)
@ -183,9 +190,6 @@ def main():
if webhook_credential:
new_fields['webhook_credential'] = module.resolve_name_to_id('webhook_credential', webhook_credential)
# Attempt to look up an existing item based on the provided data
existing_item = module.get_one('workflow_job_templates', **{'data': search_fields})
# Create the data that gets sent for create and update
new_fields['name'] = new_name if new_name else name
for field_name in (
@ -213,16 +217,12 @@ def main():
module._encrypted_changed_warning('survey_spec', existing_item, warning=True)
on_change = update_survey
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
module.delete_if_needed(existing_item)
elif state == 'present':
# If the state was present and we can let the module build or update the existing item, this will return on its own
module.create_or_update_if_needed(
existing_item, new_fields,
endpoint='workflow_job_templates', item_type='workflow_job_template',
on_create=on_change, on_update=on_change
)
# If the state was present and we can let the module build or update the existing item, this will return on its own
module.create_or_update_if_needed(
existing_item, new_fields,
endpoint='workflow_job_templates', item_type='workflow_job_template',
on_create=on_change, on_update=on_change
)
if __name__ == '__main__':

View File

@ -218,6 +218,13 @@ def main():
workflow_job_template_id = wfjt_data['id']
search_fields['workflow_job_template'] = new_fields['workflow_job_template'] = workflow_job_template_id
# Attempt to look up an existing item based on the provided data
existing_item = module.get_one('workflow_job_template_nodes', **{'data': search_fields})
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
module.delete_if_needed(existing_item)
unified_job_template = module.params.get('unified_job_template')
if unified_job_template:
new_fields['unified_job_template'] = module.resolve_name_to_id('unified_job_templates', unified_job_template)
@ -226,9 +233,6 @@ def main():
if inventory:
new_fields['inventory'] = module.resolve_name_to_id('inventory', inventory)
# Attempt to look up an existing item based on the provided data
existing_item = module.get_one('workflow_job_template_nodes', **{'data': search_fields})
# Create the data that gets sent for create and update
for field_name in (
'identifier', 'extra_data', 'scm_branch', 'job_type', 'job_tags', 'skip_tags',
@ -262,16 +266,12 @@ def main():
# In the case of a new object, the utils need to know it is a node
new_fields['type'] = 'workflow_job_template_node'
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
module.delete_if_needed(existing_item)
elif state == 'present':
# If the state was present and we can let the module build or update the existing item, this will return on its own
module.create_or_update_if_needed(
existing_item, new_fields,
endpoint='workflow_job_template_nodes', item_type='workflow_job_template_node',
associations=association_fields
)
# If the state was present and we can let the module build or update the existing item, this will return on its own
module.create_or_update_if_needed(
existing_item, new_fields,
endpoint='workflow_job_template_nodes', item_type='workflow_job_template_node',
associations=association_fields
)
if __name__ == '__main__':

View File

@ -40,17 +40,13 @@
that:
- "result is changed"
- name: Delete the source inventory
- name: Delete the inventory source with an invalid cred, source_project, source_script specified
tower_inventory_source:
name: "{{ openstack_inv_source }}"
description: Source for Test inventory
inventory: "{{ openstack_inv }}"
credential: "{{ openstack_cred }}"
overwrite: true
update_on_launch: true
source_vars:
private: false
source: openstack
credential: "Does Not Exist"
source_project: "Does Not Exist"
source_script: "Does Not Exist"
state: absent
- assert:

View File

@ -147,10 +147,11 @@
- name: Delete Job Template 1
tower_job_template:
name: "{{ jt1 }}"
project: "{{ proj1 }}"
inventory: Demo Inventory
playbook: hello_world.yml
job_type: run
project: "Does Not Exist"
inventory: "Does Not Exist"
webhook_credential: "Does Not Exist"
state: absent
register: result

View File

@ -104,9 +104,11 @@
unified_job_template: "{{ jt1_name }}"
workflow: "{{ wfjt_name }}"
- name: Delete a workflow job template
- name: Delete a workflow job template with an invalid inventory and webhook_credential
tower_workflow_job_template:
name: "{{ wfjt_name }}"
inventory: "Does Not Exist"
webhook_credential: "Does Not Exist"
state: absent
register: result

View File

@ -188,6 +188,10 @@ def main():
}
})
if state is 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
module.delete_if_needed(existing_item)
# Create the data that gets sent for create and update
new_fields = {}
{% for option in item['json']['actions']['POST'] %}
@ -203,20 +207,16 @@ def main():
{% endif %}
{% endfor %}
if state is 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
module.delete_if_needed(existing_item)
elif state is 'present':
# If the state was present and we can let the module build or update the existing item, this will return on its own
module.create_or_update_if_needed(
existing_item, new_fields,
endpoint='{{ item_type }}', item_type='{{ singular_item_type }}',
associations={
# If the state was present and we can let the module build or update the existing item, this will return on its own
module.create_or_update_if_needed(
existing_item, new_fields,
endpoint='{{ item_type }}', item_type='{{ singular_item_type }}',
associations={
{% for association in associations[item_type] | default([]) %}
'{{ association['endpoint'] }}': {{ association['related_item'] }}_ids,
'{{ association['endpoint'] }}': {{ association['related_item'] }}_ids,
{% endfor %}
}
)
}
)
if __name__ == '__main__':

View File

@ -50,7 +50,7 @@ stderr_logfile_maxbytes=0
command = rsyslogd -n -i /var/run/awx-rsyslog/rsyslog.pid -f /var/lib/awx/rsyslog/rsyslog.conf
autostart = true
autorestart = true
stopwaitsecs = 1
stopwaitsecs = 5
stopsignal=KILL
stopasgroup=true
killasgroup=true

View File

@ -57,7 +57,7 @@ data:
command = rsyslogd -n -i /var/run/awx-rsyslog/rsyslog.pid -f /var/lib/awx/rsyslog/rsyslog.conf
autostart = true
autorestart = true
stopwaitsecs = 1
stopwaitsecs = 5
stopsignal=KILL
stopasgroup=true
killasgroup=true

View File

@ -75,7 +75,7 @@ stdout_logfile_maxbytes=0
command = rsyslogd -n -i /var/run/awx-rsyslog/rsyslog.pid -f /var/lib/awx/rsyslog/rsyslog.conf
autostart = true
autorestart = true
stopwaitsecs = 1
stopwaitsecs = 5
stopsignal=KILL
stopasgroup=true
killasgroup=true

View File

@ -28,8 +28,10 @@ SOSREPORT_TOWER_DIRS = [
"/var/log/tower",
"/var/log/nginx",
"/var/log/supervisor",
"/var/log/redis",
"/etc/opt/rh/rh-redis5/redis.conf",
"/etc/redis.conf",
"/var/opt/rh/rh-redis5/log/redis/redis.log",
"/var/log/dist-upgrade",
"/var/log/installer",
"/var/log/unattended-upgrades",