Compare commits


15 Commits

Author SHA1 Message Date
John Westcott IV
af59abbbc4 Fixing NUL characters in event data 2023-05-02 14:37:35 -04:00
John Westcott IV
8ab3514428 Fixing ValueError becoming DataError 2023-05-02 11:47:12 -04:00
John Westcott IV
98781a82c7 Merge branch 'feature-django-upgrade' of github.com:ansible/awx into feature-django-upgrade 2023-05-02 11:45:51 -04:00
John Westcott IV
d3fabe81d1 Fixing deprecated use of QuerySet.iterator() after prefetch_related() without specifying chunk_size 2023-04-28 15:32:20 -04:00
John Westcott IV
b274d0e5ef Removing deprecated django.utils.timezone.utc alias in favor of datetime.timezone.utc 2023-04-28 15:32:20 -04:00
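A minimal before/after sketch of that alias swap (illustrative, not taken from the diff):

import datetime
# old (deprecated): from django.utils.timezone import utc
# new: use the stdlib constant directly
now = datetime.datetime.now(tz=datetime.timezone.utc)
print(now.tzinfo)  # timezone.utc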
John Westcott IV
4494412f0c Replacing deprecated index_together with new indexes 2023-04-28 15:31:28 -04:00
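A minimal sketch of that Meta.index_together to Meta.indexes conversion (JobEvent and its fields here are a stand-in, not the AWX model definition):

from django.db import models

class JobEvent(models.Model):
    job = models.IntegerField()
    counter = models.IntegerField()

    class Meta:
        # deprecated in Django 4.2: index_together = [("job", "counter")]
        indexes = [models.Index(fields=["job", "counter"])]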
John Westcott IV
b82bec7d04 Replacing psycopg2.copy_expert with psycopg3.copy 2023-04-28 12:35:49 -04:00
John Westcott IV
2cee1caad2 Fixing final CI error 2023-04-28 12:35:49 -04:00
John Westcott IV
c3045b1169 Updating old migrations for psycopg3
We have both psycopg2 and psycopg3 installed in the AWX venv.

Older versions of Django used only psycopg2, but 4.2 now supports psycopg3.

Django 4.2 detects psycopg3 first and will use it over psycopg2.

So old migrations needed to be updated to support psycopg3.
2023-04-28 12:35:49 -04:00
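The detection described in the commit body comes down to import order: Django 4.2's postgresql backend tries psycopg3 first and only falls back to psycopg2. A rough sketch of that behavior (not AWX code):

try:
    import psycopg as Database  # psycopg 3.x wins when both drivers are installed
except ImportError:
    import psycopg2 as Database

print(Database.__name__)  # 'psycopg' in a venv that has both installed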
John Westcott IV
27024378bc Upgrading Django to 4.2 LTS 2023-04-28 12:35:49 -04:00
John Westcott IV
8eff90d4c0 Adding upgrade to django-oauth-toolkit pre-migration 2023-04-28 12:35:49 -04:00
John Westcott IV
9b633b6492 Fixing final CI error 2023-04-27 08:00:56 -04:00
John Westcott IV
11dbc56ecb Updating old migrations for psycopg3
We have both psycopg2 and psycopg3 installed in the AWX venv.

Older versions of Django used only psycopg2, but 4.2 now supports psycopg3.

Django 4.2 detects psycopg3 first and will use it over psycopg2.

So old migrations needed to be updated to support psycopg3.
2023-04-26 09:10:25 -04:00
John Westcott IV
4c1bd1e88e Upgrading Django to 4.2 LTS 2023-04-26 09:10:25 -04:00
John Westcott IV
865cb7518e Adding upgrade to django-oauth-toolkit pre-migration 2023-04-26 09:10:25 -04:00
128 changed files with 974 additions and 1973 deletions

View File

@@ -19,8 +19,6 @@ body:
required: true
- label: I understand that AWX is open source software provided for free and that I might not receive a timely response.
required: true
- label: I am **NOT** reporting a (potential) security vulnerability. (These should be emailed to `security@ansible.com` instead.)
required: true
- type: textarea
id: summary

View File

@@ -3,7 +3,7 @@ name: CI
env:
LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
CI_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
DEV_DOCKER_OWNER: ${{ github.repository_owner }}
DEV_DOCKER_TAG_BASE: ghcr.io/${{ github.repository_owner }}
COMPOSE_TAG: ${{ github.base_ref || 'devel' }}
on:
pull_request:

View File

@@ -42,10 +42,7 @@ TACACS ?= false
VENV_BASE ?= /var/lib/awx/venv
DEV_DOCKER_OWNER ?= ansible
# Docker will only accept lowercase, so github names like Paul need to be paul
DEV_DOCKER_OWNER_LOWER = $(shell echo $(DEV_DOCKER_OWNER) | tr A-Z a-z)
DEV_DOCKER_TAG_BASE ?= ghcr.io/$(DEV_DOCKER_OWNER_LOWER)
DEV_DOCKER_TAG_BASE ?= ghcr.io/ansible
DEVEL_IMAGE_NAME ?= $(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG)
RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel
@@ -660,12 +657,10 @@ awx-kube-dev-build: Dockerfile.kube-dev
## generate UI .pot file, an empty template of strings yet to be translated
pot: $(UI_BUILD_FLAG_FILE)
$(NPM_BIN) --prefix awx/ui --loglevel warn run extract-template --clean
$(NPM_BIN) --prefix awx/ui_next --loglevel warn run extract-template --clean
## generate UI .po files for each locale (will update translated strings for `en`)
po: $(UI_BUILD_FLAG_FILE)
$(NPM_BIN) --prefix awx/ui --loglevel warn run extract-strings -- --clean
$(NPM_BIN) --prefix awx/ui_next --loglevel warn run extract-strings -- --clean
## generate API django .pot .po
messages:

View File

@@ -347,7 +347,7 @@ class FieldLookupBackend(BaseFilterBackend):
args.append(Q(**{k: v}))
for role_name in role_filters:
if not hasattr(queryset.model, 'accessible_pk_qs'):
raise ParseError(_('Cannot apply role_level filter to this list because its model does not use roles for access control.'))
raise ParseError(_('Cannot apply role_level filter to this list because its model ' 'does not use roles for access control.'))
args.append(Q(pk__in=queryset.model.accessible_pk_qs(request.user, role_name)))
if or_filters:
q = Q()

View File

@@ -169,7 +169,7 @@ class APIView(views.APIView):
self.__init_request_error__ = exc
except UnsupportedMediaType as exc:
exc.detail = _(
'You did not use correct Content-Type in your HTTP request. If you are using our REST API, the Content-Type must be application/json'
'You did not use correct Content-Type in your HTTP request. ' 'If you are using our REST API, the Content-Type must be application/json'
)
self.__init_request_error__ = exc
return drf_request
@@ -522,16 +522,14 @@ class SubListAPIView(ParentMixin, ListAPIView):
def get_queryset(self):
parent = self.get_parent_object()
self.check_parent_access(parent)
sublist_qs = self.get_sublist_queryset(parent)
if not self.filter_read_permission:
return optimize_queryset(self.get_sublist_queryset(parent))
qs = self.request.user.get_queryset(self.model)
if hasattr(self, 'parent_key'):
# This is vastly preferable for ReverseForeignKey relationships
return qs.filter(**{self.parent_key: parent})
return qs.distinct() & self.get_sublist_queryset(parent).distinct()
return optimize_queryset(sublist_qs)
qs = self.request.user.get_queryset(self.model).distinct()
return qs & sublist_qs
def get_sublist_queryset(self, parent):
return getattrd(parent, self.relationship)
return getattrd(parent, self.relationship).distinct()
class DestroyAPIView(generics.DestroyAPIView):
@@ -580,6 +578,15 @@ class SubListCreateAPIView(SubListAPIView, ListCreateAPIView):
d.update({'parent_key': getattr(self, 'parent_key', None)})
return d
def get_queryset(self):
if hasattr(self, 'parent_key'):
# Prefer this filtering because ForeignKey allows us more assumptions
parent = self.get_parent_object()
self.check_parent_access(parent)
qs = self.request.user.get_queryset(self.model)
return qs.filter(**{self.parent_key: parent})
return super(SubListCreateAPIView, self).get_queryset()
def create(self, request, *args, **kwargs):
# If the object ID was not specified, it probably doesn't exist in the
# DB yet. We want to see if we can create it. The URL may choose to

View File

@@ -71,7 +71,7 @@ class Metadata(metadata.SimpleMetadata):
'url': _('URL for this {}.'),
'related': _('Data structure with URLs of related resources.'),
'summary_fields': _(
'Data structure with name/description for related resources. The output for some objects may be limited for performance reasons.'
'Data structure with name/description for related resources. ' 'The output for some objects may be limited for performance reasons.'
),
'created': _('Timestamp when this {} was created.'),
'modified': _('Timestamp when this {} was last modified.'),

View File

@@ -220,7 +220,7 @@ class CopySerializer(serializers.Serializer):
view = self.context.get('view', None)
obj = view.get_object()
if name == obj.name:
raise serializers.ValidationError(_('The original object is already named {}, a copy from it cannot have the same name.'.format(name)))
raise serializers.ValidationError(_('The original object is already named {}, a copy from' ' it cannot have the same name.'.format(name)))
return attrs
@@ -760,7 +760,7 @@ class UnifiedJobTemplateSerializer(BaseSerializer):
class UnifiedJobSerializer(BaseSerializer):
show_capabilities = ['start', 'delete']
event_processing_finished = serializers.BooleanField(
help_text=_('Indicates whether all of the events generated by this unified job have been saved to the database.'), read_only=True
help_text=_('Indicates whether all of the events generated by this ' 'unified job have been saved to the database.'), read_only=True
)
class Meta:
@@ -1579,7 +1579,7 @@ class ProjectPlaybooksSerializer(ProjectSerializer):
class ProjectInventoriesSerializer(ProjectSerializer):
inventory_files = serializers.ReadOnlyField(help_text=_('Array of inventory files and directories available within this project, not comprehensive.'))
inventory_files = serializers.ReadOnlyField(help_text=_('Array of inventory files and directories available within this project, ' 'not comprehensive.'))
class Meta:
model = Project
@@ -2905,7 +2905,7 @@ class CredentialSerializer(BaseSerializer):
):
if getattr(self.instance, related_objects).count() > 0:
raise ValidationError(
_('You cannot change the credential type of the credential, as it may break the functionality of the resources using it.')
_('You cannot change the credential type of the credential, as it may break the functionality' ' of the resources using it.')
)
return credential_type
@@ -2925,7 +2925,7 @@ class CredentialSerializerCreate(CredentialSerializer):
default=None,
write_only=True,
allow_null=True,
help_text=_('Write-only field used to add user to owner role. If provided, do not give either team or organization. Only valid for creation.'),
help_text=_('Write-only field used to add user to owner role. If provided, ' 'do not give either team or organization. Only valid for creation.'),
)
team = serializers.PrimaryKeyRelatedField(
queryset=Team.objects.all(),
@@ -2933,14 +2933,14 @@ class CredentialSerializerCreate(CredentialSerializer):
default=None,
write_only=True,
allow_null=True,
help_text=_('Write-only field used to add team to owner role. If provided, do not give either user or organization. Only valid for creation.'),
help_text=_('Write-only field used to add team to owner role. If provided, ' 'do not give either user or organization. Only valid for creation.'),
)
organization = serializers.PrimaryKeyRelatedField(
queryset=Organization.objects.all(),
required=False,
default=None,
allow_null=True,
help_text=_('Inherit permissions from organization roles. If provided on creation, do not give either user or team.'),
help_text=_('Inherit permissions from organization roles. If provided on creation, ' 'do not give either user or team.'),
)
class Meta:
@@ -2962,7 +2962,7 @@ class CredentialSerializerCreate(CredentialSerializer):
if len(owner_fields) > 1:
received = ", ".join(sorted(owner_fields))
raise serializers.ValidationError(
{"detail": _("Only one of 'user', 'team', or 'organization' should be provided, received {} fields.".format(received))}
{"detail": _("Only one of 'user', 'team', or 'organization' should be provided, " "received {} fields.".format(received))}
)
if attrs.get('team'):
@@ -3622,7 +3622,7 @@ class SystemJobSerializer(UnifiedJobSerializer):
try:
return obj.result_stdout
except StdoutMaxBytesExceeded as e:
return _("Standard Output too large to display ({text_size} bytes), only download supported for sizes over {supported_size} bytes.").format(
return _("Standard Output too large to display ({text_size} bytes), " "only download supported for sizes over {supported_size} bytes.").format(
text_size=e.total, supported_size=e.supported
)
@@ -4536,7 +4536,7 @@ class JobLaunchSerializer(BaseSerializer):
if cred.unique_hash() in provided_mapping.keys():
continue # User replaced credential with new of same type
errors.setdefault('credentials', []).append(
_('Removing {} credential at launch time without replacement is not supported. Provided list lacked credential(s): {}.').format(
_('Removing {} credential at launch time without replacement is not supported. ' 'Provided list lacked credential(s): {}.').format(
cred.unique_hash(display=True), ', '.join([str(c) for c in removed_creds])
)
)
@@ -5019,7 +5019,7 @@ class NotificationTemplateSerializer(BaseSerializer):
for subevent in event_messages:
if subevent not in ('running', 'approved', 'timed_out', 'denied'):
error_list.append(
_("Workflow Approval event '{}' invalid, must be one of 'running', 'approved', 'timed_out', or 'denied'").format(subevent)
_("Workflow Approval event '{}' invalid, must be one of " "'running', 'approved', 'timed_out', or 'denied'").format(subevent)
)
continue
subevent_messages = event_messages[subevent]
@@ -5559,7 +5559,7 @@ class InstanceGroupSerializer(BaseSerializer):
instances = serializers.SerializerMethodField()
is_container_group = serializers.BooleanField(
required=False,
help_text=_('Indicates whether instances in this group are containerized.Containerized groups have a designated Openshift or Kubernetes cluster.'),
help_text=_('Indicates whether instances in this group are containerized.' 'Containerized groups have a designated Openshift or Kubernetes cluster.'),
)
# NOTE: help_text is duplicated from field definitions, no obvious way of
# both defining field details here and also getting the field's help_text
@@ -5570,7 +5570,7 @@ class InstanceGroupSerializer(BaseSerializer):
required=False,
initial=0,
label=_('Policy Instance Percentage'),
help_text=_("Minimum percentage of all instances that will be automatically assigned to this group when new instances come online."),
help_text=_("Minimum percentage of all instances that will be automatically assigned to " "this group when new instances come online."),
)
policy_instance_minimum = serializers.IntegerField(
default=0,
@@ -5578,7 +5578,7 @@ class InstanceGroupSerializer(BaseSerializer):
required=False,
initial=0,
label=_('Policy Instance Minimum'),
help_text=_("Static minimum number of Instances that will be automatically assign to this group when new instances come online."),
help_text=_("Static minimum number of Instances that will be automatically assign to " "this group when new instances come online."),
)
max_concurrent_jobs = serializers.IntegerField(
default=0,

View File

@@ -565,7 +565,7 @@ class LaunchConfigCredentialsBase(SubListAttachDetachAPIView):
if self.relationship not in ask_mapping:
return {"msg": _("Related template cannot accept {} on launch.").format(self.relationship)}
elif sub.passwords_needed:
return {"msg": _("Credential that requires user input on launch cannot be used in saved launch configuration.")}
return {"msg": _("Credential that requires user input on launch " "cannot be used in saved launch configuration.")}
ask_field_name = ask_mapping[self.relationship]
@@ -2501,7 +2501,7 @@ class JobTemplateSurveySpec(GenericAPIView):
return Response(
dict(
error=_(
"$encrypted$ is a reserved keyword for password question defaults, survey question {idx} is type {survey_item[type]}."
"$encrypted$ is a reserved keyword for password question defaults, " "survey question {idx} is type {survey_item[type]}."
).format(**context)
),
status=status.HTTP_400_BAD_REQUEST,
@@ -3333,6 +3333,7 @@ class JobLabelList(SubListAPIView):
serializer_class = serializers.LabelSerializer
parent_model = models.Job
relationship = 'labels'
parent_key = 'job'
class WorkflowJobLabelList(JobLabelList):
@@ -4055,7 +4056,7 @@ class UnifiedJobStdout(RetrieveAPIView):
return super(UnifiedJobStdout, self).retrieve(request, *args, **kwargs)
except models.StdoutMaxBytesExceeded as e:
response_message = _(
"Standard Output too large to display ({text_size} bytes), only download supported for sizes over {supported_size} bytes."
"Standard Output too large to display ({text_size} bytes), " "only download supported for sizes over {supported_size} bytes."
).format(text_size=e.total, supported_size=e.supported)
if request.accepted_renderer.format == 'json':
return Response({'range': {'start': 0, 'end': 1, 'absolute_end': 1}, 'content': response_message})

View File

@@ -50,7 +50,7 @@ class UnifiedJobDeletionMixin(object):
return Response({"error": _("Job has not finished processing events.")}, status=status.HTTP_400_BAD_REQUEST)
else:
# if it has been > 1 minute, events are probably lost
logger.warning('Allowing deletion of {} through the API without all events processed.'.format(obj.log_format))
logger.warning('Allowing deletion of {} through the API without all events ' 'processed.'.format(obj.log_format))
# Manually cascade delete events if unpartitioned job
if obj.has_unpartitioned_events:

View File

@@ -114,7 +114,7 @@ class WebhookReceiverBase(APIView):
# Ensure that the full contents of the request are captured for multiple uses.
request.body
logger.debug("headers: {}\ndata: {}\n".format(request.headers, request.data))
logger.debug("headers: {}\n" "data: {}\n".format(request.headers, request.data))
obj = self.get_object()
self.check_signature(obj)

View File

@@ -35,7 +35,7 @@ class TestStringListBooleanField:
field = StringListBooleanField()
with pytest.raises(ValidationError) as e:
field.to_internal_value(value)
assert e.value.detail[0] == "Expected None, True, False, a string or list of strings but got {} instead.".format(type(value))
assert e.value.detail[0] == "Expected None, True, False, a string or list " "of strings but got {} instead.".format(type(value))
@pytest.mark.parametrize("value_in, value_known", FIELD_VALUES)
def test_to_representation_valid(self, value_in, value_known):
@@ -48,7 +48,7 @@ class TestStringListBooleanField:
field = StringListBooleanField()
with pytest.raises(ValidationError) as e:
field.to_representation(value)
assert e.value.detail[0] == "Expected None, True, False, a string or list of strings but got {} instead.".format(type(value))
assert e.value.detail[0] == "Expected None, True, False, a string or list " "of strings but got {} instead.".format(type(value))
class TestListTuplesField:
@@ -67,7 +67,7 @@ class TestListTuplesField:
field = ListTuplesField()
with pytest.raises(ValidationError) as e:
field.to_internal_value(value)
assert e.value.detail[0] == "Expected a list of tuples of max length 2 but got {} instead.".format(t)
assert e.value.detail[0] == "Expected a list of tuples of max length 2 " "but got {} instead.".format(t)
class TestStringListPathField:

View File

@@ -2234,7 +2234,7 @@ class WorkflowJobAccess(BaseAccess):
if not node_access.can_add({'reference_obj': node}):
wj_add_perm = False
if not wj_add_perm and self.save_messages:
self.messages['workflow_job_template'] = _('You do not have permission to the workflow job resources required for relaunch.')
self.messages['workflow_job_template'] = _('You do not have permission to the workflow job ' 'resources required for relaunch.')
return wj_add_perm
def can_cancel(self, obj):

View File

@@ -399,7 +399,10 @@ def _copy_table(table, query, path):
file_path = os.path.join(path, table + '_table.csv')
file = FileSplitter(filespec=file_path)
with connection.cursor() as cursor:
cursor.copy_expert(query, file)
with cursor.copy(query) as copy:
while data := copy.read():
byte_data = bytes(data)
file.write(byte_data.decode())
return file.file_list()
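The hunk above swaps psycopg2's cursor.copy_expert(query, file) for psycopg3's cursor.copy() context manager, which streams COPY output in chunks. A minimal standalone sketch of the new API (connection string, table name, and handle_chunk are illustrative):

import psycopg

def handle_chunk(chunk: bytes) -> None:  # hypothetical consumer of the COPY stream
    print(chunk.decode(), end="")

with psycopg.connect("dbname=awx") as conn:
    with conn.cursor() as cursor:
        with cursor.copy("COPY main_jobevent TO STDOUT") as copy:
            while data := copy.read():  # an empty buffer signals end of stream
                handle_chunk(bytes(data))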

View File

@@ -87,7 +87,7 @@ class RecordedQueryLog(object):
)
log.commit()
log.execute(
'INSERT INTO queries (pid, version, argv, time, sql, explain, bt) VALUES (?, ?, ?, ?, ?, ?, ?);',
'INSERT INTO queries (pid, version, argv, time, sql, explain, bt) ' 'VALUES (?, ?, ?, ?, ?, ?, ?);',
(os.getpid(), version, ' '.join(sys.argv), seconds, sql, explain, bt),
)
log.commit()

View File

@@ -9,6 +9,7 @@ from django.conf import settings
from django.utils.functional import cached_property
from django.utils.timezone import now as tz_now
from django.db import transaction, connection as django_connection
from django.db.utils import DataError
from django_guid import set_guid
import psutil
@@ -191,10 +192,16 @@ class CallbackBrokerWorker(BaseWorker):
e._retry_count = retry_count
# special sanitization logic for postgres treatment of NUL 0x00 char
if (retry_count == 1) and isinstance(exc_indv, ValueError) and ("\x00" in e.stdout):
e.stdout = e.stdout.replace("\x00", "")
if retry_count >= self.INDIVIDUAL_EVENT_RETRIES:
if (retry_count == 1) and isinstance(exc_indv, DataError):
# The easiest place for a NUL to show up is in stdout. Postgres raises an error stating that it can't save a NUL character
if "\x00" in e.stdout:
e.stdout = e.stdout.replace("\x00", "")
# There is also a chance that the NUL char is embedded in event data, which is part of a JSON blob. In that case we, thankfully, get a different exception
if 'unsupported Unicode escape sequence' in str(exc_indv):
e.event_data = json.loads(
json.dumps(e.event_data).replace("\x00", "").replace("\\x00", "").replace("\u0000", "").replace("\\u0000", "")
)
elif retry_count >= self.INDIVIDUAL_EVENT_RETRIES:
logger.error(f'Hit max retries ({retry_count}) saving individual Event error: {str(exc_indv)}\ndata:\n{e.__dict__}')
events.remove(e)
else:
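The sanitization in the hunk above handles two shapes of the same problem: a literal NUL in stdout, and a NUL escape inside JSON-encoded event data, which Postgres jsonb rejects with 'unsupported Unicode escape sequence'. A minimal sketch of the JSON case (sample data is illustrative):

import json

event_data = {"res": "output with a NUL \x00 byte"}
payload = json.dumps(event_data)  # the NUL is serialized as the escape \u0000
cleaned = payload.replace("\\u0000", "")  # strip the escape Postgres refuses to store
event_data = json.loads(cleaned)
print(event_data)  # {'res': 'output with a NUL  byte'}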

View File

@@ -800,7 +800,7 @@ class CredentialTypeInjectorField(JSONSchemaField):
def validate_env_var_allowed(self, env_var):
if env_var.startswith('ANSIBLE_'):
raise django_exceptions.ValidationError(
_('Environment variable {} may affect Ansible configuration so its use is not allowed in credentials.').format(env_var),
_('Environment variable {} may affect Ansible configuration so its ' 'use is not allowed in credentials.').format(env_var),
code='invalid',
params={'value': env_var},
)

View File

@@ -23,7 +23,7 @@ class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('--days', dest='days', type=int, default=90, metavar='N', help='Remove activity stream events more than N days old')
parser.add_argument('--dry-run', dest='dry_run', action='store_true', default=False, help='Dry run mode (show items that would be removed)')
parser.add_argument('--dry-run', dest='dry_run', action='store_true', default=False, help='Dry run mode (show items that would ' 'be removed)')
def init_logging(self):
log_levels = dict(enumerate([logging.ERROR, logging.INFO, logging.DEBUG, 0]))

View File

@@ -152,7 +152,7 @@ class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('--days', dest='days', type=int, default=90, metavar='N', help='Remove jobs/updates executed more than N days ago. Defaults to 90.')
parser.add_argument('--dry-run', dest='dry_run', action='store_true', default=False, help='Dry run mode (show items that would be removed)')
parser.add_argument('--dry-run', dest='dry_run', action='store_true', default=False, help='Dry run mode (show items that would ' 'be removed)')
parser.add_argument('--jobs', dest='only_jobs', action='store_true', default=False, help='Remove jobs')
parser.add_argument('--ad-hoc-commands', dest='only_ad_hoc_commands', action='store_true', default=False, help='Remove ad hoc commands')
parser.add_argument('--project-updates', dest='only_project_updates', action='store_true', default=False, help='Remove project updates')

View File

@@ -44,7 +44,7 @@ class Command(BaseCommand):
'- To list all (now deprecated) custom virtual environments run:',
'awx-manage list_custom_venvs',
'',
'- To export the contents of a (deprecated) virtual environment, run the following command while supplying the path as an argument:',
'- To export the contents of a (deprecated) virtual environment, ' 'run the following command while supplying the path as an argument:',
'awx-manage export_custom_venv /path/to/venv',
'',
'- Run these commands with `-q` to remove tool tips.',

View File

@@ -13,7 +13,7 @@ class Command(BaseCommand):
Deprovision a cluster node
"""
help = 'Remove instance from the database. Specify `--hostname` to use this command.'
help = 'Remove instance from the database. ' 'Specify `--hostname` to use this command.'
def add_arguments(self, parser):
parser.add_argument('--hostname', dest='hostname', type=str, help='Hostname used during provisioning')

View File

@@ -22,7 +22,7 @@ class Command(BaseCommand):
'# Discovered Virtual Environments:',
'\n'.join(venvs),
'',
'- To export the contents of a (deprecated) virtual environment, run the following command while supplying the path as an argument:',
'- To export the contents of a (deprecated) virtual environment, ' 'run the following command while supplying the path as an argument:',
'awx-manage export_custom_venv /path/to/venv',
'',
'- To view the connections a (deprecated) virtual environment had in the database, run the following command while supplying the path as an argument:',

View File

@@ -122,7 +122,7 @@ class URLModificationMiddleware(MiddlewareMixin):
field_class=fields.DictField,
read_only=True,
label=_('Formats of all available named urls'),
help_text=_('Read-only list of key-value pairs that shows the standard format of all available named URLs.'),
help_text=_('Read-only list of key-value pairs that shows the standard format of all ' 'available named URLs.'),
category=_('Named URL'),
category_slug='named-url',
)

View File

@@ -2,9 +2,6 @@
# Python
from __future__ import unicode_literals
# Psycopg2
from psycopg2.extensions import AsIs
# Django
from django.db import connection, migrations, models, OperationalError, ProgrammingError
from django.conf import settings
@@ -136,8 +133,8 @@ class Migration(migrations.Migration):
),
),
migrations.RunSQL(
[("CREATE INDEX host_ansible_facts_default_gin ON %s USING gin" "(ansible_facts jsonb_path_ops);", [AsIs(Host._meta.db_table)])],
[('DROP INDEX host_ansible_facts_default_gin;', None)],
sql="CREATE INDEX host_ansible_facts_default_gin ON {} USING gin(ansible_facts jsonb_path_ops);".format(Host._meta.db_table),
reverse_sql='DROP INDEX host_ansible_facts_default_gin;',
),
# SCM file-based inventories
migrations.AddField(

View File

@@ -12,17 +12,20 @@ def migrate_event_data(apps, schema_editor):
# https://www.postgresql.org/docs/9.1/datatype-numeric.html)
for tblname in ('main_jobevent', 'main_inventoryupdateevent', 'main_projectupdateevent', 'main_adhoccommandevent', 'main_systemjobevent'):
with connection.cursor() as cursor:
# This loop used to do roughly the following:
# Rename the table to _old_<tablename>
# Create a new table from the old table (it would have no rows)
# Drop the old sequence and create a new one tied to the new table, and set the sequence to the last number from the old table
# This used to work with postgres spitting out a NOTICE and DETAIL
# With the Django 4.2 upgrade that changed to an ERROR and HINT
# By the time we hit the 4.2 upgrade, no one should be upgrading a database this old directly to this new schema
# So we no longer really care about having to do all of this work, we only need a table with a bigint ID field
# And this can be achieved by just changing the id column type...
# rename the current event table
cursor.execute(f'ALTER TABLE {tblname} RENAME TO _old_{tblname};')
# create a *new* table with the same schema
cursor.execute(f'CREATE TABLE {tblname} (LIKE _old_{tblname} INCLUDING ALL);')
# alter the *new* table so that the primary key is a big int
cursor.execute(f'ALTER TABLE {tblname} ALTER COLUMN id TYPE bigint USING id::bigint;')
# recreate counter for the new table's primary key to
# start where the *old* table left off (we have to do this because the
# counter changed from an int to a bigint)
cursor.execute(f'CREATE SEQUENCE IF NOT EXISTS "{tblname}_id_seq";')
cursor.execute(f"SELECT setval('{tblname}_id_seq', COALESCE((SELECT MAX(id)+1 FROM _old_{tblname}), 1), false);")
cursor.execute(f'DROP TABLE _old_{tblname};')
class FakeAlterField(migrations.AlterField):
def database_forwards(self, *args):

View File

@@ -1,4 +1,4 @@
# Generated by Django 4.2 on 2023-05-09 19:02
# Generated by Django 4.2 on 2023-04-21 14:43
import awx.main.fields
import awx.main.utils.polymorphic
@@ -9,102 +9,12 @@ import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('contenttypes', '0002_remove_content_type_name'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('main', '0183_pre_django_upgrade'),
]
operations = [
migrations.RenameIndex(
model_name='adhoccommandevent',
new_name='main_adhocc_ad_hoc__1e4d24_idx',
old_fields=('ad_hoc_command', 'job_created', 'uuid'),
),
migrations.RenameIndex(
model_name='adhoccommandevent',
new_name='main_adhocc_ad_hoc__e72142_idx',
old_fields=('ad_hoc_command', 'job_created', 'event'),
),
migrations.RenameIndex(
model_name='adhoccommandevent',
new_name='main_adhocc_ad_hoc__a57777_idx',
old_fields=('ad_hoc_command', 'job_created', 'counter'),
),
migrations.RenameIndex(
model_name='inventoryupdateevent',
new_name='main_invent_invento_f72b21_idx',
old_fields=('inventory_update', 'job_created', 'uuid'),
),
migrations.RenameIndex(
model_name='inventoryupdateevent',
new_name='main_invent_invento_364dcb_idx',
old_fields=('inventory_update', 'job_created', 'counter'),
),
migrations.RenameIndex(
model_name='jobevent',
new_name='main_jobeve_job_id_40a56d_idx',
old_fields=('job', 'job_created', 'parent_uuid'),
),
migrations.RenameIndex(
model_name='jobevent',
new_name='main_jobeve_job_id_3c4a4a_idx',
old_fields=('job', 'job_created', 'uuid'),
),
migrations.RenameIndex(
model_name='jobevent',
new_name='main_jobeve_job_id_51c382_idx',
old_fields=('job', 'job_created', 'counter'),
),
migrations.RenameIndex(
model_name='jobevent',
new_name='main_jobeve_job_id_0ddc6b_idx',
old_fields=('job', 'job_created', 'event'),
),
migrations.RenameIndex(
model_name='projectupdateevent',
new_name='main_projec_project_449bbd_idx',
old_fields=('project_update', 'job_created', 'uuid'),
),
migrations.RenameIndex(
model_name='projectupdateevent',
new_name='main_projec_project_69559a_idx',
old_fields=('project_update', 'job_created', 'counter'),
),
migrations.RenameIndex(
model_name='projectupdateevent',
new_name='main_projec_project_c44b7c_idx',
old_fields=('project_update', 'job_created', 'event'),
),
migrations.RenameIndex(
model_name='role',
new_name='main_rbac_r_content_979bdd_idx',
old_fields=('content_type', 'object_id'),
),
migrations.RenameIndex(
model_name='roleancestorentry',
new_name='main_rbac_r_ancesto_b44606_idx',
old_fields=('ancestor', 'content_type_id', 'role_field'),
),
migrations.RenameIndex(
model_name='roleancestorentry',
new_name='main_rbac_r_ancesto_22b9f0_idx',
old_fields=('ancestor', 'content_type_id', 'object_id'),
),
migrations.RenameIndex(
model_name='roleancestorentry',
new_name='main_rbac_r_ancesto_c87b87_idx',
old_fields=('ancestor', 'descendent'),
),
migrations.RenameIndex(
model_name='systemjobevent',
new_name='main_system_system__e39825_idx',
old_fields=('system_job', 'job_created', 'uuid'),
),
migrations.RenameIndex(
model_name='systemjobevent',
new_name='main_system_system__73537a_idx',
old_fields=('system_job', 'job_created', 'counter'),
),
migrations.AlterField(
model_name='activitystream',
name='unified_job',

View File

@@ -0,0 +1,102 @@
# Generated by Django 4.2 on 2023-04-28 19:21
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('main', '0184_django_upgrade'),
]
operations = [
migrations.RenameIndex(
model_name='adhoccommandevent',
new_name='main_adhocc_ad_hoc__a57777_idx',
old_fields=('ad_hoc_command', 'job_created', 'counter'),
),
migrations.RenameIndex(
model_name='adhoccommandevent',
new_name='main_adhocc_ad_hoc__e72142_idx',
old_fields=('ad_hoc_command', 'job_created', 'event'),
),
migrations.RenameIndex(
model_name='adhoccommandevent',
new_name='main_adhocc_ad_hoc__1e4d24_idx',
old_fields=('ad_hoc_command', 'job_created', 'uuid'),
),
migrations.RenameIndex(
model_name='inventoryupdateevent',
new_name='main_invent_invento_f72b21_idx',
old_fields=('inventory_update', 'job_created', 'uuid'),
),
migrations.RenameIndex(
model_name='inventoryupdateevent',
new_name='main_invent_invento_364dcb_idx',
old_fields=('inventory_update', 'job_created', 'counter'),
),
migrations.RenameIndex(
model_name='jobevent',
new_name='main_jobeve_job_id_51c382_idx',
old_fields=('job', 'job_created', 'counter'),
),
migrations.RenameIndex(
model_name='jobevent',
new_name='main_jobeve_job_id_0ddc6b_idx',
old_fields=('job', 'job_created', 'event'),
),
migrations.RenameIndex(
model_name='jobevent',
new_name='main_jobeve_job_id_40a56d_idx',
old_fields=('job', 'job_created', 'parent_uuid'),
),
migrations.RenameIndex(
model_name='jobevent',
new_name='main_jobeve_job_id_3c4a4a_idx',
old_fields=('job', 'job_created', 'uuid'),
),
migrations.RenameIndex(
model_name='projectupdateevent',
new_name='main_projec_project_c44b7c_idx',
old_fields=('project_update', 'job_created', 'event'),
),
migrations.RenameIndex(
model_name='projectupdateevent',
new_name='main_projec_project_449bbd_idx',
old_fields=('project_update', 'job_created', 'uuid'),
),
migrations.RenameIndex(
model_name='projectupdateevent',
new_name='main_projec_project_69559a_idx',
old_fields=('project_update', 'job_created', 'counter'),
),
migrations.RenameIndex(
model_name='role',
new_name='main_rbac_r_content_979bdd_idx',
old_fields=('content_type', 'object_id'),
),
migrations.RenameIndex(
model_name='roleancestorentry',
new_name='main_rbac_r_ancesto_22b9f0_idx',
old_fields=('ancestor', 'content_type_id', 'object_id'),
),
migrations.RenameIndex(
model_name='roleancestorentry',
new_name='main_rbac_r_ancesto_b44606_idx',
old_fields=('ancestor', 'content_type_id', 'role_field'),
),
migrations.RenameIndex(
model_name='roleancestorentry',
new_name='main_rbac_r_ancesto_c87b87_idx',
old_fields=('ancestor', 'descendent'),
),
migrations.RenameIndex(
model_name='systemjobevent',
new_name='main_system_system__e39825_idx',
old_fields=('system_job', 'job_created', 'uuid'),
),
migrations.RenameIndex(
model_name='systemjobevent',
new_name='main_system_system__73537a_idx',
old_fields=('system_job', 'job_created', 'counter'),
),
]

View File

@@ -158,7 +158,7 @@ class ec2(PluginFileInjector):
return {
# vars that change
'ec2_block_devices': (
"dict(block_device_mappings | map(attribute='device_name') | list | zip(block_device_mappings | map(attribute='ebs.volume_id') | list))"
"dict(block_device_mappings | map(attribute='device_name') | list | zip(block_device_mappings " "| map(attribute='ebs.volume_id') | list))"
),
'ec2_dns_name': 'public_dns_name',
'ec2_group_name': 'placement.group_name',
@@ -635,7 +635,7 @@ class satellite6(PluginFileInjector):
"environment": {
"prefix": "{}environment_".format(group_prefix),
"separator": "",
"key": "foreman['environment_name'] | lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9_]', '_') | regex_replace('none', '')",
"key": "foreman['environment_name'] | lower | regex_replace(' ', '') | " "regex_replace('[^A-Za-z0-9_]', '_') | regex_replace('none', '')",
},
"location": {
"prefix": "{}location_".format(group_prefix),
@@ -656,7 +656,7 @@ class satellite6(PluginFileInjector):
"content_view": {
"prefix": "{}content_view_".format(group_prefix),
"separator": "",
"key": "foreman['content_facet_attributes']['content_view_name'] | lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9_]', '_')",
"key": "foreman['content_facet_attributes']['content_view_name'] | " "lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9_]', '_')",
},
}

View File

@@ -91,7 +91,7 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
related_name='credentials',
null=False,
on_delete=models.CASCADE,
help_text=_('Specify the type of credential you want to create. Refer to the documentation for details on each type.'),
help_text=_('Specify the type of credential you want to create. Refer ' 'to the documentation for details on each type.'),
)
managed = models.BooleanField(default=False, editable=False)
organization = models.ForeignKey(
@@ -103,7 +103,7 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
related_name='credentials',
)
inputs = CredentialInputField(
blank=True, default=dict, help_text=_('Enter inputs using either JSON or YAML syntax. Refer to the documentation for example syntax.')
blank=True, default=dict, help_text=_('Enter inputs using either JSON or YAML syntax. ' 'Refer to the documentation for example syntax.')
)
admin_role = ImplicitRoleField(
parent_role=[
@@ -346,12 +346,12 @@ class CredentialType(CommonModelNameNotUnique):
managed = models.BooleanField(default=False, editable=False)
namespace = models.CharField(max_length=1024, null=True, default=None, editable=False)
inputs = CredentialTypeInputField(
blank=True, default=dict, help_text=_('Enter inputs using either JSON or YAML syntax. Refer to the documentation for example syntax.')
blank=True, default=dict, help_text=_('Enter inputs using either JSON or YAML syntax. ' 'Refer to the documentation for example syntax.')
)
injectors = CredentialTypeInjectorField(
blank=True,
default=dict,
help_text=_('Enter injectors using either JSON or YAML syntax. Refer to the documentation for example syntax.'),
help_text=_('Enter injectors using either JSON or YAML syntax. ' 'Refer to the documentation for example syntax.'),
)
@classmethod
@@ -605,7 +605,9 @@ ManagedCredentialType(
'id': 'become_method',
'label': gettext_noop('Privilege Escalation Method'),
'type': 'string',
'help_text': gettext_noop('Specify a method for "become" operations. This is equivalent to specifying the --become-method Ansible parameter.'),
'help_text': gettext_noop(
'Specify a method for "become" operations. This is ' 'equivalent to specifying the --become-method ' 'Ansible parameter.'
),
},
{
'id': 'become_username',
@@ -747,7 +749,7 @@ ManagedCredentialType(
'id': 'host',
'label': gettext_noop('Host (Authentication URL)'),
'type': 'string',
'help_text': gettext_noop('The host to authenticate with. For example, https://openstack.business.com/v2.0/'),
'help_text': gettext_noop('The host to authenticate with. For example, ' 'https://openstack.business.com/v2.0/'),
},
{
'id': 'project',
@@ -798,7 +800,7 @@ ManagedCredentialType(
'id': 'host',
'label': gettext_noop('VCenter Host'),
'type': 'string',
'help_text': gettext_noop('Enter the hostname or IP address that corresponds to your VMware vCenter.'),
'help_text': gettext_noop('Enter the hostname or IP address that corresponds ' 'to your VMware vCenter.'),
},
{'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'},
{
@@ -823,7 +825,7 @@ ManagedCredentialType(
'id': 'host',
'label': gettext_noop('Satellite 6 URL'),
'type': 'string',
'help_text': gettext_noop('Enter the URL that corresponds to your Red Hat Satellite 6 server. For example, https://satellite.example.org'),
'help_text': gettext_noop('Enter the URL that corresponds to your Red Hat ' 'Satellite 6 server. For example, https://satellite.example.org'),
},
{'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'},
{
@@ -848,7 +850,7 @@ ManagedCredentialType(
'id': 'username',
'label': gettext_noop('Service Account Email Address'),
'type': 'string',
'help_text': gettext_noop('The email address assigned to the Google Compute Engine service account.'),
'help_text': gettext_noop('The email address assigned to the Google Compute ' 'Engine service account.'),
},
{
'id': 'project',
@@ -868,7 +870,7 @@ ManagedCredentialType(
'format': 'ssh_private_key',
'secret': True,
'multiline': True,
'help_text': gettext_noop('Paste the contents of the PEM file associated with the service account email.'),
'help_text': gettext_noop('Paste the contents of the PEM file associated ' 'with the service account email.'),
},
],
'required': ['username', 'ssh_key_data'],
@@ -886,7 +888,7 @@ ManagedCredentialType(
'id': 'subscription',
'label': gettext_noop('Subscription ID'),
'type': 'string',
'help_text': gettext_noop('Subscription ID is an Azure construct, which is mapped to a username.'),
'help_text': gettext_noop('Subscription ID is an Azure construct, which is ' 'mapped to a username.'),
},
{'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'},
{
@@ -907,7 +909,7 @@ ManagedCredentialType(
'id': 'cloud_environment',
'label': gettext_noop('Azure Cloud Environment'),
'type': 'string',
'help_text': gettext_noop('Environment variable AZURE_CLOUD_ENVIRONMENT when using Azure GovCloud or Azure stack.'),
'help_text': gettext_noop('Environment variable AZURE_CLOUD_ENVIRONMENT when' ' using Azure GovCloud or Azure stack.'),
},
],
'required': ['subscription'],
@@ -1038,7 +1040,7 @@ ManagedCredentialType(
'label': gettext_noop('Username'),
'type': 'string',
'help_text': gettext_noop(
'Red Hat Ansible Automation Platform username id to authenticate as.This should not be set if an OAuth token is being used.'
'Red Hat Ansible Automation Platform username id to authenticate as.' 'This should not be set if an OAuth token is being used.'
),
},
{
@@ -1052,7 +1054,7 @@ ManagedCredentialType(
'label': gettext_noop('OAuth Token'),
'type': 'string',
'secret': True,
'help_text': gettext_noop('An OAuth token to use to authenticate with.This should not be set if username/password are being used.'),
'help_text': gettext_noop('An OAuth token to use to authenticate with.' 'This should not be set if username/password are being used.'),
},
{'id': 'verify_ssl', 'label': gettext_noop('Verify SSL'), 'type': 'boolean', 'secret': False},
],
@@ -1163,7 +1165,7 @@ ManagedCredentialType(
'id': 'auth_url',
'label': gettext_noop('Auth Server URL'),
'type': 'string',
'help_text': gettext_noop('The URL of a Keycloak server token_endpoint, if using SSO auth.'),
'help_text': gettext_noop('The URL of a Keycloak server token_endpoint, if using ' 'SSO auth.'),
},
{
'id': 'token',

View File

@@ -106,28 +106,28 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
has_active_failures = models.BooleanField(
default=False,
editable=False,
help_text=_('This field is deprecated and will be removed in a future release. Flag indicating whether any hosts in this inventory have failed.'),
help_text=_('This field is deprecated and will be removed in a future release. ' 'Flag indicating whether any hosts in this inventory have failed.'),
)
total_hosts = models.PositiveIntegerField(
default=0,
editable=False,
help_text=_('This field is deprecated and will be removed in a future release. Total number of hosts in this inventory.'),
help_text=_('This field is deprecated and will be removed in a future release. ' 'Total number of hosts in this inventory.'),
)
hosts_with_active_failures = models.PositiveIntegerField(
default=0,
editable=False,
help_text=_('This field is deprecated and will be removed in a future release. Number of hosts in this inventory with active failures.'),
help_text=_('This field is deprecated and will be removed in a future release. ' 'Number of hosts in this inventory with active failures.'),
)
total_groups = models.PositiveIntegerField(
default=0,
editable=False,
help_text=_('This field is deprecated and will be removed in a future release. Total number of groups in this inventory.'),
help_text=_('This field is deprecated and will be removed in a future release. ' 'Total number of groups in this inventory.'),
)
has_inventory_sources = models.BooleanField(
default=False,
editable=False,
help_text=_(
'This field is deprecated and will be removed in a future release. Flag indicating whether this inventory has any external inventory sources.'
'This field is deprecated and will be removed in a future release. ' 'Flag indicating whether this inventory has any external inventory sources.'
),
)
total_inventory_sources = models.PositiveIntegerField(
@@ -424,7 +424,7 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
for t in tasks:
t.task_impact = t._get_task_impact()
UnifiedJob.objects.bulk_update(tasks, ['task_impact'])
logger.debug("Finished updating inventory computed fields, pk={0}, in {1:.3f} seconds".format(self.pk, time.time() - start_time))
logger.debug("Finished updating inventory computed fields, pk={0}, in " "{1:.3f} seconds".format(self.pk, time.time() - start_time))
def websocket_emit_status(self, status):
connection.on_commit(
@@ -1055,16 +1055,16 @@ class InventorySourceOptions(BaseModel):
# the actual inventory source being used (Amazon requires Amazon
# credentials; Rackspace requires Rackspace credentials; etc...)
if source.replace('ec2', 'aws') != cred.kind:
return _('Cloud-based inventory sources (such as %s) require credentials for the matching cloud service.') % source
return _('Cloud-based inventory sources (such as %s) require ' 'credentials for the matching cloud service.') % source
# Allow an EC2 source to omit the credential. If Tower is running on
# an EC2 instance with an IAM Role assigned, boto will use credentials
# from the instance metadata instead of those explicitly provided.
elif source in CLOUD_PROVIDERS and source != 'ec2':
return _('Credential is required for a cloud source.')
elif source == 'custom' and cred and cred.credential_type.kind in ('scm', 'ssh', 'insights', 'vault'):
return _('Credentials of type machine, source control, insights and vault are disallowed for custom inventory sources.')
return _('Credentials of type machine, source control, insights and vault are ' 'disallowed for custom inventory sources.')
elif source == 'scm' and cred and cred.credential_type.kind in ('insights', 'vault'):
return _('Credentials of type insights and vault are disallowed for scm inventory sources.')
return _('Credentials of type insights and vault are ' 'disallowed for scm inventory sources.')
return None
def get_cloud_credential(self):

View File

@@ -101,7 +101,7 @@ class JobOptions(BaseModel):
max_length=1024,
default='',
blank=True,
help_text=_('Branch to use in job run. Project default used if blank. Only allowed if project allow_override field is set to true.'),
help_text=_('Branch to use in job run. Project default used if blank. ' 'Only allowed if project allow_override field is set to true.'),
)
forks = models.PositiveIntegerField(
blank=True,
@@ -253,7 +253,7 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
job_slice_count = models.PositiveIntegerField(
blank=True,
default=1,
help_text=_("The number of jobs to slice into at runtime. Will cause the Job Template to launch a workflow if value is greater than 1."),
help_text=_("The number of jobs to slice into at runtime. " "Will cause the Job Template to launch a workflow if value is greater than 1."),
)
admin_role = ImplicitRoleField(parent_role=['organization.job_template_admin_role'])
@@ -596,12 +596,12 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
job_slice_number = models.PositiveIntegerField(
blank=True,
default=0,
help_text=_("If part of a sliced job, the ID of the inventory slice operated on. If not part of sliced job, parameter is not used."),
help_text=_("If part of a sliced job, the ID of the inventory slice operated on. " "If not part of sliced job, parameter is not used."),
)
job_slice_count = models.PositiveIntegerField(
blank=True,
default=1,
help_text=_("If ran as part of sliced jobs, the total number of slices. If 1, job is not part of a sliced job."),
help_text=_("If ran as part of sliced jobs, the total number of slices. " "If 1, job is not part of a sliced job."),
)
def _get_parent_field_name(self):

View File

@@ -675,4 +675,4 @@ class WebhookMixin(models.Model):
if response.status_code < 400:
logger.debug("Webhook status update sent.")
else:
logger.error("Posting webhook status failed, code: {}\n" "{}\nPayload sent: {}".format(response.status_code, response.text, json.dumps(data)))
logger.error("Posting webhook status failed, code: {}\n" "{}\n" "Payload sent: {}".format(response.status_code, response.text, json.dumps(data)))

View File

@@ -284,7 +284,7 @@ class JobNotificationMixin(object):
'workflow_url',
'scm_branch',
'artifacts',
{'host_status_counts': ['skipped', 'ok', 'changed', 'failed', 'failures', 'dark', 'processed', 'rescued', 'ignored']},
{'host_status_counts': ['skipped', 'ok', 'changed', 'failed', 'failures', 'dark' 'processed', 'rescued', 'ignored']},
{
'summary_fields': [
{

View File

@@ -74,7 +74,7 @@ class ProjectOptions(models.Model):
return []
local_path = models.CharField(
max_length=1024, blank=True, help_text=_('Local path (relative to PROJECTS_ROOT) containing playbooks and related files for this project.')
max_length=1024, blank=True, help_text=_('Local path (relative to PROJECTS_ROOT) containing ' 'playbooks and related files for this project.')
)
scm_type = models.CharField(
@@ -276,11 +276,11 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn
scm_update_cache_timeout = models.PositiveIntegerField(
default=0,
blank=True,
help_text=_('The number of seconds after the last project update ran that a new project update will be launched as a job dependency.'),
help_text=_('The number of seconds after the last project update ran that a new ' 'project update will be launched as a job dependency.'),
)
allow_override = models.BooleanField(
default=False,
help_text=_('Allow changing the SCM branch or revision in a job template that uses this project.'),
help_text=_('Allow changing the SCM branch or revision in a job template ' 'that uses this project.'),
)
# credential (keys) used to validate content signature

View File

@@ -1137,11 +1137,9 @@ class UnifiedJob(
if total > max_supported:
raise StdoutMaxBytesExceeded(total, max_supported)
# psycopg2's copy_expert writes bytes, but callers of this
# psycopg3's copy writes bytes, but callers of this
# function assume a str-based fd will be returned; decode
# .write() calls on the fly to maintain this interface
_write = fd.write
fd.write = lambda s: _write(smart_str(s))
tbl = self._meta.db_table + 'event'
created_by_cond = ''
if self.has_unpartitioned_events:
@@ -1150,7 +1148,9 @@ class UnifiedJob(
created_by_cond = f"job_created='{self.created.isoformat()}' AND "
sql = f"copy (select stdout from {tbl} where {created_by_cond}{self.event_parent_key}={self.id} and stdout != '' order by start_line) to stdout" # nosql
cursor.copy_expert(sql, fd)
with cursor.copy(sql) as copy:
while data := copy.read():
fd.write(smart_str(bytes(data)))
if hasattr(fd, 'name'):
# If we're dealing with a physical file, use `sed` to clean

View File

@@ -82,7 +82,7 @@ class WorkflowNodeBase(CreatedModifiedModel, LaunchTimeConfig):
related_name='%(class)ss_always',
)
all_parents_must_converge = models.BooleanField(
default=False, help_text=_("If enabled then the node will only run if all of the parent nodes have met the criteria to reach this node")
default=False, help_text=_("If enabled then the node will only run if all of the parent nodes " "have met the criteria to reach this node")
)
unified_job_template = models.ForeignKey(
'UnifiedJobTemplate',
@@ -181,7 +181,7 @@ class WorkflowJobTemplateNode(WorkflowNodeBase):
max_length=512,
default=uuid4,
blank=False,
help_text=_('An identifier for this node that is unique within its workflow. It is copied to workflow job nodes corresponding to this node.'),
help_text=_('An identifier for this node that is unique within its workflow. ' 'It is copied to workflow job nodes corresponding to this node.'),
)
instance_groups = OrderedManyToManyField(
'InstanceGroup',
@@ -334,7 +334,7 @@ class WorkflowJobNode(WorkflowNodeBase):
accepted_fields, ignored_fields, errors = ujt_obj._accept_or_ignore_job_kwargs(**node_prompts_data)
if errors:
logger.info(
_('Bad launch configuration starting template {template_pk} as part of workflow {workflow_pk}. Errors:\n{error_text}').format(
_('Bad launch configuration starting template {template_pk} as part of ' 'workflow {workflow_pk}. Errors:\n{error_text}').format(
template_pk=ujt_obj.pk, workflow_pk=self.pk, error_text=errors
)
)
@@ -647,7 +647,7 @@ class WorkflowJob(UnifiedJob, WorkflowJobOptions, SurveyJobMixin, JobNotificatio
null=True,
default=None,
on_delete=models.SET_NULL,
help_text=_("If automatically created for a sliced job run, the job template the workflow job was created from."),
help_text=_("If automatically created for a sliced job run, the job template " "the workflow job was created from."),
)
is_sliced_job = models.BooleanField(default=False)
is_bulk_job = models.BooleanField(default=False)
@@ -714,7 +714,7 @@ class WorkflowJob(UnifiedJob, WorkflowJobOptions, SurveyJobMixin, JobNotificatio
wj = self.get_workflow_job()
while wj and wj.workflow_job_template_id:
if wj.pk in wj_ids:
logger.critical('Cycles detected in the workflow jobs graph, this is not normal and suggests task manager degeneracy.')
logger.critical('Cycles detected in the workflow jobs graph, ' 'this is not normal and suggests task manager degeneracy.')
break
wj_ids.add(wj.pk)
ancestors.append(wj.workflow_job_template)

View File

@@ -8,7 +8,7 @@ class CustomNotificationBase(object):
DEFAULT_APPROVAL_RUNNING_MSG = 'The approval node "{{ approval_node_name }}" needs review. This node can be viewed at: {{ workflow_url }}'
DEFAULT_APPROVAL_RUNNING_BODY = (
'The approval node "{{ approval_node_name }}" needs review. This approval node can be viewed at: {{ workflow_url }}\n\n{{ job_metadata }}'
'The approval node "{{ approval_node_name }}" needs review. ' 'This approval node can be viewed at: {{ workflow_url }}\n\n{{ job_metadata }}'
)
DEFAULT_APPROVAL_APPROVED_MSG = 'The approval node "{{ approval_node_name }}" was approved. {{ workflow_url }}'

View File

@@ -32,7 +32,7 @@ class WebhookBackend(AWXBaseEmailBackend, CustomNotificationBase):
"success": {"body": DEFAULT_BODY},
"error": {"body": DEFAULT_BODY},
"workflow_approval": {
"running": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" needs review. This node can be viewed at: {{ workflow_url }}"}'},
"running": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" needs review. ' 'This node can be viewed at: {{ workflow_url }}"}'},
"approved": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was approved. {{ workflow_url }}"}'},
"timed_out": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" has timed out. {{ workflow_url }}"}'},
"denied": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was denied. {{ workflow_url }}"}'},

View File

@@ -639,7 +639,7 @@ class AWXReceptorJob:
#
RECEPTOR_CONFIG_STARTER = (
{'local-only': None},
{'log-level': 'info'},
{'log-level': 'debug'},
{'node': {'firewallrules': [{'action': 'reject', 'tonode': settings.CLUSTER_HOST_ID, 'toservice': 'control'}]}},
{'control-service': {'service': 'control', 'filename': '/var/run/receptor/receptor.sock', 'permissions': '0660'}},
{'work-command': {'worktype': 'local', 'command': 'ansible-runner', 'params': 'worker', 'allowruntimeparams': True}},

View File

@@ -2,8 +2,8 @@ import pytest
import tempfile
import os
import re
import shutil
import csv
from io import StringIO
from django.utils.timezone import now
from datetime import timedelta
@@ -20,15 +20,16 @@ from awx.main.models import (
)
@pytest.fixture
def sqlite_copy_expert(request):
# copy_expert is postgres-specific, and SQLite doesn't support it; mock its
# behavior to test that it writes a file that contains stdout from events
path = tempfile.mkdtemp(prefix="copied_tables")
class MockCopy:
headers = None
results = None
sent_data = False
def write_stdout(self, sql, fd):
def __init__(self, sql, parent_connection):
# Would be cool if we instead properly dissected the SQL query and verified
# it that way. But instead, we just take the naive approach here.
self.results = None
self.headers = None
sql = sql.strip()
assert sql.startswith("COPY (")
assert sql.endswith(") TO STDOUT WITH CSV HEADER")
@@ -51,29 +52,49 @@ def sqlite_copy_expert(request):
elif not line.endswith(","):
sql_new[-1] = sql_new[-1].rstrip(",")
sql = "\n".join(sql_new)
parent_connection.execute(sql)
self.results = parent_connection.fetchall()
self.headers = [i[0] for i in parent_connection.description]
self.execute(sql)
results = self.fetchall()
headers = [i[0] for i in self.description]
def read(self):
if not self.sent_data:
mem_file = StringIO()
csv_handle = csv.writer(
mem_file,
delimiter=",",
quoting=csv.QUOTE_ALL,
escapechar="\\",
lineterminator="\n",
)
if self.headers:
csv_handle.writerow(self.headers)
if self.results:
csv_handle.writerows(self.results)
self.sent_data = True
return memoryview((mem_file.getvalue()).encode())
return None
csv_handle = csv.writer(
fd,
delimiter=",",
quoting=csv.QUOTE_ALL,
escapechar="\\",
lineterminator="\n",
)
csv_handle.writerow(headers)
csv_handle.writerows(results)
def __enter__(self):
return self
setattr(SQLiteCursorWrapper, "copy_expert", write_stdout)
request.addfinalizer(lambda: shutil.rmtree(path))
request.addfinalizer(lambda: delattr(SQLiteCursorWrapper, "copy_expert"))
return path
def __exit__(self, exc_type, exc_val, exc_tb):
pass
@pytest.fixture
def sqlite_copy(request, mocker):
# copy is postgres-specific, and SQLite doesn't support it; mock its
# behavior to test that it writes a file that contains stdout from events
def write_stdout(self, sql):
mock_copy = MockCopy(sql, self)
return mock_copy
mocker.patch.object(SQLiteCursorWrapper, 'copy', write_stdout, create=True)
@pytest.mark.django_db
def test_copy_tables_unified_job_query(sqlite_copy_expert, project, inventory, job_template):
def test_copy_tables_unified_job_query(sqlite_copy, project, inventory, job_template):
"""
Ensure that various unified job types are in the output of the query.
"""
@@ -127,7 +148,7 @@ def workflow_job(states=["new", "new", "new", "new", "new"]):
@pytest.mark.django_db
def test_copy_tables_workflow_job_node_query(sqlite_copy_expert, workflow_job):
def test_copy_tables_workflow_job_node_query(sqlite_copy, workflow_job):
time_start = now() - timedelta(hours=9)
with tempfile.TemporaryDirectory() as tmpdir:
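For reference, the MockCopy above imitates the psycopg 3 Cursor.copy() interface that replaced psycopg2's copy_expert(): copy() is a context manager, and callers pull chunks out of it with read() until it returns an empty buffer, rather than handing in a file object to be written to. A minimal sketch of the real call pattern the fixture stands in for (the connection string, query, and output path are illustrative):

    import psycopg

    # Illustrative only: the fixture above mocks this pattern so the same
    # code path can run against SQLite in tests.
    with psycopg.connect("dbname=awx") as conn, conn.cursor() as cursor:
        with cursor.copy("COPY (SELECT * FROM main_jobevent) TO STDOUT WITH CSV HEADER") as copy:
            with open("main_jobevent_table.csv", "wb") as fd:
                while data := copy.read():  # an empty buffer signals end of data
                    fd.write(bytes(data))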

View File

@@ -77,7 +77,7 @@ def test_credential_validation_error_with_multiple_owner_fields(post, admin, ali
}
response = post(reverse('api:credential_list'), params, admin)
assert response.status_code == 400
assert response.data['detail'][0] == ("Only one of 'user', 'team', or 'organization' should be provided, received organization, team, user fields.")
assert response.data['detail'][0] == ("Only one of 'user', 'team', or 'organization' should be provided, " "received organization, team, user fields.")
@pytest.mark.django_db
@@ -925,7 +925,7 @@ def test_credential_type_mutability(patch, organization, admin, credentialtype_s
response = _change_credential_type()
assert response.status_code == 400
expected = ['You cannot change the credential type of the credential, as it may break the functionality of the resources using it.']
expected = ['You cannot change the credential type of the credential, ' 'as it may break the functionality of the resources using it.']
assert response.data['credential_type'] == expected
response = patch(reverse('api:credential_detail', kwargs={'pk': cred.pk}), {'name': 'Worst credential ever'}, admin)
@@ -962,7 +962,7 @@ def test_vault_credential_type_mutability(patch, organization, admin, credential
response = _change_credential_type()
assert response.status_code == 400
expected = ['You cannot change the credential type of the credential, as it may break the functionality of the resources using it.']
expected = ['You cannot change the credential type of the credential, ' 'as it may break the functionality of the resources using it.']
assert response.data['credential_type'] == expected
response = patch(reverse('api:credential_detail', kwargs={'pk': cred.pk}), {'name': 'Worst credential ever'}, admin)
@@ -994,7 +994,7 @@ def test_cloud_credential_type_mutability(patch, organization, admin, credential
response = _change_credential_type()
assert response.status_code == 400
expected = ['You cannot change the credential type of the credential, as it may break the functionality of the resources using it.']
expected = ['You cannot change the credential type of the credential, ' 'as it may break the functionality of the resources using it.']
assert response.data['credential_type'] == expected
response = patch(reverse('api:credential_detail', kwargs={'pk': cred.pk}), {'name': 'Worst credential ever'}, admin)
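A note on the string changes in this hunk and several later ones: Python concatenates adjacent string literals at compile time, so the split form asserts against exactly the same message as the original single literal. A quick illustration using one of the strings above:

    joined = ('You cannot change the credential type of the credential, ' 'as it may break the functionality of the resources using it.')
    assert joined == 'You cannot change the credential type of the credential, as it may break the functionality of the resources using it.'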

View File

@@ -51,16 +51,6 @@ def test_job_relaunch_permission_denied_response(post, get, inventory, project,
r = post(reverse('api:job_relaunch', kwargs={'pk': job.pk}), {}, jt_user, expect=201)
@pytest.mark.django_db
def test_label_sublist(get, admin_user, organization):
job = Job.objects.create()
label = Label.objects.create(organization=organization, name='Steve')
job.labels.add(label)
r = get(url=reverse('api:job_label_list', kwargs={'pk': job.pk}), user=admin_user, expect=200)
assert r.data['count'] == 1
assert r.data['results'].pop()['id'] == label.id
@pytest.mark.django_db
def test_job_relaunch_prompts_not_accepted_response(post, get, inventory, project, credential, net_credential, machine_credential):
jt = JobTemplate.objects.create(name='testjt', inventory=inventory, project=project)
@@ -224,7 +214,7 @@ class TestControllerNode:
return AdHocCommand.objects.create(inventory=inventory)
@pytest.mark.django_db
def test_field_controller_node_exists(self, sqlite_copy_expert, admin_user, job, project_update, inventory_update, adhoc, get, system_job_factory):
def test_field_controller_node_exists(self, sqlite_copy, admin_user, job, project_update, inventory_update, adhoc, get, system_job_factory):
system_job = system_job_factory()
r = get(reverse('api:unified_job_list') + '?id={}'.format(job.id), admin_user, expect=200)

View File

@@ -57,7 +57,7 @@ def _mk_inventory_update(created=None):
[_mk_inventory_update, InventoryUpdateEvent, 'inventory_update', 'api:inventory_update_stdout'],
],
)
def test_text_stdout(sqlite_copy_expert, Parent, Child, relation, view, get, admin):
def test_text_stdout(sqlite_copy, Parent, Child, relation, view, get, admin):
job = Parent()
job.save()
for i in range(3):
@@ -79,7 +79,7 @@ def test_text_stdout(sqlite_copy_expert, Parent, Child, relation, view, get, adm
],
)
@pytest.mark.parametrize('download', [True, False])
def test_ansi_stdout_filtering(sqlite_copy_expert, Parent, Child, relation, view, download, get, admin):
def test_ansi_stdout_filtering(sqlite_copy, Parent, Child, relation, view, download, get, admin):
job = Parent()
job.save()
for i in range(3):
@@ -111,7 +111,7 @@ def test_ansi_stdout_filtering(sqlite_copy_expert, Parent, Child, relation, view
[_mk_inventory_update, InventoryUpdateEvent, 'inventory_update', 'api:inventory_update_stdout'],
],
)
def test_colorized_html_stdout(sqlite_copy_expert, Parent, Child, relation, view, get, admin):
def test_colorized_html_stdout(sqlite_copy, Parent, Child, relation, view, get, admin):
job = Parent()
job.save()
for i in range(3):
@@ -134,7 +134,7 @@ def test_colorized_html_stdout(sqlite_copy_expert, Parent, Child, relation, view
[_mk_inventory_update, InventoryUpdateEvent, 'inventory_update', 'api:inventory_update_stdout'],
],
)
def test_stdout_line_range(sqlite_copy_expert, Parent, Child, relation, view, get, admin):
def test_stdout_line_range(sqlite_copy, Parent, Child, relation, view, get, admin):
job = Parent()
job.save()
for i in range(20):
@@ -146,7 +146,7 @@ def test_stdout_line_range(sqlite_copy_expert, Parent, Child, relation, view, ge
@pytest.mark.django_db
def test_text_stdout_from_system_job_events(sqlite_copy_expert, get, admin):
def test_text_stdout_from_system_job_events(sqlite_copy, get, admin):
created = tz_now()
job = SystemJob(created=created)
job.save()
@@ -158,7 +158,7 @@ def test_text_stdout_from_system_job_events(sqlite_copy_expert, get, admin):
@pytest.mark.django_db
def test_text_stdout_with_max_stdout(sqlite_copy_expert, get, admin):
def test_text_stdout_with_max_stdout(sqlite_copy, get, admin):
created = tz_now()
job = SystemJob(created=created)
job.save()
@@ -185,7 +185,7 @@ def test_text_stdout_with_max_stdout(sqlite_copy_expert, get, admin):
)
@pytest.mark.parametrize('fmt', ['txt', 'ansi'])
@mock.patch('awx.main.redact.UriCleaner.SENSITIVE_URI_PATTERN', mock.Mock(**{'search.return_value': None})) # really slow for large strings
def test_max_bytes_display(sqlite_copy_expert, Parent, Child, relation, view, fmt, get, admin):
def test_max_bytes_display(sqlite_copy, Parent, Child, relation, view, fmt, get, admin):
created = tz_now()
job = Parent(created=created)
job.save()
@@ -255,7 +255,7 @@ def test_legacy_result_stdout_with_max_bytes(Cls, view, fmt, get, admin):
],
)
@pytest.mark.parametrize('fmt', ['txt', 'ansi', 'txt_download', 'ansi_download'])
def test_text_with_unicode_stdout(sqlite_copy_expert, Parent, Child, relation, view, get, admin, fmt):
def test_text_with_unicode_stdout(sqlite_copy, Parent, Child, relation, view, get, admin, fmt):
job = Parent()
job.save()
for i in range(3):
@@ -267,7 +267,7 @@ def test_text_with_unicode_stdout(sqlite_copy_expert, Parent, Child, relation, v
@pytest.mark.django_db
def test_unicode_with_base64_ansi(sqlite_copy_expert, get, admin):
def test_unicode_with_base64_ansi(sqlite_copy, get, admin):
created = tz_now()
job = Job(created=created)
job.save()

View File

@@ -1,8 +1,6 @@
# Python
import pytest
from unittest import mock
import tempfile
import shutil
import urllib.parse
from unittest.mock import PropertyMock
@@ -789,25 +787,43 @@ def oauth_application(admin):
return Application.objects.create(name='test app', user=admin, client_type='confidential', authorization_grant_type='password')
@pytest.fixture
def sqlite_copy_expert(request):
# copy_expert is postgres-specific, and SQLite doesn't support it; mock its
# behavior to test that it writes a file that contains stdout from events
path = tempfile.mkdtemp(prefix='job-event-stdout')
class MockCopy:
events = []
index = -1
def write_stdout(self, sql, fd):
# simulate postgres copy_expert support with ORM code
def __init__(self, sql):
self.events = []
parts = sql.split(' ')
tablename = parts[parts.index('from') + 1]
for cls in (JobEvent, AdHocCommandEvent, ProjectUpdateEvent, InventoryUpdateEvent, SystemJobEvent):
if cls._meta.db_table == tablename:
for event in cls.objects.order_by('start_line').all():
fd.write(event.stdout)
self.events.append(event.stdout)
setattr(SQLiteCursorWrapper, 'copy_expert', write_stdout)
request.addfinalizer(lambda: shutil.rmtree(path))
request.addfinalizer(lambda: delattr(SQLiteCursorWrapper, 'copy_expert'))
return path
def read(self):
self.index = self.index + 1
if self.index < len(self.events):
return memoryview(self.events[self.index].encode())
return None
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
pass
@pytest.fixture
def sqlite_copy(request, mocker):
# copy is postgres-specific, and SQLite doesn't support it; mock its
# behavior to test that it writes a file that contains stdout from events
def write_stdout(self, sql):
mock_copy = MockCopy(sql)
return mock_copy
mocker.patch.object(SQLiteCursorWrapper, 'copy', write_stdout, create=True)
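One detail worth calling out in the new fixture: mocker.patch.object(..., create=True) is required because SQLiteCursorWrapper defines no copy attribute of its own; without create=True, patch.object refuses to add one. A self-contained illustration with a stand-in class:

    from unittest import mock

    class Cursor:
        """Stand-in for SQLiteCursorWrapper, which has no `copy` attribute."""

    with mock.patch.object(Cursor, 'copy', lambda self, sql: ('copied', sql), create=True):
        assert Cursor().copy('COPY ... TO STDOUT') == ('copied', 'COPY ... TO STDOUT')

    assert not hasattr(Cursor, 'copy')  # the patched attribute is removed on exit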
@pytest.fixture

View File

@@ -98,7 +98,7 @@ class TestJobNotificationMixin(object):
@pytest.mark.django_db
@pytest.mark.parametrize('JobClass', [AdHocCommand, InventoryUpdate, Job, ProjectUpdate, SystemJob, WorkflowJob])
def test_context(self, JobClass, sqlite_copy_expert, project, inventory_source):
def test_context(self, JobClass, sqlite_copy, project, inventory_source):
"""The Jinja context defines all of the fields that can be used by a template. Ensure that the context generated
for each job type has the expected structure."""
kwargs = {}

View File

@@ -121,7 +121,7 @@ def read_content(private_data_dir, raw_env, inventory_update):
break
alias = 'file_reference_{}'.format(i)
else:
raise RuntimeError('Test not able to cope with >10 references by env vars. Something probably went very wrong.')
raise RuntimeError('Test not able to cope with >10 references by env vars. ' 'Something probably went very wrong.')
file_aliases[abs_file_path] = alias
for env_key in inverse_env[runner_path]:
env[env_key] = '{{{{ {} }}}}'.format(alias)
@@ -234,7 +234,7 @@ def test_inventory_update_injected_content(this_kind, inventory, fake_credential
source_dir = os.path.join(base_dir, this_kind) # this_kind is a global
if not os.path.exists(source_dir):
raise FileNotFoundError('Maybe you never made reference files? MAKE_INVENTORY_REFERENCE_FILES=true py.test ...\noriginal: {}')
raise FileNotFoundError('Maybe you never made reference files? ' 'MAKE_INVENTORY_REFERENCE_FILES=true py.test ...\noriginal: {}')
files_dir = os.path.join(source_dir, 'files')
try:
expected_file_list = os.listdir(files_dir)

View File

@@ -371,7 +371,7 @@ class TestExtraVarSanitation(TestJobExecution):
# are deemed trustable, because they can only be added by users w/ enough
# privilege to add/modify a Job Template)
UNSAFE = "{{ lookup('pipe', 'ls -la') }}"
UNSAFE = '{{ lookup(' 'pipe' ',' 'ls -la' ') }}'
def test_vars_unsafe_by_default(self, job, private_data_dir, mock_me):
job.created_by = User(pk=123, username='angry-spud')

View File

@@ -88,6 +88,6 @@ def test_global_creation_always_possible(all_views):
creatable_view = View
if not creatable or not global_view:
continue
assert 'POST' in global_view().allowed_methods, 'Resource {} should be creatable in global list view {}. Can be created now in {}'.format(
assert 'POST' in global_view().allowed_methods, 'Resource {} should be creatable in global list view {}. ' 'Can be created now in {}'.format(
model, global_view, creatable_view
)

View File

@@ -93,7 +93,7 @@ class TestSmartFilterQueryFromString:
@pytest.mark.parametrize(
"filter_string",
[
'ansible_facts__facts__facts__blank=ansible_facts__a__b__c__ space =ggg',
'ansible_facts__facts__facts__blank=' 'ansible_facts__a__b__c__ space =ggg',
],
)
def test_invalid_filter_strings(self, mock_get_host_model, filter_string):
@@ -104,7 +104,7 @@ class TestSmartFilterQueryFromString:
@pytest.mark.parametrize(
"filter_string",
[
'created_by__password__icontains=pbkdf2search=foo or created_by__password__icontains=pbkdf2',
'created_by__password__icontains=pbkdf2' 'search=foo or created_by__password__icontains=pbkdf2',
'created_by__password__icontains=pbkdf2 or search=foo',
],
)

View File

@@ -716,7 +716,7 @@ def parse_yaml_or_json(vars_str, silent_failure=True):
if silent_failure:
return {}
raise ParseError(
_('Cannot parse as JSON (error: {json_error}) or YAML (error: {yaml_error}).').format(json_error=str(json_err), yaml_error=str(yaml_err))
_('Cannot parse as JSON (error: {json_error}) or ' 'YAML (error: {yaml_error}).').format(json_error=str(json_err), yaml_error=str(yaml_err))
)
return vars_dict

View File

@@ -253,7 +253,7 @@ def dict_to_mem_data(data, inventory=None):
if isinstance(hv, dict):
host.variables.update(hv)
else:
logger.warning('Expected dict of vars for host "%s", got %s instead', hk, str(type(hv)))
logger.warning('Expected dict of vars for ' 'host "%s", got %s instead', hk, str(type(hv)))
group.add_host(host)
elif isinstance(hosts, (list, tuple)):
for hk in hosts:
@@ -262,13 +262,13 @@ def dict_to_mem_data(data, inventory=None):
continue
group.add_host(host)
else:
logger.warning('Expected dict or list of "hosts" for group "%s", got %s instead', k, str(type(hosts)))
logger.warning('Expected dict or list of "hosts" for ' 'group "%s", got %s instead', k, str(type(hosts)))
# Process group variables.
vars = v.get('vars', {})
if isinstance(vars, dict):
group.variables.update(vars)
else:
logger.warning('Expected dict of vars for group "%s", got %s instead', k, str(type(vars)))
logger.warning('Expected dict of vars for ' 'group "%s", got %s instead', k, str(type(vars)))
# Process child groups.
children = v.get('children', [])
if isinstance(children, (list, tuple)):
@@ -277,7 +277,7 @@ def dict_to_mem_data(data, inventory=None):
if child and c != 'ungrouped':
group.add_child_group(child)
else:
logger.warning('Expected list of children for group "%s", got %s instead', k, str(type(children)))
logger.warning('Expected list of children for ' 'group "%s", got %s instead', k, str(type(children)))
# Load host names from a list.
elif isinstance(v, (list, tuple)):
@@ -288,7 +288,7 @@ def dict_to_mem_data(data, inventory=None):
group.add_host(host)
else:
logger.warning('Expected dict or list for group "%s", got %s instead', k, str(type(v)))
logger.warning('Expected dict or list for group "%s", ' 'got %s instead', k, str(type(v)))
if k not in ['all', 'ungrouped']:
inventory.all_group.add_child_group(group)
@@ -299,6 +299,6 @@ def dict_to_mem_data(data, inventory=None):
if isinstance(meta_hostvars, dict):
v.variables.update(meta_hostvars)
else:
logger.warning('Expected dict of vars for host "%s", got %s instead', k, str(type(meta_hostvars)))
logger.warning('Expected dict of vars for ' 'host "%s", got %s instead', k, str(type(meta_hostvars)))
return inventory

View File

@@ -5,14 +5,13 @@ from typing import Dict
import aiohttp
from aiohttp import client_exceptions
import aioredis
from channels.layers import get_channel_layer
from django.conf import settings
from django.apps import apps
import asyncpg
import psycopg
from awx.main.analytics.broadcast_websocket import (
RelayWebsocketStats,
@@ -181,9 +180,6 @@ class WebsocketRelayConnection:
return
continue
except aioredis.errors.ConnectionClosedError:
logger.info(f"Producer {name} lost connection to Redis, shutting down.")
return
await websocket.send_json(wrap_broadcast_msg(group, msg))
except ConnectionResetError:
@@ -209,92 +205,64 @@ class WebSocketRelayManager(object):
# hostname -> ip
self.known_hosts: Dict[str, str] = dict()
async def on_heartbeet(self, conn, pid, channel, payload):
async def pg_consumer(self, conn):
try:
if not payload or channel != "web_heartbeet":
return
await conn.execute("LISTEN web_heartbeet")
async for notif in conn.notifies():
if notif is not None and notif.channel == "web_heartbeet":
try:
payload = json.loads(notif.payload)
except json.JSONDecodeError:
logmsg = "Failed to decode message from pg_notify channel `web_heartbeet`"
if logger.isEnabledFor(logging.DEBUG):
logmsg = "{} {}".format(logmsg, payload)
logger.warning(logmsg)
continue
try:
payload = json.loads(payload)
except json.JSONDecodeError:
logmsg = "Failed to decode message from pg_notify channel `web_heartbeet`"
if logger.isEnabledFor(logging.DEBUG):
logmsg = "{} {}".format(logmsg, payload)
logger.warning(logmsg)
return
# Skip if the message comes from the same host we are running on
# In this case, we'll be sharing a redis, no need to relay.
if payload.get("hostname") == self.local_hostname:
continue
# Skip if the message comes from the same host we are running on
# In this case, we'll be sharing a redis, no need to relay.
if payload.get("hostname") == self.local_hostname:
return
if payload.get("action") == "online":
hostname = payload.get("hostname")
ip = payload.get("ip")
if ip is None:
# If we don't get an IP, just try the hostname, maybe it resolves
ip = hostname
if ip is None:
logger.warning(f"Received invalid online heartbeet, missing hostname and ip: {payload}")
return
self.known_hosts[hostname] = ip
logger.debug(f"Web host {hostname} ({ip}) online heartbeat received.")
elif payload.get("action") == "offline":
hostname = payload.get("hostname")
ip = payload.get("ip")
if ip is None:
# If we don't get an IP, just try the hostname, maybe it resolves
ip = hostname
if ip is None:
logger.warning(f"Received invalid offline heartbeet, missing hostname and ip: {payload}")
return
self.cleanup_offline_host(ip)
logger.debug(f"Web host {hostname} ({ip}) offline heartbeat received.")
if payload.get("action") == "online":
hostname = payload["hostname"]
ip = payload["ip"]
if ip is None:
# If we don't get an IP, just try the hostname, maybe it resolves
ip = hostname
self.known_hosts[hostname] = ip
logger.debug(f"Web host {hostname} ({ip}) online heartbeat received.")
elif payload.get("action") == "offline":
hostname = payload["hostname"]
del self.known_hosts[hostname]
logger.debug(f"Web host {hostname} ({ip}) offline heartbeat received.")
except Exception as e:
# This catch-all is the same as the one above. asyncio will eat the exception
# but we want to know about it.
logger.exception(f"on_heartbeet exception: {e}")
def cleanup_offline_host(self, hostname):
"""
Given a hostname, try to cancel its task/connection and remove it from
the list of hosts we know about.
If the host isn't in the list, assume that it was already deleted and
don't error.
"""
if hostname in self.relay_connections:
self.relay_connections[hostname].cancel()
del self.relay_connections[hostname]
if hostname in self.known_hosts:
del self.known_hosts[hostname]
try:
self.stats_mgr.delete_remote_host_stats(hostname)
except KeyError:
pass
logger.exception(f"pg_consumer exception: {e}")
async def run(self):
event_loop = asyncio.get_running_loop()
self.stats_mgr = RelayWebsocketStatsManager(event_loop, self.local_hostname)
self.stats_mgr.start()
stats_mgr = RelayWebsocketStatsManager(event_loop, self.local_hostname)
stats_mgr.start()
# Set up a pg_notify consumer for allowing web nodes to "provision" and "deprovision" themselves gracefully.
database_conf = settings.DATABASES['default']
async_conn = await asyncpg.connect(
database=database_conf['NAME'],
async_conn = await psycopg.AsyncConnection.connect(
dbname=database_conf['NAME'],
host=database_conf['HOST'],
user=database_conf['USER'],
password=database_conf['PASSWORD'],
port=database_conf['PORT'],
# We cannot include these because asyncpg doesn't allow all the options that psycopg does.
# **database_conf.get("OPTIONS", {}),
**database_conf.get("OPTIONS", {}),
)
await async_conn.add_listener("web_heartbeet", self.on_heartbeet)
await async_conn.set_autocommit(True)
event_loop.create_task(self.pg_consumer(async_conn))
# Establishes a websocket connection to /websocket/relay on all API servers
while True:
# logger.info("Current known hosts: {}".format(self.known_hosts))
future_remote_hosts = self.known_hosts.keys()
current_remote_hosts = self.relay_connections.keys()
deleted_remote_hosts = set(current_remote_hosts) - set(future_remote_hosts)
@@ -323,10 +291,13 @@ class WebSocketRelayManager(object):
logger.info(f"Adding {new_remote_hosts} to websocket broadcast list")
for h in deleted_remote_hosts:
self.cleanup_offline_host(h)
self.relay_connections[h].cancel()
del self.relay_connections[h]
del self.known_hosts[h]
stats_mgr.delete_remote_host_stats(h)
for h in new_remote_hosts:
stats = self.stats_mgr.new_remote_host_stats(h)
stats = stats_mgr.new_remote_host_stats(h)
relay_connection = WebsocketRelayConnection(name=self.local_hostname, stats=stats, remote_host=self.known_hosts[h])
relay_connection.start()
self.relay_connections[h] = relay_connection
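The heart of this change is the move from asyncpg's callback-based add_listener to psycopg 3, which exposes LISTEN/NOTIFY as an async generator, so the consumer becomes a plain coroutine that can be scheduled with create_task. A minimal standalone sketch of the pattern (the DSN is illustrative; AWX builds the connection arguments from settings.DATABASES as shown above):

    import asyncio
    import json
    import psycopg

    async def consume(dsn='dbname=awx'):
        # autocommit is needed so LISTEN takes effect without an explicit commit
        conn = await psycopg.AsyncConnection.connect(dsn, autocommit=True)
        await conn.execute('LISTEN web_heartbeet')
        async for notif in conn.notifies():  # yields psycopg.Notify objects
            try:
                payload = json.loads(notif.payload)
            except json.JSONDecodeError:
                continue
            print(notif.channel, payload.get('hostname'), payload.get('action'))

    # asyncio.run(consume())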

View File

@@ -58,7 +58,7 @@ class ActionModule(ActionBase):
if res.status_code != 200:
result['failed'] = True
result['msg'] = 'Expected {} to return a status code of 200 but returned status code "{}" instead with content "{}".'.format(
result['msg'] = 'Expected {} to return a status code of 200 but returned status ' 'code "{}" instead with content "{}".'.format(
url, res.status_code, res.content
)
return result
@@ -87,7 +87,7 @@ class ActionModule(ActionBase):
continue
elif res.status_code != 200:
result['failed'] = True
result['msg'] = 'Expected {} to return a status code of 200 but returned status code "{}" instead with content "{}".'.format(
result['msg'] = 'Expected {} to return a status code of 200 but returned status ' 'code "{}" instead with content "{}".'.format(
playbook_url, res.status_code, res.content
)
return result

View File

@@ -269,7 +269,7 @@ class TowerSAMLIdentityProvider(BaseSAMLIdentityProvider):
value = value[0]
if conf_key in ('attr_first_name', 'attr_last_name', 'attr_username', 'attr_email') and value is None:
logger.warning(
"Could not map user detail '%s' from SAML attribute '%s'; update SOCIAL_AUTH_SAML_ENABLED_IDPS['%s']['%s'] with the correct SAML attribute.",
"Could not map user detail '%s' from SAML attribute '%s'; " "update SOCIAL_AUTH_SAML_ENABLED_IDPS['%s']['%s'] with the correct SAML attribute.",
conf_key[5:],
key,
self.name,

View File

@@ -100,7 +100,7 @@ register(
'AUTHENTICATION_BACKENDS',
field_class=AuthenticationBackendsField,
label=_('Authentication Backends'),
help_text=_('List of authentication backends that are enabled based on license features and other authentication settings.'),
help_text=_('List of authentication backends that are enabled based on ' 'license features and other authentication settings.'),
read_only=True,
depends_on=AuthenticationBackendsField.get_all_required_settings(),
category=_('Authentication'),
@@ -360,7 +360,7 @@ def _register_ldap(append=None):
default=None,
label=_('LDAP Deny Group'),
help_text=_(
'Group DN denied from login. If specified, user will not be allowed to login if a member of this group. Only one deny group is supported.'
'Group DN denied from login. If specified, user will not be ' 'allowed to login if a member of this group. Only one deny group ' 'is supported.'
),
category=_('LDAP'),
category_slug='ldap',
@@ -426,7 +426,7 @@ def _register_ldap(append=None):
field_class=LDAPTeamMapField,
default={},
label=_('LDAP Team Map'),
help_text=_('Mapping between team members (users) and LDAP groups. Configuration details are available in the documentation.'),
help_text=_('Mapping between team members (users) and LDAP groups. Configuration' ' details are available in the documentation.'),
category=_('LDAP'),
category_slug='ldap',
placeholder=collections.OrderedDict(
@@ -461,7 +461,7 @@ register(
allow_blank=True,
default='',
label=_('RADIUS Server'),
help_text=_('Hostname/IP of RADIUS server. RADIUS authentication is disabled if this setting is empty.'),
help_text=_('Hostname/IP of RADIUS server. RADIUS authentication is ' 'disabled if this setting is empty.'),
category=_('RADIUS'),
category_slug='radius',
placeholder='radius.example.com',
@@ -564,7 +564,9 @@ register(
read_only=True,
default=SocialAuthCallbackURL('google-oauth2'),
label=_('Google OAuth2 Callback URL'),
help_text=_('Provide this URL as the callback URL for your application as part of your registration process. Refer to the documentation for more detail.'),
help_text=_(
'Provide this URL as the callback URL for your application as part ' 'of your registration process. Refer to the ' 'documentation for more detail.'
),
category=_('Google OAuth2'),
category_slug='google-oauth2',
depends_on=['TOWER_URL_BASE'],
@@ -600,7 +602,7 @@ register(
field_class=fields.StringListField,
default=[],
label=_('Google OAuth2 Allowed Domains'),
help_text=_('Update this setting to restrict the domains who are allowed to login using Google OAuth2.'),
help_text=_('Update this setting to restrict the domains who are allowed to ' 'login using Google OAuth2.'),
category=_('Google OAuth2'),
category_slug='google-oauth2',
placeholder=['example.com'],
@@ -656,7 +658,9 @@ register(
read_only=True,
default=SocialAuthCallbackURL('github'),
label=_('GitHub OAuth2 Callback URL'),
help_text=_('Provide this URL as the callback URL for your application as part of your registration process. Refer to the documentation for more detail.'),
help_text=_(
'Provide this URL as the callback URL for your application as part ' 'of your registration process. Refer to the ' 'documentation for more detail.'
),
category=_('GitHub OAuth2'),
category_slug='github',
depends_on=['TOWER_URL_BASE'],
@@ -719,7 +723,9 @@ register(
read_only=True,
default=SocialAuthCallbackURL('github-org'),
label=_('GitHub Organization OAuth2 Callback URL'),
help_text=_('Provide this URL as the callback URL for your application as part of your registration process. Refer to the documentation for more detail.'),
help_text=_(
'Provide this URL as the callback URL for your application as part ' 'of your registration process. Refer to the ' 'documentation for more detail.'
),
category=_('GitHub Organization OAuth2'),
category_slug='github-org',
depends_on=['TOWER_URL_BASE'],
@@ -754,7 +760,7 @@ register(
allow_blank=True,
default='',
label=_('GitHub Organization Name'),
help_text=_('The name of your GitHub organization, as used in your organization\'s URL: https://github.com/<yourorg>/.'),
help_text=_('The name of your GitHub organization, as used in your ' 'organization\'s URL: https://github.com/<yourorg>/.'),
category=_('GitHub Organization OAuth2'),
category_slug='github-org',
)
@@ -833,7 +839,7 @@ register(
allow_blank=True,
default='',
label=_('GitHub Team ID'),
help_text=_('Find the numeric team ID using the Github API: http://fabian-kostadinov.github.io/2015/01/16/how-to-find-a-github-team-id/.'),
help_text=_('Find the numeric team ID using the Github API: ' 'http://fabian-kostadinov.github.io/2015/01/16/how-to-find-a-github-team-id/.'),
category=_('GitHub Team OAuth2'),
category_slug='github-team',
)
@@ -872,7 +878,9 @@ register(
read_only=True,
default=SocialAuthCallbackURL('github-enterprise'),
label=_('GitHub Enterprise OAuth2 Callback URL'),
help_text=_('Provide this URL as the callback URL for your application as part of your registration process. Refer to the documentation for more detail.'),
help_text=_(
'Provide this URL as the callback URL for your application as part ' 'of your registration process. Refer to the ' 'documentation for more detail.'
),
category=_('GitHub Enterprise OAuth2'),
category_slug='github-enterprise',
depends_on=['TOWER_URL_BASE'],
@@ -884,7 +892,7 @@ register(
allow_blank=True,
default='',
label=_('GitHub Enterprise URL'),
help_text=_('The URL for your Github Enterprise instance, e.g.: http(s)://hostname/. Refer to Github Enterprise documentation for more details.'),
help_text=_('The URL for your Github Enterprise instance, e.g.: http(s)://hostname/. Refer to Github Enterprise ' 'documentation for more details.'),
category=_('GitHub Enterprise OAuth2'),
category_slug='github-enterprise',
)
@@ -896,7 +904,7 @@ register(
default='',
label=_('GitHub Enterprise API URL'),
help_text=_(
'The API URL for your GitHub Enterprise instance, e.g.: http(s)://hostname/api/v3/. Refer to Github Enterprise documentation for more details.'
'The API URL for your GitHub Enterprise instance, e.g.: http(s)://hostname/api/v3/. Refer to Github ' 'Enterprise documentation for more details.'
),
category=_('GitHub Enterprise OAuth2'),
category_slug='github-enterprise',
@@ -959,7 +967,9 @@ register(
read_only=True,
default=SocialAuthCallbackURL('github-enterprise-org'),
label=_('GitHub Enterprise Organization OAuth2 Callback URL'),
help_text=_('Provide this URL as the callback URL for your application as part of your registration process. Refer to the documentation for more detail.'),
help_text=_(
'Provide this URL as the callback URL for your application as part ' 'of your registration process. Refer to the ' 'documentation for more detail.'
),
category=_('GitHub Enterprise Organization OAuth2'),
category_slug='github-enterprise-org',
depends_on=['TOWER_URL_BASE'],
@@ -971,7 +981,7 @@ register(
allow_blank=True,
default='',
label=_('GitHub Enterprise Organization URL'),
help_text=_('The URL for your Github Enterprise instance, e.g.: http(s)://hostname/. Refer to Github Enterprise documentation for more details.'),
help_text=_('The URL for your Github Enterprise instance, e.g.: http(s)://hostname/. Refer to Github Enterprise ' 'documentation for more details.'),
category=_('GitHub Enterprise OAuth2'),
category_slug='github-enterprise-org',
)
@@ -983,7 +993,7 @@ register(
default='',
label=_('GitHub Enterprise Organization API URL'),
help_text=_(
'The API URL for your GitHub Enterprise instance, e.g.: http(s)://hostname/api/v3/. Refer to Github Enterprise documentation for more details.'
'The API URL for your GitHub Enterprise instance, e.g.: http(s)://hostname/api/v3/. Refer to Github ' 'Enterprise documentation for more details.'
),
category=_('GitHub Enterprise OAuth2'),
category_slug='github-enterprise-org',
@@ -1018,7 +1028,7 @@ register(
allow_blank=True,
default='',
label=_('GitHub Enterprise Organization Name'),
help_text=_('The name of your GitHub Enterprise organization, as used in your organization\'s URL: https://github.com/<yourorg>/.'),
help_text=_('The name of your GitHub Enterprise organization, as used in your ' 'organization\'s URL: https://github.com/<yourorg>/.'),
category=_('GitHub Enterprise Organization OAuth2'),
category_slug='github-enterprise-org',
)
@@ -1074,7 +1084,7 @@ register(
allow_blank=True,
default='',
label=_('GitHub Enterprise Team URL'),
help_text=_('The URL for your Github Enterprise instance, e.g.: http(s)://hostname/. Refer to Github Enterprise documentation for more details.'),
help_text=_('The URL for your Github Enterprise instance, e.g.: http(s)://hostname/. Refer to Github Enterprise ' 'documentation for more details.'),
category=_('GitHub Enterprise OAuth2'),
category_slug='github-enterprise-team',
)
@@ -1086,7 +1096,7 @@ register(
default='',
label=_('GitHub Enterprise Team API URL'),
help_text=_(
'The API URL for your GitHub Enterprise instance, e.g.: http(s)://hostname/api/v3/. Refer to Github Enterprise documentation for more details.'
'The API URL for your GitHub Enterprise instance, e.g.: http(s)://hostname/api/v3/. Refer to Github ' 'Enterprise documentation for more details.'
),
category=_('GitHub Enterprise OAuth2'),
category_slug='github-enterprise-team',
@@ -1121,7 +1131,7 @@ register(
allow_blank=True,
default='',
label=_('GitHub Enterprise Team ID'),
help_text=_('Find the numeric team ID using the Github Enterprise API: http://fabian-kostadinov.github.io/2015/01/16/how-to-find-a-github-team-id/.'),
help_text=_('Find the numeric team ID using the Github Enterprise API: ' 'http://fabian-kostadinov.github.io/2015/01/16/how-to-find-a-github-team-id/.'),
category=_('GitHub Enterprise Team OAuth2'),
category_slug='github-enterprise-team',
)
@@ -1160,7 +1170,9 @@ register(
read_only=True,
default=SocialAuthCallbackURL('azuread-oauth2'),
label=_('Azure AD OAuth2 Callback URL'),
help_text=_('Provide this URL as the callback URL for your application as part of your registration process. Refer to the documentation for more detail. '),
help_text=_(
'Provide this URL as the callback URL for your application as part' ' of your registration process. Refer to the' ' documentation for more detail. '
),
category=_('Azure AD OAuth2'),
category_slug='azuread-oauth2',
depends_on=['TOWER_URL_BASE'],
@@ -1279,7 +1291,7 @@ register(
field_class=fields.BooleanField,
default=True,
label=_('Automatically Create Organizations and Teams on SAML Login'),
help_text=_('When enabled (the default), mapped Organizations and Teams will be created automatically on successful SAML login.'),
help_text=_('When enabled (the default), mapped Organizations and Teams ' 'will be created automatically on successful SAML login.'),
category=_('SAML'),
category_slug='saml',
)
@@ -1306,7 +1318,7 @@ register(
read_only=True,
default=get_saml_metadata_url,
label=_('SAML Service Provider Metadata URL'),
help_text=_('If your identity provider (IdP) allows uploading an XML metadata file, you can download one from this URL.'),
help_text=_('If your identity provider (IdP) allows uploading an XML ' 'metadata file, you can download one from this URL.'),
category=_('SAML'),
category_slug='saml',
)
@@ -1334,7 +1346,7 @@ register(
required=True,
validators=[validate_certificate],
label=_('SAML Service Provider Public Certificate'),
help_text=_('Create a keypair to use as a service provider (SP) and include the certificate content here.'),
help_text=_('Create a keypair to use as a service provider (SP) ' 'and include the certificate content here.'),
category=_('SAML'),
category_slug='saml',
)
@@ -1346,7 +1358,7 @@ register(
required=True,
validators=[validate_private_key],
label=_('SAML Service Provider Private Key'),
help_text=_('Create a keypair to use as a service provider (SP) and include the private key content here.'),
help_text=_('Create a keypair to use as a service provider (SP) ' 'and include the private key content here.'),
category=_('SAML'),
category_slug='saml',
encrypted=True,
@@ -1357,7 +1369,7 @@ register(
field_class=SAMLOrgInfoField,
required=True,
label=_('SAML Service Provider Organization Info'),
help_text=_('Provide the URL, display name, and the name of your app. Refer to the documentation for example syntax.'),
help_text=_('Provide the URL, display name, and the name of your app. Refer to' ' the documentation for example syntax.'),
category=_('SAML'),
category_slug='saml',
placeholder=collections.OrderedDict(
@@ -1371,7 +1383,7 @@ register(
allow_blank=True,
required=True,
label=_('SAML Service Provider Technical Contact'),
help_text=_('Provide the name and email address of the technical contact for your service provider. Refer to the documentation for example syntax.'),
help_text=_('Provide the name and email address of the technical contact for' ' your service provider. Refer to the documentation' ' for example syntax.'),
category=_('SAML'),
category_slug='saml',
placeholder=collections.OrderedDict([('givenName', 'Technical Contact'), ('emailAddress', 'techsup@example.com')]),
@@ -1383,7 +1395,7 @@ register(
allow_blank=True,
required=True,
label=_('SAML Service Provider Support Contact'),
help_text=_('Provide the name and email address of the support contact for your service provider. Refer to the documentation for example syntax.'),
help_text=_('Provide the name and email address of the support contact for your' ' service provider. Refer to the documentation for' ' example syntax.'),
category=_('SAML'),
category_slug='saml',
placeholder=collections.OrderedDict([('givenName', 'Support Contact'), ('emailAddress', 'support@example.com')]),
@@ -1445,7 +1457,9 @@ register(
allow_null=True,
default={'requestedAuthnContext': False},
label=_('SAML Security Config'),
help_text=_('A dict of key value pairs that are passed to the underlying python-saml security setting https://github.com/onelogin/python-saml#settings'),
help_text=_(
'A dict of key value pairs that are passed to the underlying' ' python-saml security setting' ' https://github.com/onelogin/python-saml#settings'
),
category=_('SAML'),
category_slug='saml',
placeholder=collections.OrderedDict(
@@ -1477,7 +1491,7 @@ register(
allow_null=True,
default=None,
label=_('SAML Service Provider extra configuration data'),
help_text=_('A dict of key value pairs to be passed to the underlying python-saml Service Provider configuration setting.'),
help_text=_('A dict of key value pairs to be passed to the underlying' ' python-saml Service Provider configuration setting.'),
category=_('SAML'),
category_slug='saml',
placeholder=collections.OrderedDict(),

View File

@@ -390,7 +390,7 @@ class LDAPSearchUnionField(fields.ListField):
search_args = []
for i in range(len(data)):
if not isinstance(data[i], list):
raise ValidationError('In order to utilize LDAP Union, input element No. %d should be a search query array.' % (i + 1))
raise ValidationError('In order to utilize LDAP Union, input element No. %d' ' should be a search query array.' % (i + 1))
try:
search_args.append(self.ldap_search_field_class().run_validation(data[i]))
except Exception as e:

View File

@@ -56,7 +56,7 @@ register(
field_class=fields.IntegerField,
min_value=100,
label=_('Max Job Events Retrieved by UI'),
help_text=_('Maximum number of job events for the UI to retrieve within a single request.'),
help_text=_('Maximum number of job events for the UI to retrieve within a ' 'single request.'),
category=_('UI'),
category_slug='ui',
)
@@ -65,7 +65,7 @@ register(
'UI_LIVE_UPDATES_ENABLED',
field_class=fields.BooleanField,
label=_('Enable Live Updates in the UI'),
help_text=_('If disabled, the page will not refresh when events are received. Reloading the page will be required to get the latest details.'),
help_text=_('If disabled, the page will not refresh when events are received. ' 'Reloading the page will be required to get the latest details.'),
category=_('UI'),
category_slug='ui',
)

View File

@@ -43,7 +43,6 @@ function LaunchButton({ resource, children }) {
const [surveyConfig, setSurveyConfig] = useState(null);
const [labels, setLabels] = useState([]);
const [isLaunching, setIsLaunching] = useState(false);
const [resourceCredentials, setResourceCredentials] = useState([]);
const [error, setError] = useState(null);
const handleLaunch = async () => {
@@ -84,13 +83,6 @@ function LaunchButton({ resource, children }) {
setLabels(allLabels);
}
if (launch.ask_credential_on_launch) {
const {
data: { results: templateCredentials },
} = await JobTemplatesAPI.readCredentials(resource.id);
setResourceCredentials(templateCredentials);
}
if (canLaunchWithoutPrompt(launch)) {
await launchWithParams({});
} else {
@@ -216,7 +208,6 @@ function LaunchButton({ resource, children }) {
labels={labels}
onLaunch={launchWithParams}
onCancel={() => setShowLaunchPrompt(false)}
resourceDefaultCredentials={resourceCredentials}
/>
)}
</>

View File

@@ -47,12 +47,6 @@ describe('LaunchButton', () => {
variables_needed_to_start: [],
},
});
JobTemplatesAPI.readCredentials.mockResolvedValue({
data: {
count: 0,
results: [],
},
});
});
afterEach(() => jest.clearAllMocks());

View File

@@ -19,7 +19,6 @@ function PromptModalForm({
labels,
surveyConfig,
instanceGroups,
resourceDefaultCredentials,
}) {
const { setFieldTouched, values } = useFormikContext();
const [showDescription, setShowDescription] = useState(false);
@@ -36,9 +35,9 @@ function PromptModalForm({
surveyConfig,
resource,
labels,
instanceGroups,
resourceDefaultCredentials
instanceGroups
);
const handleSubmit = async () => {
const postValues = {};
const setValue = (key, value) => {

View File

@@ -69,20 +69,6 @@ describe('LaunchPrompt', () => {
spec: [{ type: 'text', variable: 'foo' }],
},
});
JobTemplatesAPI.readCredentials.mockResolvedValue({
data: {
results: [
{
id: 5,
name: 'cred that prompts',
credential_type: 1,
inputs: {
password: 'ASK',
},
},
],
},
});
InstanceGroupsAPI.read.mockResolvedValue({
data: {
results: [
@@ -226,16 +212,6 @@ describe('LaunchPrompt', () => {
],
},
}}
resourceDefaultCredentials={[
{
id: 5,
name: 'cred that prompts',
credential_type: 1,
inputs: {
password: 'ASK',
},
},
]}
onLaunch={noop}
onCancel={noop}
surveyConfig={{
@@ -313,16 +289,6 @@ describe('LaunchPrompt', () => {
resource={resource}
onLaunch={noop}
onCancel={noop}
resourceDefaultCredentials={[
{
id: 5,
name: 'cred that prompts',
credential_type: 1,
inputs: {
password: 'ASK',
},
},
]}
/>
);
});

View File

@@ -1,6 +1,6 @@
import 'styled-components/macro';
import React, { useState, useCallback, useEffect } from 'react';
import { useHistory, useLocation } from 'react-router-dom';
import { useHistory } from 'react-router-dom';
import { t } from '@lingui/macro';
import { useField } from 'formik';
@@ -8,7 +8,7 @@ import styled from 'styled-components';
import { Alert, ToolbarItem } from '@patternfly/react-core';
import { CredentialsAPI, CredentialTypesAPI } from 'api';
import { getSearchableKeys } from 'components/PaginatedTable';
import { getQSConfig, parseQueryString, updateQueryString } from 'util/qs';
import { getQSConfig, parseQueryString } from 'util/qs';
import useRequest from 'hooks/useRequest';
import AnsibleSelect from '../../AnsibleSelect';
import OptionsList from '../../OptionsList';
@@ -31,18 +31,18 @@ function CredentialsStep({
allowCredentialsWithPasswords,
defaultCredentials = [],
}) {
const history = useHistory();
const location = useLocation();
const [field, meta, helpers] = useField({
name: 'credentials',
validate: (val) =>
credentialsValidator(
allowCredentialsWithPasswords,
val,
defaultCredentials ?? []
defaultCredentials
),
});
const [selectedType, setSelectedType] = useState(null);
const history = useHistory();
const {
result: types,
error: typesError,
@@ -104,32 +104,12 @@ function CredentialsStep({
credentialsValidator(
allowCredentialsWithPasswords,
field.value,
defaultCredentials ?? []
defaultCredentials
)
);
/* eslint-disable-next-line react-hooks/exhaustive-deps */
}, []);
const removeAllSearchTerms = (qsConfig) => {
const oldParams = parseQueryString(qsConfig, location.search);
Object.keys(oldParams).forEach((key) => {
oldParams[key] = null;
});
const defaultParams = {
...oldParams,
page: 1,
page_size: 5,
order_by: 'name',
};
const qs = updateQueryString(qsConfig, location.search, defaultParams);
pushHistoryState(qs);
};
const pushHistoryState = (qs) => {
const { pathname } = history.location;
history.push(qs ? `${pathname}?${qs}` : pathname);
};
if (isTypesLoading) {
return <ContentLoading />;
}
@@ -174,7 +154,9 @@ function CredentialsStep({
value={selectedType && selectedType.id}
onChange={(e, id) => {
// Reset query params when the category of credentials is changed
removeAllSearchTerms(QS_CONFIG);
history.replace({
search: '',
});
setSelectedType(types.find((o) => o.id === parseInt(id, 10)));
}}
/>

View File

@@ -168,9 +168,7 @@ describe('CredentialsStep', () => {
test('should reset query params (credential.page) when selected credential type is changed', async () => {
let wrapper;
const history = createMemoryHistory({
initialEntries: [
'?credential.page=2&credential.page_size=5&credential.order_by=name',
],
initialEntries: ['?credential.page=2'],
});
await act(async () => {
wrapper = mountWithContexts(

View File

@@ -46,8 +46,7 @@ export default function useLaunchSteps(
surveyConfig,
resource,
labels,
instanceGroups,
resourceDefaultCredentials
instanceGroups
) {
const [visited, setVisited] = useState({});
const [isReady, setIsReady] = useState(false);
@@ -57,7 +56,7 @@ export default function useLaunchSteps(
useCredentialsStep(
launchConfig,
resource,
resourceDefaultCredentials,
resource.summary_fields.credentials || [],
true
),
useCredentialPasswordsStep(

View File

@@ -122,18 +122,6 @@ function sortWeekday(a, b) {
}
function RunOnDetail({ type, options, prefix }) {
const weekdays = {
sunday: t`Sunday`,
monday: t`Monday`,
tuesday: t`Tuesday`,
wednesday: t`Wednesday`,
thursday: t`Thursday`,
friday: t`Friday`,
saturday: t`Saturday`,
day: t`day`,
weekday: t`weekday`,
weekendDay: t`weekend day`,
};
if (type === 'month') {
if (options.runOn === 'day') {
return (
@@ -144,16 +132,16 @@ function RunOnDetail({ type, options, prefix }) {
/>
);
}
const dayOfWeek = weekdays[options.runOnTheDay];
const dayOfWeek = options.runOnTheDay;
return (
<Detail
label={t`Run on`}
value={
options.runOnTheOccurrence === -1 ? (
options.runOnDayNumber === -1 ? (
t`The last ${dayOfWeek}`
) : (
<SelectOrdinal
value={options.runOnTheOccurrence}
value={options.runOnDayNumber}
one={`The first ${dayOfWeek}`}
two={`The second ${dayOfWeek}`}
_3={`The third ${dayOfWeek}`}
@@ -190,6 +178,18 @@ function RunOnDetail({ type, options, prefix }) {
/>
);
}
const weekdays = {
sunday: t`Sunday`,
monday: t`Monday`,
tuesday: t`Tuesday`,
wednesday: t`Wednesday`,
thursday: t`Thursday`,
friday: t`Friday`,
saturday: t`Saturday`,
day: t`day`,
weekday: t`weekday`,
weekendDay: t`weekend day`,
};
const weekday = weekdays[options.runOnTheDay];
const month = months[options.runOnTheMonth];
return (

View File

@@ -11,8 +11,7 @@ import { JobTemplatesAPI, SchedulesAPI, WorkflowJobTemplatesAPI } from 'api';
import { parseVariableField, jsonToYaml } from 'util/yaml';
import { useConfig } from 'contexts/Config';
import InstanceGroupLabels from 'components/InstanceGroupLabels';
import parseRuleObj, { UnsupportedRRuleError } from '../shared/parseRuleObj';
import UnsupportedRRuleAlert from '../shared/UnsupportedRRuleAlert';
import parseRuleObj from '../shared/parseRuleObj';
import FrequencyDetails from './FrequencyDetails';
import AlertModal from '../../AlertModal';
import { CardBody, CardActionsRow } from '../../Card';
@@ -183,20 +182,8 @@ function ScheduleDetail({ hasDaysToKeepField, schedule, surveyConfig }) {
month: t`Month`,
year: t`Year`,
};
let rruleError;
let frequency = [];
let frequencyOptions = {};
let exceptionFrequency = [];
let exceptionOptions = {};
try {
({ frequency, frequencyOptions, exceptionFrequency, exceptionOptions } =
parseRuleObj(schedule));
} catch (parseRuleError) {
if (parseRuleError instanceof UnsupportedRRuleError) {
rruleError = parseRuleError;
}
}
const { frequency, frequencyOptions, exceptionFrequency, exceptionOptions } =
parseRuleObj(schedule);
const repeatFrequency = frequency.length
? frequency.map((f) => frequencies[f]).join(', ')
: t`None (Run Once)`;
@@ -615,7 +602,6 @@ function ScheduleDetail({ hasDaysToKeepField, schedule, surveyConfig }) {
</PromptDetailList>
</>
)}
{rruleError && <UnsupportedRRuleAlert schedule={schedule} />}
<CardActionsRow>
{summary_fields?.user_capabilities?.edit && (
<Button

View File

@@ -587,31 +587,4 @@ describe('<ScheduleDetail />', () => {
(el) => el.prop('isDisabled') === true
);
});
test('should display warning for unsupported recurrence rules ', async () => {
const unsupportedSchedule = {
...schedule,
rrule:
'DTSTART:20221220T161500Z RRULE:FREQ=HOURLY;INTERVAL=1 EXRULE:FREQ=HOURLY;INTERVAL=1;BYDAY=TU;BYMONTHDAY=1,2,3,4,5,6,7 EXRULE:FREQ=HOURLY;INTERVAL=1;BYDAY=WE;BYMONTHDAY=2,3,4,5,6,7,8',
};
await act(async () => {
wrapper = mountWithContexts(
<Route
path="/templates/job_template/:id/schedules/:scheduleId"
component={() => <ScheduleDetail schedule={unsupportedSchedule} />}
/>,
{
context: {
router: {
history,
route: {
location: history.location,
match: { params: { id: 1 } },
},
},
},
}
);
});
expect(wrapper.find('UnsupportedRRuleAlert').length).toBe(1);
});
});

View File

@@ -1,32 +0,0 @@
import React from 'react';
import styled from 'styled-components';
import { t } from '@lingui/macro';
import { Alert } from '@patternfly/react-core';
const AlertWrapper = styled.div`
margin-top: var(--pf-global--spacer--lg);
margin-bottom: var(--pf-global--spacer--lg);
`;
const RulesTitle = styled.p`
margin-top: var(--pf-global--spacer--lg);
margin-bottom: var(--pf-global--spacer--lg);
font-weight: var(--pf-global--FontWeight--bold);
`;
export default function UnsupportedRRuleAlert({ schedule }) {
return (
<AlertWrapper>
<Alert
isInline
variant="danger"
ouiaId="schedule-warning"
title={t`This schedule uses complex rules that are not supported in the
UI. Please use the API to manage this schedule.`}
/>
<RulesTitle>{t`Schedule Rules`}:</RulesTitle>
<pre css="white-space: pre; font-family: var(--pf-global--FontFamily--monospace)">
{schedule.rrule.split(' ').join('\n')}
</pre>
</AlertWrapper>
);
}

View File

@@ -82,7 +82,11 @@ const frequencyTypes = {
};
function parseRrule(rruleString, schedule, values) {
const { frequency, options } = parseRule(rruleString, schedule);
const { frequency, options } = parseRule(
rruleString,
schedule,
values.exceptionFrequency
);
if (values.frequencyOptions[frequency]) {
throw new UnsupportedRRuleError(
@@ -101,7 +105,11 @@ function parseRrule(rruleString, schedule, values) {
}
function parseExRule(exruleString, schedule, values) {
const { frequency, options } = parseRule(exruleString, schedule);
const { frequency, options } = parseRule(
exruleString,
schedule,
values.exceptionFrequency
);
if (values.exceptionOptions[frequency]) {
throw new UnsupportedRRuleError(
@@ -121,7 +129,7 @@ function parseExRule(exruleString, schedule, values) {
};
}
function parseRule(ruleString, schedule) {
function parseRule(ruleString, schedule, frequencies) {
const {
origOptions: {
bymonth,
@@ -170,6 +178,9 @@ function parseRule(ruleString, schedule) {
throw new Error(`Unexpected rrule frequency: ${freq}`);
}
const frequency = frequencyTypes[freq];
if (frequencies.includes(frequency)) {
throw new Error(`Duplicate frequency types not supported (${frequency})`);
}
if (freq === RRule.WEEKLY && byweekday) {
options.daysOfWeek = byweekday;

View File

@@ -91,11 +91,6 @@ function CredentialEdit({ credential }) {
modifiedData.user = me.id;
}
}
if (credential.kind === 'vault' && !credential.inputs?.vault_id) {
delete modifiedData.inputs.vault_id;
}
const [{ data }] = await Promise.all([
CredentialsAPI.update(credId, modifiedData),
...destroyInputSources(),
@@ -105,7 +100,7 @@ function CredentialEdit({ credential }) {
return data;
},
[me, credId, credential]
[me, credId]
)
);

View File

@@ -47,14 +47,35 @@ class ItemNotDefined(Exception):
class ControllerModule(AnsibleModule):
url = None
AUTH_ARGSPEC = dict(
controller_host=dict(required=False, aliases=['tower_host'], fallback=(env_fallback, ['CONTROLLER_HOST', 'TOWER_HOST'])),
controller_username=dict(required=False, aliases=['tower_username'], fallback=(env_fallback, ['CONTROLLER_USERNAME', 'TOWER_USERNAME'])),
controller_password=dict(no_log=True, aliases=['tower_password'], required=False, fallback=(env_fallback, ['CONTROLLER_PASSWORD', 'TOWER_PASSWORD'])),
validate_certs=dict(type='bool', aliases=['tower_verify_ssl'], required=False, fallback=(env_fallback, ['CONTROLLER_VERIFY_SSL', 'TOWER_VERIFY_SSL'])),
controller_host=dict(
required=False,
aliases=['tower_host'],
fallback=(env_fallback, ['CONTROLLER_HOST', 'TOWER_HOST'])),
controller_username=dict(
required=False,
aliases=['tower_username'],
fallback=(env_fallback, ['CONTROLLER_USERNAME', 'TOWER_USERNAME'])),
controller_password=dict(
no_log=True,
aliases=['tower_password'],
required=False,
fallback=(env_fallback, ['CONTROLLER_PASSWORD', 'TOWER_PASSWORD'])),
validate_certs=dict(
type='bool',
aliases=['tower_verify_ssl'],
required=False,
fallback=(env_fallback, ['CONTROLLER_VERIFY_SSL', 'TOWER_VERIFY_SSL'])),
controller_oauthtoken=dict(
type='raw', no_log=True, aliases=['tower_oauthtoken'], required=False, fallback=(env_fallback, ['CONTROLLER_OAUTH_TOKEN', 'TOWER_OAUTH_TOKEN'])
),
controller_config_file=dict(type='path', aliases=['tower_config_file'], required=False, default=None),
type='raw',
no_log=True,
aliases=['tower_oauthtoken'],
required=False,
fallback=(env_fallback, ['CONTROLLER_OAUTH_TOKEN', 'TOWER_OAUTH_TOKEN'])),
controller_config_file=dict(
type='path',
aliases=['tower_config_file'],
required=False,
default=None),
)
short_params = {
'host': 'controller_host',
@@ -299,7 +320,9 @@ class ControllerAPIModule(ControllerModule):
def __init__(self, argument_spec, direct_params=None, error_callback=None, warn_callback=None, **kwargs):
kwargs['supports_check_mode'] = True
super().__init__(argument_spec=argument_spec, direct_params=direct_params, error_callback=error_callback, warn_callback=warn_callback, **kwargs)
super().__init__(
argument_spec=argument_spec, direct_params=direct_params, error_callback=error_callback, warn_callback=warn_callback, **kwargs
)
self.session = Request(cookies=CookieJar(), validate_certs=self.verify_ssl)
if 'update_secrets' in self.params:
@@ -307,6 +330,11 @@ class ControllerAPIModule(ControllerModule):
else:
self.update_secrets = True
@staticmethod
def param_to_endpoint(name):
exceptions = {'inventory': 'inventories', 'target_team': 'teams', 'workflow': 'workflow_job_templates'}
return exceptions.get(name, '{0}s'.format(name))
@staticmethod
def get_name_field_from_endpoint(endpoint):
return ControllerAPIModule.IDENTITY_FIELDS.get(endpoint, 'name')
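The new param_to_endpoint helper turns a module parameter name into its API endpoint by naive pluralization, with a small exception table for irregular names; given the code above:

    assert ControllerAPIModule.param_to_endpoint('credential') == 'credentials'
    assert ControllerAPIModule.param_to_endpoint('inventory') == 'inventories'
    assert ControllerAPIModule.param_to_endpoint('workflow') == 'workflow_job_templates'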
@@ -377,7 +405,7 @@ class ControllerAPIModule(ControllerModule):
response['json']['next'] = next_page
return response
def get_one(self, endpoint, name_or_id=None, allow_none=True, check_exists=False, **kwargs):
def get_one(self, endpoint, name_or_id=None, allow_none=True, **kwargs):
new_kwargs = kwargs.copy()
if name_or_id:
name_field = self.get_name_field_from_endpoint(endpoint)
@@ -418,11 +446,6 @@ class ControllerAPIModule(ControllerModule):
# Or we weren't running with a or search and just got back too many to begin with.
self.fail_wanted_one(response, endpoint, new_kwargs.get('data'))
if check_exists:
name_field = self.get_name_field_from_endpoint(endpoint)
self.json_output['id'] = response['json']['results'][0]['id']
self.exit_json(**self.json_output)
return response['json']['results'][0]
def fail_wanted_one(self, response, endpoint, query_params):
@@ -430,8 +453,7 @@ class ControllerAPIModule(ControllerModule):
if len(sample['json']['results']) > 1:
sample['json']['results'] = sample['json']['results'][:2] + ['...more results snipped...']
url = self.build_url(endpoint, query_params)
host_length = len(self.host)
display_endpoint = url.geturl()[host_length:] # truncate to not include the base URL
display_endpoint = url.geturl()[len(self.host):] # truncate to not include the base URL
self.fail_json(
msg="Request to {0} returned {1} items, expected 1".format(display_endpoint, response['json']['count']),
query=query_params,
@@ -953,7 +975,11 @@ class ControllerAPIModule(ControllerModule):
# Attempt to delete our current token from /api/v2/tokens/
# Post to the tokens endpoint with basic auth to try to get a token
endpoint = self.url_prefix.rstrip('/') + '/api/v2/tokens/{0}/'.format(self.oauth_token_id)
api_token_url = (self.url._replace(path=endpoint, query=None)).geturl() # in error cases, fail_json exits before exception handling
api_token_url = (
self.url._replace(
path=endpoint, query=None # in error cases, fail_json exits before exception handling
)
).geturl()
try:
self.session.open(
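The token URL rebuild relies on the fact that urllib.parse results are named tuples, so _replace returns a new tuple that geturl() reassembles. A quick illustration (the host and token id are made up):

    from urllib.parse import urlparse

    url = urlparse('https://awx.example.com/custom/?q=1')
    endpoint = '/custom/api/v2/tokens/42/'
    print(url._replace(path=endpoint, query=None).geturl())
    # -> https://awx.example.com/custom/api/v2/tokens/42/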

View File

@@ -60,7 +60,7 @@ options:
description:
- Desired state of the resource.
default: "present"
choices: ["present", "absent", "exists"]
choices: ["present", "absent"]
type: str
skip_authorization:
description:
@@ -106,7 +106,7 @@ def main():
client_type=dict(choices=['public', 'confidential']),
organization=dict(required=True),
redirect_uris=dict(type="list", elements='str'),
state=dict(choices=['present', 'absent', 'exists'], default='present'),
state=dict(choices=['present', 'absent'], default='present'),
skip_authorization=dict(type='bool'),
)
@@ -127,7 +127,7 @@ def main():
org_id = module.resolve_name_to_id('organizations', organization)
# Attempt to look up application based on the provided name and org ID
application = module.get_one('applications', name_or_id=name, check_exists=(state == 'exists'), **{'data': {'organization': org_id}})
application = module.get_one('applications', name_or_id=name, **{'data': {'organization': org_id}})
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this

View File

@@ -247,7 +247,7 @@ def main():
if organization:
lookup_data['organization'] = org_id
credential = module.get_one('credentials', name_or_id=name, check_exists=(state == 'exists'), **{'data': lookup_data})
credential = module.get_one('credentials', name_or_id=name, **{'data': lookup_data})
# Attempt to look up credential to copy based on the provided name
if copy_from:
@@ -265,6 +265,10 @@ def main():
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
module.delete_if_needed(credential)
if state == 'exists' and credential is not None:
# If credential exists and state is exists, we're done here.
module.exit_json(**module.json_output)
# Attempt to look up the related items the user specified (these will fail the module if not found)
if user:
user_id = module.resolve_name_to_id('users', user)
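
Several of the lookups in this hunk go through resolve_name_to_id, which, as the comment notes, fails the module when nothing matches. A plausible sketch of that helper, assuming it simply delegates to get_one with allow_none disabled (the real implementation lives in the shared module_utils and may differ):

    def resolve_name_to_id(self, endpoint, name_or_id):
        # allow_none=False is assumed to make get_one fail the module instead of returning None
        return self.get_one(endpoint, name_or_id=name_or_id, allow_none=False)['id']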

View File

@@ -48,7 +48,7 @@ options:
state:
description:
- Desired state of the resource.
choices: ["present", "absent", "exists"]
choices: ["present", "absent"]
default: "present"
type: str
@@ -80,7 +80,7 @@ def main():
target_credential=dict(required=True),
source_credential=dict(),
metadata=dict(type="dict"),
state=dict(choices=['present', 'absent', 'exists'], default='present'),
state=dict(choices=['present', 'absent'], default='present'),
)
# Create a module for ourselves
@@ -101,7 +101,7 @@ def main():
'target_credential': target_credential_id,
'input_field_name': input_field_name,
}
credential_input_source = module.get_one('credential_input_sources', check_exists=(state == 'exists'), **{'data': lookup_data})
credential_input_source = module.get_one('credential_input_sources', **{'data': lookup_data})
if state == 'absent':
module.delete_if_needed(credential_input_source)

View File

@@ -59,7 +59,7 @@ options:
description:
- Desired state of the resource.
default: "present"
choices: ["present", "absent", "exists"]
choices: ["present", "absent"]
type: str
extends_documentation_fragment: awx.awx.auth
'''
@@ -98,7 +98,7 @@ def main():
kind=dict(choices=list(KIND_CHOICES.keys())),
inputs=dict(type='dict'),
injectors=dict(type='dict'),
state=dict(choices=['present', 'absent', 'exists'], default='present'),
state=dict(choices=['present', 'absent'], default='present'),
)
# Create a module for ourselves
@@ -124,7 +124,7 @@ def main():
credential_type_params['injectors'] = module.params.get('injectors')
# Attempt to look up credential_type based on the provided name
credential_type = module.get_one('credential_types', name_or_id=name, check_exists=(state == 'exists'))
credential_type = module.get_one('credential_types', name_or_id=name)
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this

View File

@@ -50,7 +50,7 @@ options:
state:
description:
- Desired state of the resource.
choices: ["present", "absent", "exists"]
choices: ["present", "absent"]
default: "present"
type: str
pull:
@@ -83,7 +83,7 @@ def main():
description=dict(),
organization=dict(),
credential=dict(),
state=dict(choices=['present', 'absent', 'exists'], default='present'),
state=dict(choices=['present', 'absent'], default='present'),
# NOTE: Default for pull differs from API (which is blank by default)
pull=dict(choices=['always', 'missing', 'never'], default='missing'),
)
@@ -99,7 +99,7 @@ def main():
state = module.params.get('state')
pull = module.params.get('pull')
existing_item = module.get_one('execution_environments', name_or_id=name, check_exists=(state == 'exists'))
existing_item = module.get_one('execution_environments', name_or_id=name)
if state == 'absent':
module.delete_if_needed(existing_item)

View File

@@ -67,7 +67,7 @@ options:
description:
- Desired state of the resource.
default: "present"
choices: ["present", "absent", "exists"]
choices: ["present", "absent"]
type: str
new_name:
description:
@@ -115,7 +115,7 @@ def main():
children=dict(type='list', elements='str', aliases=['groups']),
preserve_existing_hosts=dict(type='bool', default=False),
preserve_existing_children=dict(type='bool', default=False, aliases=['preserve_existing_groups']),
state=dict(choices=['present', 'absent', 'exists'], default='present'),
state=dict(choices=['present', 'absent'], default='present'),
)
# Create a module for ourselves
@@ -135,7 +135,7 @@ def main():
inventory_id = module.resolve_name_to_id('inventories', inventory)
# Attempt to look up the object based on the provided name and inventory ID
group = module.get_one('groups', name_or_id=name, check_exists=(state == 'exists'), **{'data': {'inventory': inventory_id}})
group = module.get_one('groups', name_or_id=name, **{'data': {'inventory': inventory_id}})
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this

View File

@@ -50,7 +50,7 @@ options:
state:
description:
- Desired state of the resource.
choices: ["present", "absent", "exists"]
choices: ["present", "absent"]
default: "present"
type: str
extends_documentation_fragment: awx.awx.auth
@@ -83,7 +83,7 @@ def main():
inventory=dict(required=True),
enabled=dict(type='bool'),
variables=dict(type='dict'),
state=dict(choices=['present', 'absent', 'exists'], default='present'),
state=dict(choices=['present', 'absent'], default='present'),
)
# Create a module for ourselves
@@ -102,7 +102,7 @@ def main():
inventory_id = module.resolve_name_to_id('inventories', inventory)
# Attempt to look up host based on the provided name and inventory ID
host = module.get_one('hosts', name_or_id=name, check_exists=(state == 'exists'), **{'data': {'inventory': inventory_id}})
host = module.get_one('hosts', name_or_id=name, **{'data': {'inventory': inventory_id}})
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this

View File

@@ -81,7 +81,7 @@ options:
state:
description:
- Desired state of the resource.
choices: ["present", "absent", "exists"]
choices: ["present", "absent"]
default: "present"
type: str
extends_documentation_fragment: awx.awx.auth
@@ -107,7 +107,7 @@ def main():
policy_instance_list=dict(type='list', elements='str'),
pod_spec_override=dict(),
instances=dict(required=False, type="list", elements='str'),
state=dict(choices=['present', 'absent', 'exists'], default='present'),
state=dict(choices=['present', 'absent'], default='present'),
)
# Create a module for ourselves
@@ -128,7 +128,7 @@ def main():
state = module.params.get('state')
# Attempt to look up an existing item based on the provided data
existing_item = module.get_one('instance_groups', name_or_id=name, check_exists=(state == 'exists'))
existing_item = module.get_one('instance_groups', name_or_id=name)
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this

View File

@@ -78,7 +78,7 @@ options:
description:
- Desired state of the resource.
default: "present"
choices: ["present", "absent", "exists"]
choices: ["present", "absent"]
type: str
extends_documentation_fragment: awx.awx.auth
'''
@@ -149,7 +149,7 @@ def main():
host_filter=dict(),
instance_groups=dict(type="list", elements='str'),
prevent_instance_group_fallback=dict(type='bool'),
state=dict(choices=['present', 'absent', 'exists'], default='present'),
state=dict(choices=['present', 'absent'], default='present'),
input_inventories=dict(type='list', elements='str'),
)
@@ -172,7 +172,7 @@ def main():
org_id = module.resolve_name_to_id('organizations', organization)
# Attempt to look up inventory based on the provided name and org ID
inventory = module.get_one('inventories', name_or_id=name, check_exists=(state == 'exists'), **{'data': {'organization': org_id}})
inventory = module.get_one('inventories', name_or_id=name, **{'data': {'organization': org_id}})
# Attempt to look up credential to copy based on the provided name
if copy_from:

View File

@@ -118,7 +118,7 @@ options:
description:
- Desired state of the resource.
default: "present"
choices: ["present", "absent", "exists"]
choices: ["present", "absent"]
type: str
notification_templates_started:
description:
@@ -192,7 +192,7 @@ def main():
notification_templates_started=dict(type="list", elements='str'),
notification_templates_success=dict(type="list", elements='str'),
notification_templates_error=dict(type="list", elements='str'),
state=dict(choices=['present', 'absent', 'exists'], default='present'),
state=dict(choices=['present', 'absent'], default='present'),
)
# Create a module for ourselves
@@ -219,7 +219,6 @@ def main():
inventory_source_object = module.get_one(
'inventory_sources',
name_or_id=name,
check_exists=(state == 'exists'),
**{
'data': {
'inventory': inventory_object['id'],

View File

@@ -264,6 +264,7 @@ options:
description:
- Maximum time in seconds to wait for a job to finish (server-side).
type: int
default: 0
job_slice_count:
description:
- The number of jobs to slice into at runtime. Will cause the Job Template to launch a workflow if value is greater than 1.
@@ -294,7 +295,7 @@ options:
description:
- Desired state of the resource.
default: "present"
choices: ["present", "absent", "exists"]
choices: ["present", "absent"]
type: str
notification_templates_started:
description:
@@ -443,7 +444,7 @@ def main():
notification_templates_success=dict(type="list", elements='str'),
notification_templates_error=dict(type="list", elements='str'),
prevent_instance_group_fallback=dict(type="bool"),
state=dict(choices=['present', 'absent', 'exists'], default='present'),
state=dict(choices=['present', 'absent'], default='present'),
)
# Create a module for ourselves
@@ -483,7 +484,7 @@ def main():
new_fields['execution_environment'] = module.resolve_name_to_id('execution_environments', ee)
# Attempt to look up an existing item based on the provided data
existing_item = module.get_one('job_templates', name_or_id=name, check_exists=(state == 'exists'), **{'data': search_fields})
existing_item = module.get_one('job_templates', name_or_id=name, **{'data': search_fields})
# Attempt to look up credential to copy based on the provided name
if copy_from:

View File

@@ -41,7 +41,7 @@ options:
description:
- Desired state of the resource.
default: "present"
choices: ["present", "exists"]
choices: ["present"]
type: str
extends_documentation_fragment: awx.awx.auth
'''
@@ -62,7 +62,7 @@ def main():
name=dict(required=True),
new_name=dict(),
organization=dict(required=True),
state=dict(choices=['present', 'exists'], default='present'),
state=dict(choices=['present'], default='present'),
)
# Create a module for ourselves
@@ -72,7 +72,6 @@ def main():
name = module.params.get('name')
new_name = module.params.get("new_name")
organization = module.params.get('organization')
state = module.params.get("state")
# Attempt to look up the related items the user specified (these will fail the module if not found)
organization_id = None
@@ -83,7 +82,6 @@ def main():
existing_item = module.get_one(
'labels',
name_or_id=name,
check_exists=(state == 'exists'),
**{
'data': {
'organization': organization_id,

View File

@@ -97,7 +97,7 @@ options:
description:
- Desired state of the resource.
default: "present"
choices: ["present", "absent", "exists"]
choices: ["present", "absent"]
type: str
extends_documentation_fragment: awx.awx.auth
'''
@@ -222,7 +222,7 @@ def main():
notification_type=dict(choices=['email', 'grafana', 'irc', 'mattermost', 'pagerduty', 'rocketchat', 'slack', 'twilio', 'webhook']),
notification_configuration=dict(type='dict'),
messages=dict(type='dict'),
state=dict(choices=['present', 'absent', 'exists'], default='present'),
state=dict(choices=['present', 'absent'], default='present'),
)
# Create a module for ourselves
@@ -248,7 +248,6 @@ def main():
existing_item = module.get_one(
'notification_templates',
name_or_id=name,
check_exists=(state == 'exists'),
**{
'data': {
'organization': organization_id,

View File

@@ -52,7 +52,7 @@ options:
description:
- Desired state of the resource.
default: "present"
choices: ["present", "absent", "exists"]
choices: ["present", "absent"]
type: str
instance_groups:
description:
@@ -130,7 +130,7 @@ def main():
notification_templates_error=dict(type="list", elements='str'),
notification_templates_approvals=dict(type="list", elements='str'),
galaxy_credentials=dict(type="list", elements='str'),
state=dict(choices=['present', 'absent', 'exists'], default='present'),
state=dict(choices=['present', 'absent'], default='present'),
)
# Create a module for ourselves
@@ -146,7 +146,7 @@ def main():
state = module.params.get('state')
# Attempt to look up organization based on the provided name
organization = module.get_one('organizations', name_or_id=name, check_exists=(state == 'exists'))
organization = module.get_one('organizations', name_or_id=name)
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this

View File

@@ -122,7 +122,7 @@ options:
description:
- Desired state of the resource.
default: "present"
choices: ["present", "absent", "exists"]
choices: ["present", "absent"]
type: str
wait:
description:
@@ -272,7 +272,7 @@ def main():
notification_templates_started=dict(type="list", elements='str'),
notification_templates_success=dict(type="list", elements='str'),
notification_templates_error=dict(type="list", elements='str'),
state=dict(choices=['present', 'absent', 'exists'], default='present'),
state=dict(choices=['present', 'absent'], default='present'),
wait=dict(type='bool', default=True),
update_project=dict(default=False, type='bool'),
interval=dict(default=2.0, type='float'),
@@ -313,7 +313,7 @@ def main():
lookup_data['organization'] = org_id
# Attempt to look up project based on the provided name and org ID
project = module.get_one('projects', name_or_id=name, check_exists=(state == 'exists'), data=lookup_data)
project = module.get_one('projects', name_or_id=name, data=lookup_data)
# Attempt to look up credential to copy based on the provided name
if copy_from:

View File

@@ -24,23 +24,11 @@ options:
user:
description:
- User that receives the permissions specified by the role.
- Deprecated, use 'users'.
type: str
users:
description:
- Users that receive the permissions specified by the role.
type: list
elements: str
team:
description:
- Team that receives the permissions specified by the role.
- Deprecated, use 'teams'.
type: str
teams:
description:
- Teams that receive the permissions specified by the role.
type: list
elements: str
role:
description:
- The role type to grant/revoke.
@@ -173,9 +161,7 @@ def main():
argument_spec = dict(
user=dict(),
users=dict(type='list', elements='str'),
team=dict(),
teams=dict(type='list', elements='str'),
role=dict(
choices=[
"admin",
@@ -233,9 +219,9 @@ def main():
'projects': 'project',
'target_teams': 'target_team',
'workflows': 'workflow',
'users': 'user',
'teams': 'team',
}
# Singular parameters
resource_param_keys = ('user', 'team', 'lookup_organization')
resources = {}
for resource_group, old_name in resource_list_param_keys.items():
@@ -243,9 +229,9 @@ def main():
resources.setdefault(resource_group, []).extend(module.params.get(resource_group))
if module.params.get(old_name) is not None:
resources.setdefault(resource_group, []).append(module.params.get(old_name))
if module.params.get('lookup_organization') is not None:
resources['lookup_organization'] = module.params.get('lookup_organization')
for resource_group in resource_param_keys:
if module.params.get(resource_group) is not None:
resources[resource_group] = module.params.get(resource_group)
# Change workflows and target_teams key to its endpoint name.
if 'workflows' in resources:
resources['workflow_job_templates'] = resources.pop('workflows')
@@ -262,13 +248,28 @@ def main():
# separate actors from resources
actor_data = {}
missing_items = []
for key in ('user', 'team'):
if key in resources:
if key == 'user':
lookup_data_populated = {}
else:
lookup_data_populated = lookup_data
# Attempt to look up project based on the provided name or ID and lookup data
data = module.get_one('{0}s'.format(key), name_or_id=resources[key], data=lookup_data_populated)
if data is None:
module.fail_json(
msg='Unable to find {0} with name: {1}'.format(key, resources[key]), changed=False
)
else:
actor_data[key] = module.get_one('{0}s'.format(key), name_or_id=resources[key], data=lookup_data_populated)
resources.pop(key)
# Lookup Resources
resource_data = {}
for key, value in resources.items():
for resource in value:
# Attempt to look up project based on the provided name or ID and lookup data
if key in resources:
if key == 'organizations' or key == 'users':
if key == 'organizations':
lookup_data_populated = {}
else:
lookup_data_populated = lookup_data
@@ -276,18 +277,14 @@ def main():
if data is None:
missing_items.append(resource)
else:
if key == 'users' or key == 'teams':
actor_data.setdefault(key, []).append(data)
else:
resource_data.setdefault(key, []).append(data)
resource_data.setdefault(key, []).append(data)
if len(missing_items) > 0:
module.fail_json(
msg='There were {0} missing items, missing items: {1}'.format(len(missing_items), missing_items), changed=False
)
# build association agenda
associations = {}
for actor_type, actors in actor_data.items():
for actor_type, actor in actor_data.items():
for key, value in resource_data.items():
for resource in value:
resource_roles = resource['summary_fields']['object_roles']
@@ -297,10 +294,9 @@ def main():
msg='Resource {0} has no role {1}, available roles: {2}'.format(resource['url'], role_field, available_roles), changed=False
)
role_data = resource_roles[role_field]
endpoint = '/roles/{0}/{1}/'.format(role_data['id'], actor_type)
endpoint = '/roles/{0}/{1}/'.format(role_data['id'], module.param_to_endpoint(actor_type))
associations.setdefault(endpoint, [])
for actor in actors:
associations[endpoint].append(actor['id'])
associations[endpoint].append(actor['id'])
# perform associations
for association_endpoint, new_association_list in associations.items():
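
The tail of this hunk reduces to the agenda-building loop below: every granted resource exposes its object_roles under summary_fields, and the agenda maps a /roles/<id>/<actor endpoint>/ URL to the actor ids to associate there. A sketch of the post-change (singular-actor) shape, assuming param_to_endpoint maps an actor key such as 'user' to its endpoint name 'users':

    associations = {}
    for actor_type, actor in actor_data.items():  # e.g. {'user': {...user record...}}
        for key, value in resource_data.items():
            for resource in value:
                role_data = resource['summary_fields']['object_roles'][role_field]
                endpoint = '/roles/{0}/{1}/'.format(role_data['id'], module.param_to_endpoint(actor_type))
                associations.setdefault(endpoint, []).append(actor['id'])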

View File

@@ -146,7 +146,7 @@ options:
state:
description:
- Desired state of the resource.
choices: ["present", "absent", "exists"]
choices: ["present", "absent"]
default: "present"
type: str
extends_documentation_fragment: awx.awx.auth
@@ -220,7 +220,7 @@ def main():
unified_job_template=dict(),
organization=dict(),
enabled=dict(type='bool'),
state=dict(choices=['present', 'absent', 'exists'], default='present'),
state=dict(choices=['present', 'absent'], default='present'),
)
# Create a module for ourselves
@@ -265,13 +265,8 @@ def main():
search_fields['name'] = unified_job_template
unified_job_template_id = module.get_one('unified_job_templates', **{'data': search_fields})['id']
sched_search_fields['unified_job_template'] = unified_job_template_id
# Attempt to look up an existing item based on the provided data
existing_item = module.get_one('schedules', name_or_id=name, check_exists=(state == 'exists'), **{'data': sched_search_fields})
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
module.delete_if_needed(existing_item)
existing_item = module.get_one('schedules', name_or_id=name, **{'data': sched_search_fields})
association_fields = {}
@@ -348,14 +343,18 @@ def main():
else:
new_fields['execution_environment'] = ee['id']
# If the state was present and we can let the module build or update the existing item, this will return on its own
module.create_or_update_if_needed(
existing_item,
new_fields,
endpoint='schedules',
item_type='schedule',
associations=association_fields,
)
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
module.delete_if_needed(existing_item)
elif state == 'present':
# If the state was present and we can let the module build or update the existing item, this will return on its own
module.create_or_update_if_needed(
existing_item,
new_fields,
endpoint='schedules',
item_type='schedule',
associations=association_fields,
)
if __name__ == '__main__':
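
After this reshuffle the schedule module follows the common two-branch shape: delete_if_needed for absent, create_or_update_if_needed for present. A sketch of what that second helper is assumed to do; the shipped helper in the shared module_utils takes more options, and both paths exit the module on their own:

    def create_or_update_if_needed(self, existing_item, new_fields, endpoint=None, item_type='unknown', associations=None):
        # assumed behavior: update the record when the lookup found one, otherwise create it
        if existing_item:
            return self.update_if_needed(existing_item, new_fields, associations=associations)
        return self.create_if_needed(new_fields, endpoint=endpoint, item_type=item_type, associations=associations)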

View File

@@ -42,7 +42,7 @@ options:
state:
description:
- Desired state of the resource.
choices: ["present", "absent", "exists"]
choices: ["present", "absent"]
default: "present"
type: str
extends_documentation_fragment: awx.awx.auth
@@ -69,7 +69,7 @@ def main():
new_name=dict(),
description=dict(),
organization=dict(required=True),
state=dict(choices=['present', 'absent', 'exists'], default='present'),
state=dict(choices=['present', 'absent'], default='present'),
)
# Create a module for ourselves
@@ -86,7 +86,7 @@ def main():
org_id = module.resolve_name_to_id('organizations', organization)
# Attempt to look up team based on the provided name and org ID
team = module.get_one('teams', name_or_id=name, check_exists=(state == 'exists'), **{'data': {'organization': org_id}})
team = module.get_one('teams', name_or_id=name, **{'data': {'organization': org_id}})
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this

View File

@@ -69,7 +69,7 @@ options:
state:
description:
- Desired state of the resource.
choices: ["present", "absent", "exists"]
choices: ["present", "absent"]
default: "present"
type: str
extends_documentation_fragment: awx.awx.auth
@@ -137,7 +137,7 @@ def main():
password=dict(no_log=True),
update_secrets=dict(type='bool', default=True, no_log=False),
organization=dict(),
state=dict(choices=['present', 'absent', 'exists'], default='present'),
state=dict(choices=['present', 'absent'], default='present'),
)
# Create a module for ourselves
@@ -158,7 +158,7 @@ def main():
# Attempt to look up the related items the user specified (these will fail the module if not found)
# Attempt to look up an existing item based on the provided data
existing_item = module.get_one('users', name_or_id=username, check_exists=(state == 'exists'))
existing_item = module.get_one('users', name_or_id=username)
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this

View File

@@ -144,7 +144,6 @@ options:
choices:
- present
- absent
- exists
default: "present"
type: str
notification_templates_started:
@@ -668,7 +667,8 @@ def create_workflow_nodes(module, response, workflow_nodes, workflow_id):
inv_lookup_data = {}
if 'organization' in workflow_node['inventory']:
inv_lookup_data['organization'] = module.resolve_name_to_id('organizations', workflow_node['inventory']['organization']['name'])
workflow_node_fields['inventory'] = module.get_one('inventories', name_or_id=workflow_node['inventory']['name'], data=inv_lookup_data)['id']
workflow_node_fields['inventory'] = module.get_one(
'inventories', name_or_id=workflow_node['inventory']['name'], data=inv_lookup_data)['id']
else:
workflow_node_fields['inventory'] = module.get_one('inventories', name_or_id=workflow_node['inventory'])['id']
@@ -843,7 +843,7 @@ def main():
notification_templates_approvals=dict(type="list", elements='str'),
workflow_nodes=dict(type='list', elements='dict', aliases=['schema']),
destroy_current_nodes=dict(type='bool', default=False, aliases=['destroy_current_schema']),
state=dict(choices=['present', 'absent', 'exists'], default='present'),
state=dict(choices=['present', 'absent'], default='present'),
)
# Create a module for ourselves
@@ -871,7 +871,7 @@ def main():
search_fields['organization'] = new_fields['organization'] = organization_id
# Attempt to look up an existing item based on the provided data
existing_item = module.get_one('workflow_job_templates', name_or_id=name, check_exists=(state == 'exists'), **{'data': search_fields})
existing_item = module.get_one('workflow_job_templates', name_or_id=name, **{'data': search_fields})
# Attempt to look up credential to copy based on the provided name
if copy_from:

View File

@@ -179,7 +179,7 @@ options:
state:
description:
- Desired state of the resource.
choices: ["present", "absent", "exists"]
choices: ["present", "absent"]
default: "present"
type: str
extends_documentation_fragment: awx.awx.auth
@@ -285,7 +285,7 @@ def main():
job_slice_count=dict(type='int'),
labels=dict(type='list', elements='str'),
timeout=dict(type='int'),
state=dict(choices=['present', 'absent', 'exists'], default='present'),
state=dict(choices=['present', 'absent'], default='present'),
)
mutually_exclusive = [("unified_job_template", "approval_node")]
required_if = [
@@ -327,7 +327,7 @@ def main():
search_fields['workflow_job_template'] = new_fields['workflow_job_template'] = workflow_job_template_id
# Attempt to look up an existing item based on the provided data
existing_item = module.get_one('workflow_job_template_nodes', check_exists=(state == 'exists'), **{'data': search_fields})
existing_item = module.get_one('workflow_job_template_nodes', **{'data': search_fields})
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this

View File

@@ -24,43 +24,6 @@
that:
- "result is changed"
- name: Run an application with exists
application:
name: "{{ app1_name }}"
authorization_grant_type: "password"
client_type: "public"
organization: "Default"
state: exists
register: result
- assert:
that:
- "result is not changed"
- name: Delete our application
application:
name: "{{ app1_name }}"
organization: "Default"
state: absent
register: result
- assert:
that:
- "result is changed"
- name: Run an application with exists
application:
name: "{{ app1_name }}"
authorization_grant_type: "password"
client_type: "public"
organization: "Default"
state: exists
register: result
- assert:
that:
- "result is changed"
- name: Delete our application
application:
name: "{{ app1_name }}"

View File

@@ -47,42 +47,6 @@
that:
- "result is changed"
- name: Create an Org-specific credential with an ID with exists
credential:
name: "{{ ssh_cred_name1 }}"
organization: Default
credential_type: Machine
state: exists
register: result
- assert:
that:
- "result is not changed"
- name: Delete an Org-specific credential with an ID
credential:
name: "{{ ssh_cred_name1 }}"
organization: Default
credential_type: Machine
state: absent
register: result
- assert:
that:
- "result is changed"
- name: Create an Org-specific credential with an ID with exists
credential:
name: "{{ ssh_cred_name1 }}"
organization: Default
credential_type: Machine
state: exists
register: result
- assert:
that:
- "result is changed"
- name: Delete a Org-specific credential
credential:
name: "{{ ssh_cred_name1 }}"

View File

@@ -54,51 +54,6 @@
that:
- "result is changed"
- name: Add credential Input Source with exists
credential_input_source:
input_field_name: password
target_credential: "{{ target_cred_result.id }}"
source_credential: "{{ src_cred_result.id }}"
metadata:
object_query: "Safe=MY_SAFE;Object=AWX-user"
object_query_format: "Exact"
state: exists
register: result
- assert:
that:
- "result is not changed"
- name: Delete credential Input Source
credential_input_source:
input_field_name: password
target_credential: "{{ target_cred_result.id }}"
source_credential: "{{ src_cred_result.id }}"
metadata:
object_query: "Safe=MY_SAFE;Object=AWX-user"
object_query_format: "Exact"
state: absent
register: result
- assert:
that:
- "result is changed"
- name: Add credential Input Source with exists
credential_input_source:
input_field_name: password
target_credential: "{{ target_cred_result.id }}"
source_credential: "{{ src_cred_result.id }}"
metadata:
object_query: "Safe=MY_SAFE;Object=AWX-user"
object_query_format: "Exact"
state: exists
register: result
- assert:
that:
- "result is changed"
- name: Add Second credential Lookup
credential:
description: Credential for Testing Source Change

View File

@@ -22,48 +22,6 @@
that:
- "result is changed"
- name: Add Tower credential type with exists
credential_type:
description: Credential type for Test
name: "{{ cred_type_name }}"
kind: cloud
inputs: {"fields": [{"type": "string", "id": "username", "label": "Username"}, {"secret": true, "type": "string", "id": "password", "label": "Password"}], "required": ["username", "password"]}
injectors: {"extra_vars": {"test": "foo"}}
state: exists
register: result
- assert:
that:
- "result is not changed"
- name: Delete the credential type
credential_type:
description: Credential type for Test
name: "{{ cred_type_name }}"
kind: cloud
inputs: {"fields": [{"type": "string", "id": "username", "label": "Username"}, {"secret": true, "type": "string", "id": "password", "label": "Password"}], "required": ["username", "password"]}
injectors: {"extra_vars": {"test": "foo"}}
state: absent
register: result
- assert:
that:
- "result is changed"
- name: Add Tower credential type with exists
credential_type:
description: Credential type for Test
name: "{{ cred_type_name }}"
kind: cloud
inputs: {"fields": [{"type": "string", "id": "username", "label": "Username"}, {"secret": true, "type": "string", "id": "password", "label": "Password"}], "required": ["username", "password"]}
injectors: {"extra_vars": {"test": "foo"}}
state: exists
register: result
- assert:
that:
- "result is changed"
- name: Rename Tower credential type
credential_type:
name: "{{ cred_type_name }}"

View File

@@ -22,48 +22,6 @@
that:
- "result is changed"
- name: Add an EE with exists
execution_environment:
name: "{{ ee_name }}"
description: "EE for Testing"
image: quay.io/ansible/awx-ee
pull: always
organization: Default
state: exists
register: result
- assert:
that:
- "result is not changed"
- name: Delete an EE
execution_environment:
name: "{{ ee_name }}"
description: "EE for Testing"
image: quay.io/ansible/awx-ee
pull: always
organization: Default
state: absent
register: result
- assert:
that:
- "result is changed"
- name: Add an EE with exists
execution_environment:
name: "{{ ee_name }}"
description: "EE for Testing"
image: quay.io/ansible/awx-ee
pull: always
organization: Default
state: exists
register: result
- assert:
that:
- "result is changed"
- name: Associate the Test EE with Default Org (this should fail)
execution_environment:
name: "{{ ee_name }}"

View File

@@ -19,9 +19,9 @@
name: "{{ inv_name }}"
organization: Default
state: present
registuer: result
register: result
- name: Create Group 1
- name: Create a Group
group:
name: "{{ group_name1 }}"
inventory: "{{ result.id }}"
@@ -34,46 +34,7 @@
that:
- "result is changed"
- name: Create Group 1 with exists
group:
name: "{{ group_name1 }}"
inventory: "{{ inv_name }}"
state: exists
variables:
foo: bar
register: result
- assert:
that:
- "result is not changed"
- name: Delete Group 1
group:
name: "{{ group_name1 }}"
inventory: "{{ inv_name }}"
state: absent
variables:
foo: bar
register: result
- assert:
that:
- "result is changed"
- name: Create Group 1 with exists
group:
name: "{{ group_name1 }}"
inventory: "{{ inv_name }}"
state: exists
variables:
foo: bar
register: result
- assert:
that:
- "result is changed"
- name: Create Group 2
- name: Create a Group
group:
name: "{{ group_name2 }}"
inventory: "{{ inv_name }}"
@@ -86,7 +47,7 @@
that:
- "result is changed"
- name: Create Group 3
- name: Create a Group
group:
name: "{{ group_name3 }}"
inventory: "{{ inv_name }}"
@@ -108,7 +69,7 @@
- "{{ host_name2 }}"
- "{{ host_name3 }}"
- name: Create Group 1 with hosts and sub group of Group 2
- name: Create a Group with hosts and sub group
group:
name: "{{ group_name1 }}"
inventory: "{{ inv_name }}"
@@ -122,7 +83,7 @@
foo: bar
register: result
- name: Create Group 1 with hosts and sub group
- name: Create a Group with hosts and sub group
group:
name: "{{ group_name1 }}"
inventory: "{{ inv_name }}"
@@ -143,7 +104,18 @@
that:
- group1_host_count == "3"
- name: Delete Group 2
- name: Delete a Group
group:
name: "{{ group_name1 }}"
inventory: "{{ inv_name }}"
state: absent
register: result
- assert:
that:
- "result is changed"
- name: Delete a Group
group:
name: "{{ group_name2 }}"
inventory: "{{ inv_name }}"
@@ -155,7 +127,7 @@
that:
- "result is not changed"
- name: Delete Group 3
- name: Delete a Group
group:
name: "{{ group_name3 }}"
inventory: "{{ inv_name }}"
@@ -164,19 +136,7 @@
- assert:
that:
- "result is changed"
# If we delete group 1 first it will delete group 2 and 3
- name: Delete Group 1
group:
name: "{{ group_name1 }}"
inventory: "{{ inv_name }}"
state: absent
register: result
- assert:
that:
- "result is changed"
- "result is not changed"
- name: Check module fails with correct msg
group:

View File

@@ -29,45 +29,6 @@
that:
- "result is changed"
- name: Create a Host with exists
host:
name: "{{ host_name }}"
inventory: "{{ inv_name }}"
state: exists
variables:
foo: bar
register: result
- assert:
that:
- "result is not changed"
- name: Delete a Host
host:
name: "{{ host_name }}"
inventory: "{{ inv_name }}"
state: absent
variables:
foo: bar
register: result
- assert:
that:
- "result is changed"
- name: Create a Host with exists
host:
name: "{{ host_name }}"
inventory: "{{ inv_name }}"
state: exists
variables:
foo: bar
register: result
- assert:
that:
- "result is changed"
- name: Delete a Host
host:
name: "{{ result.id }}"

View File

@@ -38,42 +38,6 @@
that:
- "result is changed"
- name: Create an Instance Group with exists
instance_group:
name: "{{ group_name1 }}"
policy_instance_percentage: 34
policy_instance_minimum: 12
state: exists
register: result
- assert:
that:
- "result is not changed"
- name: Delete an Instance Group
instance_group:
name: "{{ group_name1 }}"
policy_instance_percentage: 34
policy_instance_minimum: 12
state: absent
register: result
- assert:
that:
- "result is changed"
- name: Create an Instance Group with exists
instance_group:
name: "{{ group_name1 }}"
policy_instance_percentage: 34
policy_instance_minimum: 12
state: exists
register: result
- assert:
that:
- "result is changed"
- name: Update an Instance Group
instance_group:
name: "{{ result.id }}"

View File

@@ -51,45 +51,6 @@
that:
- "result is changed"
- name: Create an Inventory with exists
inventory:
name: "{{ inv_name1 }}"
organization: Default
instance_groups:
- "{{ group_name1 }}"
state: exists
register: result
- assert:
that:
- "result is not changed"
- name: Delete an Inventory
inventory:
name: "{{ inv_name1 }}"
organization: Default
instance_groups:
- "{{ group_name1 }}"
state: absent
register: result
- assert:
that:
- "result is changed"
- name: Create an Inventory with exists
inventory:
name: "{{ inv_name1 }}"
organization: Default
instance_groups:
- "{{ group_name1 }}"
state: exists
register: result
- assert:
that:
- "result is changed"
- name: Test Inventory module idempotency
inventory:
name: "{{ result.id }}"

Some files were not shown because too many files have changed in this diff.