Clean up string formatting issues from black migration

Shane McDonald 2023-05-10 08:19:23 -04:00
parent 7f020052db
commit f1196fc019
46 changed files with 139 additions and 155 deletions
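
For context, a minimal sketch of the pattern being cleaned up (illustrative only; the example string is copied from the first hunk below). Python concatenates adjacent string literals at compile time, which was widely used to wrap long messages across lines. After the black migration reflowed those calls onto single lines, the now-pointless splits survived as pairs of adjacent literals; this commit merges each pair into one literal.

# Adjacent string literals are joined lexically, so all three forms produce
# the same string and behavior is unchanged by the merge.
wrapped = (
    'Cannot apply role_level filter to this list because its model '
    'does not use roles for access control.'
)
reflowed = 'Cannot apply role_level filter to this list because its model ' 'does not use roles for access control.'
merged = 'Cannot apply role_level filter to this list because its model does not use roles for access control.'
assert wrapped == reflowed == merged

# The split form also hides a missing space at the join, which is why a few of
# the merged strings below read like 'word.Next' instead of 'word. Next'.
assert 'containerized.' 'Containerized groups' == 'containerized.Containerized groups'

Merging the literals keeps the message readable in one place and makes any missing separator at the old join point visible at a glance.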

View File

@ -347,7 +347,7 @@ class FieldLookupBackend(BaseFilterBackend):
args.append(Q(**{k: v}))
for role_name in role_filters:
if not hasattr(queryset.model, 'accessible_pk_qs'):
raise ParseError(_('Cannot apply role_level filter to this list because its model ' 'does not use roles for access control.'))
raise ParseError(_('Cannot apply role_level filter to this list because its model does not use roles for access control.'))
args.append(Q(pk__in=queryset.model.accessible_pk_qs(request.user, role_name)))
if or_filters:
q = Q()

View File

@ -169,7 +169,7 @@ class APIView(views.APIView):
self.__init_request_error__ = exc
except UnsupportedMediaType as exc:
exc.detail = _(
'You did not use correct Content-Type in your HTTP request. ' 'If you are using our REST API, the Content-Type must be application/json'
'You did not use correct Content-Type in your HTTP request. If you are using our REST API, the Content-Type must be application/json'
)
self.__init_request_error__ = exc
return drf_request

View File

@ -71,7 +71,7 @@ class Metadata(metadata.SimpleMetadata):
'url': _('URL for this {}.'),
'related': _('Data structure with URLs of related resources.'),
'summary_fields': _(
'Data structure with name/description for related resources. ' 'The output for some objects may be limited for performance reasons.'
'Data structure with name/description for related resources. The output for some objects may be limited for performance reasons.'
),
'created': _('Timestamp when this {} was created.'),
'modified': _('Timestamp when this {} was last modified.'),

View File

@ -220,7 +220,7 @@ class CopySerializer(serializers.Serializer):
view = self.context.get('view', None)
obj = view.get_object()
if name == obj.name:
raise serializers.ValidationError(_('The original object is already named {}, a copy from' ' it cannot have the same name.'.format(name)))
raise serializers.ValidationError(_('The original object is already named {}, a copy from it cannot have the same name.'.format(name)))
return attrs
@ -760,7 +760,7 @@ class UnifiedJobTemplateSerializer(BaseSerializer):
class UnifiedJobSerializer(BaseSerializer):
show_capabilities = ['start', 'delete']
event_processing_finished = serializers.BooleanField(
help_text=_('Indicates whether all of the events generated by this ' 'unified job have been saved to the database.'), read_only=True
help_text=_('Indicates whether all of the events generated by this unified job have been saved to the database.'), read_only=True
)
class Meta:
@ -1579,7 +1579,7 @@ class ProjectPlaybooksSerializer(ProjectSerializer):
class ProjectInventoriesSerializer(ProjectSerializer):
inventory_files = serializers.ReadOnlyField(help_text=_('Array of inventory files and directories available within this project, ' 'not comprehensive.'))
inventory_files = serializers.ReadOnlyField(help_text=_('Array of inventory files and directories available within this project, not comprehensive.'))
class Meta:
model = Project
@ -2905,7 +2905,7 @@ class CredentialSerializer(BaseSerializer):
):
if getattr(self.instance, related_objects).count() > 0:
raise ValidationError(
_('You cannot change the credential type of the credential, as it may break the functionality' ' of the resources using it.')
_('You cannot change the credential type of the credential, as it may break the functionality of the resources using it.')
)
return credential_type
@ -2925,7 +2925,7 @@ class CredentialSerializerCreate(CredentialSerializer):
default=None,
write_only=True,
allow_null=True,
help_text=_('Write-only field used to add user to owner role. If provided, ' 'do not give either team or organization. Only valid for creation.'),
help_text=_('Write-only field used to add user to owner role. If provided, do not give either team or organization. Only valid for creation.'),
)
team = serializers.PrimaryKeyRelatedField(
queryset=Team.objects.all(),
@ -2933,14 +2933,14 @@ class CredentialSerializerCreate(CredentialSerializer):
default=None,
write_only=True,
allow_null=True,
help_text=_('Write-only field used to add team to owner role. If provided, ' 'do not give either user or organization. Only valid for creation.'),
help_text=_('Write-only field used to add team to owner role. If provided, do not give either user or organization. Only valid for creation.'),
)
organization = serializers.PrimaryKeyRelatedField(
queryset=Organization.objects.all(),
required=False,
default=None,
allow_null=True,
help_text=_('Inherit permissions from organization roles. If provided on creation, ' 'do not give either user or team.'),
help_text=_('Inherit permissions from organization roles. If provided on creation, do not give either user or team.'),
)
class Meta:
@ -2962,7 +2962,7 @@ class CredentialSerializerCreate(CredentialSerializer):
if len(owner_fields) > 1:
received = ", ".join(sorted(owner_fields))
raise serializers.ValidationError(
{"detail": _("Only one of 'user', 'team', or 'organization' should be provided, " "received {} fields.".format(received))}
{"detail": _("Only one of 'user', 'team', or 'organization' should be provided, received {} fields.".format(received))}
)
if attrs.get('team'):
@ -3622,7 +3622,7 @@ class SystemJobSerializer(UnifiedJobSerializer):
try:
return obj.result_stdout
except StdoutMaxBytesExceeded as e:
return _("Standard Output too large to display ({text_size} bytes), " "only download supported for sizes over {supported_size} bytes.").format(
return _("Standard Output too large to display ({text_size} bytes), only download supported for sizes over {supported_size} bytes.").format(
text_size=e.total, supported_size=e.supported
)
@ -4536,7 +4536,7 @@ class JobLaunchSerializer(BaseSerializer):
if cred.unique_hash() in provided_mapping.keys():
continue # User replaced credential with new of same type
errors.setdefault('credentials', []).append(
_('Removing {} credential at launch time without replacement is not supported. ' 'Provided list lacked credential(s): {}.').format(
_('Removing {} credential at launch time without replacement is not supported. Provided list lacked credential(s): {}.').format(
cred.unique_hash(display=True), ', '.join([str(c) for c in removed_creds])
)
)
@ -5019,7 +5019,7 @@ class NotificationTemplateSerializer(BaseSerializer):
for subevent in event_messages:
if subevent not in ('running', 'approved', 'timed_out', 'denied'):
error_list.append(
_("Workflow Approval event '{}' invalid, must be one of " "'running', 'approved', 'timed_out', or 'denied'").format(subevent)
_("Workflow Approval event '{}' invalid, must be one of 'running', 'approved', 'timed_out', or 'denied'").format(subevent)
)
continue
subevent_messages = event_messages[subevent]
@ -5559,7 +5559,7 @@ class InstanceGroupSerializer(BaseSerializer):
instances = serializers.SerializerMethodField()
is_container_group = serializers.BooleanField(
required=False,
help_text=_('Indicates whether instances in this group are containerized.' 'Containerized groups have a designated Openshift or Kubernetes cluster.'),
help_text=_('Indicates whether instances in this group are containerized.Containerized groups have a designated Openshift or Kubernetes cluster.'),
)
# NOTE: help_text is duplicated from field definitions, no obvious way of
# both defining field details here and also getting the field's help_text
@ -5570,7 +5570,7 @@ class InstanceGroupSerializer(BaseSerializer):
required=False,
initial=0,
label=_('Policy Instance Percentage'),
help_text=_("Minimum percentage of all instances that will be automatically assigned to " "this group when new instances come online."),
help_text=_("Minimum percentage of all instances that will be automatically assigned to this group when new instances come online."),
)
policy_instance_minimum = serializers.IntegerField(
default=0,
@ -5578,7 +5578,7 @@ class InstanceGroupSerializer(BaseSerializer):
required=False,
initial=0,
label=_('Policy Instance Minimum'),
help_text=_("Static minimum number of Instances that will be automatically assign to " "this group when new instances come online."),
help_text=_("Static minimum number of Instances that will be automatically assign to this group when new instances come online."),
)
max_concurrent_jobs = serializers.IntegerField(
default=0,

View File

@ -565,7 +565,7 @@ class LaunchConfigCredentialsBase(SubListAttachDetachAPIView):
if self.relationship not in ask_mapping:
return {"msg": _("Related template cannot accept {} on launch.").format(self.relationship)}
elif sub.passwords_needed:
return {"msg": _("Credential that requires user input on launch " "cannot be used in saved launch configuration.")}
return {"msg": _("Credential that requires user input on launch cannot be used in saved launch configuration.")}
ask_field_name = ask_mapping[self.relationship]
@ -2501,7 +2501,7 @@ class JobTemplateSurveySpec(GenericAPIView):
return Response(
dict(
error=_(
"$encrypted$ is a reserved keyword for password question defaults, " "survey question {idx} is type {survey_item[type]}."
"$encrypted$ is a reserved keyword for password question defaults, survey question {idx} is type {survey_item[type]}."
).format(**context)
),
status=status.HTTP_400_BAD_REQUEST,
@ -4056,7 +4056,7 @@ class UnifiedJobStdout(RetrieveAPIView):
return super(UnifiedJobStdout, self).retrieve(request, *args, **kwargs)
except models.StdoutMaxBytesExceeded as e:
response_message = _(
"Standard Output too large to display ({text_size} bytes), " "only download supported for sizes over {supported_size} bytes."
"Standard Output too large to display ({text_size} bytes), only download supported for sizes over {supported_size} bytes."
).format(text_size=e.total, supported_size=e.supported)
if request.accepted_renderer.format == 'json':
return Response({'range': {'start': 0, 'end': 1, 'absolute_end': 1}, 'content': response_message})

View File

@ -50,7 +50,7 @@ class UnifiedJobDeletionMixin(object):
return Response({"error": _("Job has not finished processing events.")}, status=status.HTTP_400_BAD_REQUEST)
else:
# if it has been > 1 minute, events are probably lost
logger.warning('Allowing deletion of {} through the API without all events ' 'processed.'.format(obj.log_format))
logger.warning('Allowing deletion of {} through the API without all events processed.'.format(obj.log_format))
# Manually cascade delete events if unpartitioned job
if obj.has_unpartitioned_events:

View File

@ -114,7 +114,7 @@ class WebhookReceiverBase(APIView):
# Ensure that the full contents of the request are captured for multiple uses.
request.body
logger.debug("headers: {}\n" "data: {}\n".format(request.headers, request.data))
logger.debug("headers: {}\ndata: {}\n".format(request.headers, request.data))
obj = self.get_object()
self.check_signature(obj)

View File

@ -35,7 +35,7 @@ class TestStringListBooleanField:
field = StringListBooleanField()
with pytest.raises(ValidationError) as e:
field.to_internal_value(value)
assert e.value.detail[0] == "Expected None, True, False, a string or list " "of strings but got {} instead.".format(type(value))
assert e.value.detail[0] == "Expected None, True, False, a string or list of strings but got {} instead.".format(type(value))
@pytest.mark.parametrize("value_in, value_known", FIELD_VALUES)
def test_to_representation_valid(self, value_in, value_known):
@ -48,7 +48,7 @@ class TestStringListBooleanField:
field = StringListBooleanField()
with pytest.raises(ValidationError) as e:
field.to_representation(value)
assert e.value.detail[0] == "Expected None, True, False, a string or list " "of strings but got {} instead.".format(type(value))
assert e.value.detail[0] == "Expected None, True, False, a string or list of strings but got {} instead.".format(type(value))
class TestListTuplesField:
@ -67,7 +67,7 @@ class TestListTuplesField:
field = ListTuplesField()
with pytest.raises(ValidationError) as e:
field.to_internal_value(value)
assert e.value.detail[0] == "Expected a list of tuples of max length 2 " "but got {} instead.".format(t)
assert e.value.detail[0] == "Expected a list of tuples of max length 2 but got {} instead.".format(t)
class TestStringListPathField:

View File

@ -2234,7 +2234,7 @@ class WorkflowJobAccess(BaseAccess):
if not node_access.can_add({'reference_obj': node}):
wj_add_perm = False
if not wj_add_perm and self.save_messages:
self.messages['workflow_job_template'] = _('You do not have permission to the workflow job ' 'resources required for relaunch.')
self.messages['workflow_job_template'] = _('You do not have permission to the workflow job resources required for relaunch.')
return wj_add_perm
def can_cancel(self, obj):

View File

@ -87,7 +87,7 @@ class RecordedQueryLog(object):
)
log.commit()
log.execute(
'INSERT INTO queries (pid, version, argv, time, sql, explain, bt) ' 'VALUES (?, ?, ?, ?, ?, ?, ?);',
'INSERT INTO queries (pid, version, argv, time, sql, explain, bt) VALUES (?, ?, ?, ?, ?, ?, ?);',
(os.getpid(), version, ' '.join(sys.argv), seconds, sql, explain, bt),
)
log.commit()

View File

@ -800,7 +800,7 @@ class CredentialTypeInjectorField(JSONSchemaField):
def validate_env_var_allowed(self, env_var):
if env_var.startswith('ANSIBLE_'):
raise django_exceptions.ValidationError(
_('Environment variable {} may affect Ansible configuration so its ' 'use is not allowed in credentials.').format(env_var),
_('Environment variable {} may affect Ansible configuration so its use is not allowed in credentials.').format(env_var),
code='invalid',
params={'value': env_var},
)

View File

@ -23,7 +23,7 @@ class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('--days', dest='days', type=int, default=90, metavar='N', help='Remove activity stream events more than N days old')
parser.add_argument('--dry-run', dest='dry_run', action='store_true', default=False, help='Dry run mode (show items that would ' 'be removed)')
parser.add_argument('--dry-run', dest='dry_run', action='store_true', default=False, help='Dry run mode (show items that would be removed)')
def init_logging(self):
log_levels = dict(enumerate([logging.ERROR, logging.INFO, logging.DEBUG, 0]))

View File

@ -152,7 +152,7 @@ class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('--days', dest='days', type=int, default=90, metavar='N', help='Remove jobs/updates executed more than N days ago. Defaults to 90.')
parser.add_argument('--dry-run', dest='dry_run', action='store_true', default=False, help='Dry run mode (show items that would ' 'be removed)')
parser.add_argument('--dry-run', dest='dry_run', action='store_true', default=False, help='Dry run mode (show items that would be removed)')
parser.add_argument('--jobs', dest='only_jobs', action='store_true', default=False, help='Remove jobs')
parser.add_argument('--ad-hoc-commands', dest='only_ad_hoc_commands', action='store_true', default=False, help='Remove ad hoc commands')
parser.add_argument('--project-updates', dest='only_project_updates', action='store_true', default=False, help='Remove project updates')

View File

@ -44,7 +44,7 @@ class Command(BaseCommand):
'- To list all (now deprecated) custom virtual environments run:',
'awx-manage list_custom_venvs',
'',
'- To export the contents of a (deprecated) virtual environment, ' 'run the following command while supplying the path as an argument:',
'- To export the contents of a (deprecated) virtual environment, run the following command while supplying the path as an argument:',
'awx-manage export_custom_venv /path/to/venv',
'',
'- Run these commands with `-q` to remove tool tips.',

View File

@ -13,7 +13,7 @@ class Command(BaseCommand):
Deprovision a cluster node
"""
help = 'Remove instance from the database. ' 'Specify `--hostname` to use this command.'
help = 'Remove instance from the database. Specify `--hostname` to use this command.'
def add_arguments(self, parser):
parser.add_argument('--hostname', dest='hostname', type=str, help='Hostname used during provisioning')

View File

@ -22,7 +22,7 @@ class Command(BaseCommand):
'# Discovered Virtual Environments:',
'\n'.join(venvs),
'',
'- To export the contents of a (deprecated) virtual environment, ' 'run the following command while supplying the path as an argument:',
'- To export the contents of a (deprecated) virtual environment, run the following command while supplying the path as an argument:',
'awx-manage export_custom_venv /path/to/venv',
'',
'- To view the connections a (deprecated) virtual environment had in the database, run the following command while supplying the path as an argument:',

View File

@ -122,7 +122,7 @@ class URLModificationMiddleware(MiddlewareMixin):
field_class=fields.DictField,
read_only=True,
label=_('Formats of all available named urls'),
help_text=_('Read-only list of key-value pairs that shows the standard format of all ' 'available named URLs.'),
help_text=_('Read-only list of key-value pairs that shows the standard format of all available named URLs.'),
category=_('Named URL'),
category_slug='named-url',
)

View File

@ -158,7 +158,7 @@ class ec2(PluginFileInjector):
return {
# vars that change
'ec2_block_devices': (
"dict(block_device_mappings | map(attribute='device_name') | list | zip(block_device_mappings " "| map(attribute='ebs.volume_id') | list))"
"dict(block_device_mappings | map(attribute='device_name') | list | zip(block_device_mappings | map(attribute='ebs.volume_id') | list))"
),
'ec2_dns_name': 'public_dns_name',
'ec2_group_name': 'placement.group_name',
@ -635,7 +635,7 @@ class satellite6(PluginFileInjector):
"environment": {
"prefix": "{}environment_".format(group_prefix),
"separator": "",
"key": "foreman['environment_name'] | lower | regex_replace(' ', '') | " "regex_replace('[^A-Za-z0-9_]', '_') | regex_replace('none', '')",
"key": "foreman['environment_name'] | lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9_]', '_') | regex_replace('none', '')",
},
"location": {
"prefix": "{}location_".format(group_prefix),
@ -656,7 +656,7 @@ class satellite6(PluginFileInjector):
"content_view": {
"prefix": "{}content_view_".format(group_prefix),
"separator": "",
"key": "foreman['content_facet_attributes']['content_view_name'] | " "lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9_]', '_')",
"key": "foreman['content_facet_attributes']['content_view_name'] | lower | regex_replace(' ', '') | regex_replace('[^A-Za-z0-9_]', '_')",
},
}

View File

@ -91,7 +91,7 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
related_name='credentials',
null=False,
on_delete=models.CASCADE,
help_text=_('Specify the type of credential you want to create. Refer ' 'to the documentation for details on each type.'),
help_text=_('Specify the type of credential you want to create. Refer to the documentation for details on each type.'),
)
managed = models.BooleanField(default=False, editable=False)
organization = models.ForeignKey(
@ -103,7 +103,7 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
related_name='credentials',
)
inputs = CredentialInputField(
blank=True, default=dict, help_text=_('Enter inputs using either JSON or YAML syntax. ' 'Refer to the documentation for example syntax.')
blank=True, default=dict, help_text=_('Enter inputs using either JSON or YAML syntax. Refer to the documentation for example syntax.')
)
admin_role = ImplicitRoleField(
parent_role=[
@ -343,12 +343,12 @@ class CredentialType(CommonModelNameNotUnique):
managed = models.BooleanField(default=False, editable=False)
namespace = models.CharField(max_length=1024, null=True, default=None, editable=False)
inputs = CredentialTypeInputField(
blank=True, default=dict, help_text=_('Enter inputs using either JSON or YAML syntax. ' 'Refer to the documentation for example syntax.')
blank=True, default=dict, help_text=_('Enter inputs using either JSON or YAML syntax. Refer to the documentation for example syntax.')
)
injectors = CredentialTypeInjectorField(
blank=True,
default=dict,
help_text=_('Enter injectors using either JSON or YAML syntax. ' 'Refer to the documentation for example syntax.'),
help_text=_('Enter injectors using either JSON or YAML syntax. Refer to the documentation for example syntax.'),
)
@classmethod
@ -602,9 +602,7 @@ ManagedCredentialType(
'id': 'become_method',
'label': gettext_noop('Privilege Escalation Method'),
'type': 'string',
'help_text': gettext_noop(
'Specify a method for "become" operations. This is ' 'equivalent to specifying the --become-method ' 'Ansible parameter.'
),
'help_text': gettext_noop('Specify a method for "become" operations. This is equivalent to specifying the --become-method Ansible parameter.'),
},
{
'id': 'become_username',
@ -746,7 +744,7 @@ ManagedCredentialType(
'id': 'host',
'label': gettext_noop('Host (Authentication URL)'),
'type': 'string',
'help_text': gettext_noop('The host to authenticate with. For example, ' 'https://openstack.business.com/v2.0/'),
'help_text': gettext_noop('The host to authenticate with. For example, https://openstack.business.com/v2.0/'),
},
{
'id': 'project',
@ -797,7 +795,7 @@ ManagedCredentialType(
'id': 'host',
'label': gettext_noop('VCenter Host'),
'type': 'string',
'help_text': gettext_noop('Enter the hostname or IP address that corresponds ' 'to your VMware vCenter.'),
'help_text': gettext_noop('Enter the hostname or IP address that corresponds to your VMware vCenter.'),
},
{'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'},
{
@ -822,7 +820,7 @@ ManagedCredentialType(
'id': 'host',
'label': gettext_noop('Satellite 6 URL'),
'type': 'string',
'help_text': gettext_noop('Enter the URL that corresponds to your Red Hat ' 'Satellite 6 server. For example, https://satellite.example.org'),
'help_text': gettext_noop('Enter the URL that corresponds to your Red Hat Satellite 6 server. For example, https://satellite.example.org'),
},
{'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'},
{
@ -847,7 +845,7 @@ ManagedCredentialType(
'id': 'username',
'label': gettext_noop('Service Account Email Address'),
'type': 'string',
'help_text': gettext_noop('The email address assigned to the Google Compute ' 'Engine service account.'),
'help_text': gettext_noop('The email address assigned to the Google Compute Engine service account.'),
},
{
'id': 'project',
@ -867,7 +865,7 @@ ManagedCredentialType(
'format': 'ssh_private_key',
'secret': True,
'multiline': True,
'help_text': gettext_noop('Paste the contents of the PEM file associated ' 'with the service account email.'),
'help_text': gettext_noop('Paste the contents of the PEM file associated with the service account email.'),
},
],
'required': ['username', 'ssh_key_data'],
@ -885,7 +883,7 @@ ManagedCredentialType(
'id': 'subscription',
'label': gettext_noop('Subscription ID'),
'type': 'string',
'help_text': gettext_noop('Subscription ID is an Azure construct, which is ' 'mapped to a username.'),
'help_text': gettext_noop('Subscription ID is an Azure construct, which is mapped to a username.'),
},
{'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'},
{
@ -906,7 +904,7 @@ ManagedCredentialType(
'id': 'cloud_environment',
'label': gettext_noop('Azure Cloud Environment'),
'type': 'string',
'help_text': gettext_noop('Environment variable AZURE_CLOUD_ENVIRONMENT when' ' using Azure GovCloud or Azure stack.'),
'help_text': gettext_noop('Environment variable AZURE_CLOUD_ENVIRONMENT when using Azure GovCloud or Azure stack.'),
},
],
'required': ['subscription'],
@ -1037,7 +1035,7 @@ ManagedCredentialType(
'label': gettext_noop('Username'),
'type': 'string',
'help_text': gettext_noop(
'Red Hat Ansible Automation Platform username id to authenticate as.' 'This should not be set if an OAuth token is being used.'
'Red Hat Ansible Automation Platform username id to authenticate as.This should not be set if an OAuth token is being used.'
),
},
{
@ -1051,7 +1049,7 @@ ManagedCredentialType(
'label': gettext_noop('OAuth Token'),
'type': 'string',
'secret': True,
'help_text': gettext_noop('An OAuth token to use to authenticate with.' 'This should not be set if username/password are being used.'),
'help_text': gettext_noop('An OAuth token to use to authenticate with.This should not be set if username/password are being used.'),
},
{'id': 'verify_ssl', 'label': gettext_noop('Verify SSL'), 'type': 'boolean', 'secret': False},
],
@ -1162,7 +1160,7 @@ ManagedCredentialType(
'id': 'auth_url',
'label': gettext_noop('Auth Server URL'),
'type': 'string',
'help_text': gettext_noop('The URL of a Keycloak server token_endpoint, if using ' 'SSO auth.'),
'help_text': gettext_noop('The URL of a Keycloak server token_endpoint, if using SSO auth.'),
},
{
'id': 'token',

View File

@ -106,28 +106,28 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
has_active_failures = models.BooleanField(
default=False,
editable=False,
help_text=_('This field is deprecated and will be removed in a future release. ' 'Flag indicating whether any hosts in this inventory have failed.'),
help_text=_('This field is deprecated and will be removed in a future release. Flag indicating whether any hosts in this inventory have failed.'),
)
total_hosts = models.PositiveIntegerField(
default=0,
editable=False,
help_text=_('This field is deprecated and will be removed in a future release. ' 'Total number of hosts in this inventory.'),
help_text=_('This field is deprecated and will be removed in a future release. Total number of hosts in this inventory.'),
)
hosts_with_active_failures = models.PositiveIntegerField(
default=0,
editable=False,
help_text=_('This field is deprecated and will be removed in a future release. ' 'Number of hosts in this inventory with active failures.'),
help_text=_('This field is deprecated and will be removed in a future release. Number of hosts in this inventory with active failures.'),
)
total_groups = models.PositiveIntegerField(
default=0,
editable=False,
help_text=_('This field is deprecated and will be removed in a future release. ' 'Total number of groups in this inventory.'),
help_text=_('This field is deprecated and will be removed in a future release. Total number of groups in this inventory.'),
)
has_inventory_sources = models.BooleanField(
default=False,
editable=False,
help_text=_(
'This field is deprecated and will be removed in a future release. ' 'Flag indicating whether this inventory has any external inventory sources.'
'This field is deprecated and will be removed in a future release. Flag indicating whether this inventory has any external inventory sources.'
),
)
total_inventory_sources = models.PositiveIntegerField(
@ -424,7 +424,7 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
for t in tasks:
t.task_impact = t._get_task_impact()
UnifiedJob.objects.bulk_update(tasks, ['task_impact'])
logger.debug("Finished updating inventory computed fields, pk={0}, in " "{1:.3f} seconds".format(self.pk, time.time() - start_time))
logger.debug("Finished updating inventory computed fields, pk={0}, in {1:.3f} seconds".format(self.pk, time.time() - start_time))
def websocket_emit_status(self, status):
connection.on_commit(
@ -1055,16 +1055,16 @@ class InventorySourceOptions(BaseModel):
# the actual inventory source being used (Amazon requires Amazon
# credentials; Rackspace requires Rackspace credentials; etc...)
if source.replace('ec2', 'aws') != cred.kind:
return _('Cloud-based inventory sources (such as %s) require ' 'credentials for the matching cloud service.') % source
return _('Cloud-based inventory sources (such as %s) require credentials for the matching cloud service.') % source
# Allow an EC2 source to omit the credential. If Tower is running on
# an EC2 instance with an IAM Role assigned, boto will use credentials
# from the instance metadata instead of those explicitly provided.
elif source in CLOUD_PROVIDERS and source != 'ec2':
return _('Credential is required for a cloud source.')
elif source == 'custom' and cred and cred.credential_type.kind in ('scm', 'ssh', 'insights', 'vault'):
return _('Credentials of type machine, source control, insights and vault are ' 'disallowed for custom inventory sources.')
return _('Credentials of type machine, source control, insights and vault are disallowed for custom inventory sources.')
elif source == 'scm' and cred and cred.credential_type.kind in ('insights', 'vault'):
return _('Credentials of type insights and vault are ' 'disallowed for scm inventory sources.')
return _('Credentials of type insights and vault are disallowed for scm inventory sources.')
return None
def get_cloud_credential(self):

View File

@ -101,7 +101,7 @@ class JobOptions(BaseModel):
max_length=1024,
default='',
blank=True,
help_text=_('Branch to use in job run. Project default used if blank. ' 'Only allowed if project allow_override field is set to true.'),
help_text=_('Branch to use in job run. Project default used if blank. Only allowed if project allow_override field is set to true.'),
)
forks = models.PositiveIntegerField(
blank=True,
@ -253,7 +253,7 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
job_slice_count = models.PositiveIntegerField(
blank=True,
default=1,
help_text=_("The number of jobs to slice into at runtime. " "Will cause the Job Template to launch a workflow if value is greater than 1."),
help_text=_("The number of jobs to slice into at runtime. Will cause the Job Template to launch a workflow if value is greater than 1."),
)
admin_role = ImplicitRoleField(parent_role=['organization.job_template_admin_role'])
@ -596,12 +596,12 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
job_slice_number = models.PositiveIntegerField(
blank=True,
default=0,
help_text=_("If part of a sliced job, the ID of the inventory slice operated on. " "If not part of sliced job, parameter is not used."),
help_text=_("If part of a sliced job, the ID of the inventory slice operated on. If not part of sliced job, parameter is not used."),
)
job_slice_count = models.PositiveIntegerField(
blank=True,
default=1,
help_text=_("If ran as part of sliced jobs, the total number of slices. " "If 1, job is not part of a sliced job."),
help_text=_("If ran as part of sliced jobs, the total number of slices. If 1, job is not part of a sliced job."),
)
def _get_parent_field_name(self):

View File

@ -675,4 +675,4 @@ class WebhookMixin(models.Model):
if response.status_code < 400:
logger.debug("Webhook status update sent.")
else:
logger.error("Posting webhook status failed, code: {}\n" "{}\n" "Payload sent: {}".format(response.status_code, response.text, json.dumps(data)))
logger.error("Posting webhook status failed, code: {}\n" "{}\nPayload sent: {}".format(response.status_code, response.text, json.dumps(data)))

View File

@ -74,7 +74,7 @@ class ProjectOptions(models.Model):
return []
local_path = models.CharField(
max_length=1024, blank=True, help_text=_('Local path (relative to PROJECTS_ROOT) containing ' 'playbooks and related files for this project.')
max_length=1024, blank=True, help_text=_('Local path (relative to PROJECTS_ROOT) containing playbooks and related files for this project.')
)
scm_type = models.CharField(
@ -276,11 +276,11 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn
scm_update_cache_timeout = models.PositiveIntegerField(
default=0,
blank=True,
help_text=_('The number of seconds after the last project update ran that a new ' 'project update will be launched as a job dependency.'),
help_text=_('The number of seconds after the last project update ran that a new project update will be launched as a job dependency.'),
)
allow_override = models.BooleanField(
default=False,
help_text=_('Allow changing the SCM branch or revision in a job template ' 'that uses this project.'),
help_text=_('Allow changing the SCM branch or revision in a job template that uses this project.'),
)
# credential (keys) used to validate content signature

View File

@ -82,7 +82,7 @@ class WorkflowNodeBase(CreatedModifiedModel, LaunchTimeConfig):
related_name='%(class)ss_always',
)
all_parents_must_converge = models.BooleanField(
default=False, help_text=_("If enabled then the node will only run if all of the parent nodes " "have met the criteria to reach this node")
default=False, help_text=_("If enabled then the node will only run if all of the parent nodes have met the criteria to reach this node")
)
unified_job_template = models.ForeignKey(
'UnifiedJobTemplate',
@ -181,7 +181,7 @@ class WorkflowJobTemplateNode(WorkflowNodeBase):
max_length=512,
default=uuid4,
blank=False,
help_text=_('An identifier for this node that is unique within its workflow. ' 'It is copied to workflow job nodes corresponding to this node.'),
help_text=_('An identifier for this node that is unique within its workflow. It is copied to workflow job nodes corresponding to this node.'),
)
instance_groups = OrderedManyToManyField(
'InstanceGroup',
@ -334,7 +334,7 @@ class WorkflowJobNode(WorkflowNodeBase):
accepted_fields, ignored_fields, errors = ujt_obj._accept_or_ignore_job_kwargs(**node_prompts_data)
if errors:
logger.info(
_('Bad launch configuration starting template {template_pk} as part of ' 'workflow {workflow_pk}. Errors:\n{error_text}').format(
_('Bad launch configuration starting template {template_pk} as part of workflow {workflow_pk}. Errors:\n{error_text}').format(
template_pk=ujt_obj.pk, workflow_pk=self.pk, error_text=errors
)
)
@ -647,7 +647,7 @@ class WorkflowJob(UnifiedJob, WorkflowJobOptions, SurveyJobMixin, JobNotificatio
null=True,
default=None,
on_delete=models.SET_NULL,
help_text=_("If automatically created for a sliced job run, the job template " "the workflow job was created from."),
help_text=_("If automatically created for a sliced job run, the job template the workflow job was created from."),
)
is_sliced_job = models.BooleanField(default=False)
is_bulk_job = models.BooleanField(default=False)
@ -714,7 +714,7 @@ class WorkflowJob(UnifiedJob, WorkflowJobOptions, SurveyJobMixin, JobNotificatio
wj = self.get_workflow_job()
while wj and wj.workflow_job_template_id:
if wj.pk in wj_ids:
logger.critical('Cycles detected in the workflow jobs graph, ' 'this is not normal and suggests task manager degeneracy.')
logger.critical('Cycles detected in the workflow jobs graph, this is not normal and suggests task manager degeneracy.')
break
wj_ids.add(wj.pk)
ancestors.append(wj.workflow_job_template)

View File

@ -8,7 +8,7 @@ class CustomNotificationBase(object):
DEFAULT_APPROVAL_RUNNING_MSG = 'The approval node "{{ approval_node_name }}" needs review. This node can be viewed at: {{ workflow_url }}'
DEFAULT_APPROVAL_RUNNING_BODY = (
'The approval node "{{ approval_node_name }}" needs review. ' 'This approval node can be viewed at: {{ workflow_url }}\n\n{{ job_metadata }}'
'The approval node "{{ approval_node_name }}" needs review. This approval node can be viewed at: {{ workflow_url }}\n\n{{ job_metadata }}'
)
DEFAULT_APPROVAL_APPROVED_MSG = 'The approval node "{{ approval_node_name }}" was approved. {{ workflow_url }}'

View File

@ -32,7 +32,7 @@ class WebhookBackend(AWXBaseEmailBackend, CustomNotificationBase):
"success": {"body": DEFAULT_BODY},
"error": {"body": DEFAULT_BODY},
"workflow_approval": {
"running": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" needs review. ' 'This node can be viewed at: {{ workflow_url }}"}'},
"running": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" needs review. This node can be viewed at: {{ workflow_url }}"}'},
"approved": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was approved. {{ workflow_url }}"}'},
"timed_out": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" has timed out. {{ workflow_url }}"}'},
"denied": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was denied. {{ workflow_url }}"}'},

View File

@ -77,7 +77,7 @@ def test_credential_validation_error_with_multiple_owner_fields(post, admin, ali
}
response = post(reverse('api:credential_list'), params, admin)
assert response.status_code == 400
assert response.data['detail'][0] == ("Only one of 'user', 'team', or 'organization' should be provided, " "received organization, team, user fields.")
assert response.data['detail'][0] == ("Only one of 'user', 'team', or 'organization' should be provided, received organization, team, user fields.")
@pytest.mark.django_db
@ -925,7 +925,7 @@ def test_credential_type_mutability(patch, organization, admin, credentialtype_s
response = _change_credential_type()
assert response.status_code == 400
expected = ['You cannot change the credential type of the credential, ' 'as it may break the functionality of the resources using it.']
expected = ['You cannot change the credential type of the credential, as it may break the functionality of the resources using it.']
assert response.data['credential_type'] == expected
response = patch(reverse('api:credential_detail', kwargs={'pk': cred.pk}), {'name': 'Worst credential ever'}, admin)
@ -962,7 +962,7 @@ def test_vault_credential_type_mutability(patch, organization, admin, credential
response = _change_credential_type()
assert response.status_code == 400
expected = ['You cannot change the credential type of the credential, ' 'as it may break the functionality of the resources using it.']
expected = ['You cannot change the credential type of the credential, as it may break the functionality of the resources using it.']
assert response.data['credential_type'] == expected
response = patch(reverse('api:credential_detail', kwargs={'pk': cred.pk}), {'name': 'Worst credential ever'}, admin)
@ -994,7 +994,7 @@ def test_cloud_credential_type_mutability(patch, organization, admin, credential
response = _change_credential_type()
assert response.status_code == 400
expected = ['You cannot change the credential type of the credential, ' 'as it may break the functionality of the resources using it.']
expected = ['You cannot change the credential type of the credential, as it may break the functionality of the resources using it.']
assert response.data['credential_type'] == expected
response = patch(reverse('api:credential_detail', kwargs={'pk': cred.pk}), {'name': 'Worst credential ever'}, admin)

View File

@ -121,7 +121,7 @@ def read_content(private_data_dir, raw_env, inventory_update):
break
alias = 'file_reference_{}'.format(i)
else:
raise RuntimeError('Test not able to cope with >10 references by env vars. ' 'Something probably went very wrong.')
raise RuntimeError('Test not able to cope with >10 references by env vars. Something probably went very wrong.')
file_aliases[abs_file_path] = alias
for env_key in inverse_env[runner_path]:
env[env_key] = '{{{{ {} }}}}'.format(alias)
@ -234,7 +234,7 @@ def test_inventory_update_injected_content(this_kind, inventory, fake_credential
source_dir = os.path.join(base_dir, this_kind) # this_kind is a global
if not os.path.exists(source_dir):
raise FileNotFoundError('Maybe you never made reference files? ' 'MAKE_INVENTORY_REFERENCE_FILES=true py.test ...\noriginal: {}')
raise FileNotFoundError('Maybe you never made reference files? MAKE_INVENTORY_REFERENCE_FILES=true py.test ...\noriginal: {}')
files_dir = os.path.join(source_dir, 'files')
try:
expected_file_list = os.listdir(files_dir)

View File

@ -371,7 +371,7 @@ class TestExtraVarSanitation(TestJobExecution):
# are deemed trustable, because they can only be added by users w/ enough
# privilege to add/modify a Job Template)
UNSAFE = '{{ lookup(' 'pipe' ',' 'ls -la' ') }}'
UNSAFE = "{{ lookup('pipe', 'ls -la') }}"
def test_vars_unsafe_by_default(self, job, private_data_dir, mock_me):
job.created_by = User(pk=123, username='angry-spud')

View File

@ -88,6 +88,6 @@ def test_global_creation_always_possible(all_views):
creatable_view = View
if not creatable or not global_view:
continue
assert 'POST' in global_view().allowed_methods, 'Resource {} should be creatable in global list view {}. ' 'Can be created now in {}'.format(
assert 'POST' in global_view().allowed_methods, 'Resource {} should be creatable in global list view {}. Can be created now in {}'.format(
model, global_view, creatable_view
)

View File

@ -93,7 +93,7 @@ class TestSmartFilterQueryFromString:
@pytest.mark.parametrize(
"filter_string",
[
'ansible_facts__facts__facts__blank=' 'ansible_facts__a__b__c__ space =ggg',
'ansible_facts__facts__facts__blank=ansible_facts__a__b__c__ space =ggg',
],
)
def test_invalid_filter_strings(self, mock_get_host_model, filter_string):
@ -104,7 +104,7 @@ class TestSmartFilterQueryFromString:
@pytest.mark.parametrize(
"filter_string",
[
'created_by__password__icontains=pbkdf2' 'search=foo or created_by__password__icontains=pbkdf2',
'created_by__password__icontains=pbkdf2search=foo or created_by__password__icontains=pbkdf2',
'created_by__password__icontains=pbkdf2 or search=foo',
],
)

View File

@ -716,7 +716,7 @@ def parse_yaml_or_json(vars_str, silent_failure=True):
if silent_failure:
return {}
raise ParseError(
_('Cannot parse as JSON (error: {json_error}) or ' 'YAML (error: {yaml_error}).').format(json_error=str(json_err), yaml_error=str(yaml_err))
_('Cannot parse as JSON (error: {json_error}) or YAML (error: {yaml_error}).').format(json_error=str(json_err), yaml_error=str(yaml_err))
)
return vars_dict

View File

@ -253,7 +253,7 @@ def dict_to_mem_data(data, inventory=None):
if isinstance(hv, dict):
host.variables.update(hv)
else:
logger.warning('Expected dict of vars for ' 'host "%s", got %s instead', hk, str(type(hv)))
logger.warning('Expected dict of vars for host "%s", got %s instead', hk, str(type(hv)))
group.add_host(host)
elif isinstance(hosts, (list, tuple)):
for hk in hosts:
@ -262,13 +262,13 @@ def dict_to_mem_data(data, inventory=None):
continue
group.add_host(host)
else:
logger.warning('Expected dict or list of "hosts" for ' 'group "%s", got %s instead', k, str(type(hosts)))
logger.warning('Expected dict or list of "hosts" for group "%s", got %s instead', k, str(type(hosts)))
# Process group variables.
vars = v.get('vars', {})
if isinstance(vars, dict):
group.variables.update(vars)
else:
logger.warning('Expected dict of vars for ' 'group "%s", got %s instead', k, str(type(vars)))
logger.warning('Expected dict of vars for group "%s", got %s instead', k, str(type(vars)))
# Process child groups.
children = v.get('children', [])
if isinstance(children, (list, tuple)):
@ -277,7 +277,7 @@ def dict_to_mem_data(data, inventory=None):
if child and c != 'ungrouped':
group.add_child_group(child)
else:
logger.warning('Expected list of children for ' 'group "%s", got %s instead', k, str(type(children)))
logger.warning('Expected list of children for group "%s", got %s instead', k, str(type(children)))
# Load host names from a list.
elif isinstance(v, (list, tuple)):
@ -288,7 +288,7 @@ def dict_to_mem_data(data, inventory=None):
group.add_host(host)
else:
logger.warning('')
logger.warning('Expected dict or list for group "%s", ' 'got %s instead', k, str(type(v)))
logger.warning('Expected dict or list for group "%s", got %s instead', k, str(type(v)))
if k not in ['all', 'ungrouped']:
inventory.all_group.add_child_group(group)
@ -299,6 +299,6 @@ def dict_to_mem_data(data, inventory=None):
if isinstance(meta_hostvars, dict):
v.variables.update(meta_hostvars)
else:
logger.warning('Expected dict of vars for ' 'host "%s", got %s instead', k, str(type(meta_hostvars)))
logger.warning('Expected dict of vars for host "%s", got %s instead', k, str(type(meta_hostvars)))
return inventory

View File

@ -58,7 +58,7 @@ class ActionModule(ActionBase):
if res.status_code != 200:
result['failed'] = True
result['msg'] = 'Expected {} to return a status code of 200 but returned status ' 'code "{}" instead with content "{}".'.format(
result['msg'] = 'Expected {} to return a status code of 200 but returned status code "{}" instead with content "{}".'.format(
url, res.status_code, res.content
)
return result
@ -87,7 +87,7 @@ class ActionModule(ActionBase):
continue
elif res.status_code != 200:
result['failed'] = True
result['msg'] = 'Expected {} to return a status code of 200 but returned status ' 'code "{}" instead with content "{}".'.format(
result['msg'] = 'Expected {} to return a status code of 200 but returned status code "{}" instead with content "{}".'.format(
playbook_url, res.status_code, res.content
)
return result

View File

@ -269,7 +269,7 @@ class TowerSAMLIdentityProvider(BaseSAMLIdentityProvider):
value = value[0]
if conf_key in ('attr_first_name', 'attr_last_name', 'attr_username', 'attr_email') and value is None:
logger.warning(
"Could not map user detail '%s' from SAML attribute '%s'; " "update SOCIAL_AUTH_SAML_ENABLED_IDPS['%s']['%s'] with the correct SAML attribute.",
"Could not map user detail '%s' from SAML attribute '%s'; update SOCIAL_AUTH_SAML_ENABLED_IDPS['%s']['%s'] with the correct SAML attribute.",
conf_key[5:],
key,
self.name,

View File

@ -100,7 +100,7 @@ register(
'AUTHENTICATION_BACKENDS',
field_class=AuthenticationBackendsField,
label=_('Authentication Backends'),
help_text=_('List of authentication backends that are enabled based on ' 'license features and other authentication settings.'),
help_text=_('List of authentication backends that are enabled based on license features and other authentication settings.'),
read_only=True,
depends_on=AuthenticationBackendsField.get_all_required_settings(),
category=_('Authentication'),
@ -360,7 +360,7 @@ def _register_ldap(append=None):
default=None,
label=_('LDAP Deny Group'),
help_text=_(
'Group DN denied from login. If specified, user will not be ' 'allowed to login if a member of this group. Only one deny group ' 'is supported.'
'Group DN denied from login. If specified, user will not be allowed to login if a member of this group. Only one deny group is supported.'
),
category=_('LDAP'),
category_slug='ldap',
@ -426,7 +426,7 @@ def _register_ldap(append=None):
field_class=LDAPTeamMapField,
default={},
label=_('LDAP Team Map'),
help_text=_('Mapping between team members (users) and LDAP groups. Configuration' ' details are available in the documentation.'),
help_text=_('Mapping between team members (users) and LDAP groups. Configuration details are available in the documentation.'),
category=_('LDAP'),
category_slug='ldap',
placeholder=collections.OrderedDict(
@ -461,7 +461,7 @@ register(
allow_blank=True,
default='',
label=_('RADIUS Server'),
help_text=_('Hostname/IP of RADIUS server. RADIUS authentication is ' 'disabled if this setting is empty.'),
help_text=_('Hostname/IP of RADIUS server. RADIUS authentication is disabled if this setting is empty.'),
category=_('RADIUS'),
category_slug='radius',
placeholder='radius.example.com',
@ -564,9 +564,7 @@ register(
read_only=True,
default=SocialAuthCallbackURL('google-oauth2'),
label=_('Google OAuth2 Callback URL'),
help_text=_(
'Provide this URL as the callback URL for your application as part ' 'of your registration process. Refer to the ' 'documentation for more detail.'
),
help_text=_('Provide this URL as the callback URL for your application as part of your registration process. Refer to the documentation for more detail.'),
category=_('Google OAuth2'),
category_slug='google-oauth2',
depends_on=['TOWER_URL_BASE'],
@ -602,7 +600,7 @@ register(
field_class=fields.StringListField,
default=[],
label=_('Google OAuth2 Allowed Domains'),
help_text=_('Update this setting to restrict the domains who are allowed to ' 'login using Google OAuth2.'),
help_text=_('Update this setting to restrict the domains who are allowed to login using Google OAuth2.'),
category=_('Google OAuth2'),
category_slug='google-oauth2',
placeholder=['example.com'],
@ -658,9 +656,7 @@ register(
read_only=True,
default=SocialAuthCallbackURL('github'),
label=_('GitHub OAuth2 Callback URL'),
help_text=_(
'Provide this URL as the callback URL for your application as part ' 'of your registration process. Refer to the ' 'documentation for more detail.'
),
help_text=_('Provide this URL as the callback URL for your application as part of your registration process. Refer to the documentation for more detail.'),
category=_('GitHub OAuth2'),
category_slug='github',
depends_on=['TOWER_URL_BASE'],
@ -723,9 +719,7 @@ register(
read_only=True,
default=SocialAuthCallbackURL('github-org'),
label=_('GitHub Organization OAuth2 Callback URL'),
help_text=_(
'Provide this URL as the callback URL for your application as part ' 'of your registration process. Refer to the ' 'documentation for more detail.'
),
help_text=_('Provide this URL as the callback URL for your application as part of your registration process. Refer to the documentation for more detail.'),
category=_('GitHub Organization OAuth2'),
category_slug='github-org',
depends_on=['TOWER_URL_BASE'],
@ -760,7 +754,7 @@ register(
allow_blank=True,
default='',
label=_('GitHub Organization Name'),
help_text=_('The name of your GitHub organization, as used in your ' 'organization\'s URL: https://github.com/<yourorg>/.'),
help_text=_('The name of your GitHub organization, as used in your organization\'s URL: https://github.com/<yourorg>/.'),
category=_('GitHub Organization OAuth2'),
category_slug='github-org',
)
@ -839,7 +833,7 @@ register(
allow_blank=True,
default='',
label=_('GitHub Team ID'),
help_text=_('Find the numeric team ID using the Github API: ' 'http://fabian-kostadinov.github.io/2015/01/16/how-to-find-a-github-team-id/.'),
help_text=_('Find the numeric team ID using the Github API: http://fabian-kostadinov.github.io/2015/01/16/how-to-find-a-github-team-id/.'),
category=_('GitHub Team OAuth2'),
category_slug='github-team',
)
@ -878,9 +872,7 @@ register(
read_only=True,
default=SocialAuthCallbackURL('github-enterprise'),
label=_('GitHub Enterprise OAuth2 Callback URL'),
help_text=_(
'Provide this URL as the callback URL for your application as part ' 'of your registration process. Refer to the ' 'documentation for more detail.'
),
help_text=_('Provide this URL as the callback URL for your application as part of your registration process. Refer to the documentation for more detail.'),
category=_('GitHub Enterprise OAuth2'),
category_slug='github-enterprise',
depends_on=['TOWER_URL_BASE'],
@ -892,7 +884,7 @@ register(
allow_blank=True,
default='',
label=_('GitHub Enterprise URL'),
help_text=_('The URL for your Github Enterprise instance, e.g.: http(s)://hostname/. Refer to Github Enterprise ' 'documentation for more details.'),
help_text=_('The URL for your Github Enterprise instance, e.g.: http(s)://hostname/. Refer to Github Enterprise documentation for more details.'),
category=_('GitHub Enterprise OAuth2'),
category_slug='github-enterprise',
)
@ -904,7 +896,7 @@ register(
default='',
label=_('GitHub Enterprise API URL'),
help_text=_(
'The API URL for your GitHub Enterprise instance, e.g.: http(s)://hostname/api/v3/. Refer to Github ' 'Enterprise documentation for more details.'
'The API URL for your GitHub Enterprise instance, e.g.: http(s)://hostname/api/v3/. Refer to Github Enterprise documentation for more details.'
),
category=_('GitHub Enterprise OAuth2'),
category_slug='github-enterprise',
@ -967,9 +959,7 @@ register(
read_only=True,
default=SocialAuthCallbackURL('github-enterprise-org'),
label=_('GitHub Enterprise Organization OAuth2 Callback URL'),
help_text=_(
'Provide this URL as the callback URL for your application as part ' 'of your registration process. Refer to the ' 'documentation for more detail.'
),
help_text=_('Provide this URL as the callback URL for your application as part of your registration process. Refer to the documentation for more detail.'),
category=_('GitHub Enterprise Organization OAuth2'),
category_slug='github-enterprise-org',
depends_on=['TOWER_URL_BASE'],
@ -981,7 +971,7 @@ register(
allow_blank=True,
default='',
label=_('GitHub Enterprise Organization URL'),
help_text=_('The URL for your Github Enterprise instance, e.g.: http(s)://hostname/. Refer to Github Enterprise ' 'documentation for more details.'),
help_text=_('The URL for your Github Enterprise instance, e.g.: http(s)://hostname/. Refer to Github Enterprise documentation for more details.'),
category=_('GitHub Enterprise OAuth2'),
category_slug='github-enterprise-org',
)
@ -993,7 +983,7 @@ register(
default='',
label=_('GitHub Enterprise Organization API URL'),
help_text=_(
'The API URL for your GitHub Enterprise instance, e.g.: http(s)://hostname/api/v3/. Refer to Github ' 'Enterprise documentation for more details.'
'The API URL for your GitHub Enterprise instance, e.g.: http(s)://hostname/api/v3/. Refer to Github Enterprise documentation for more details.'
),
category=_('GitHub Enterprise OAuth2'),
category_slug='github-enterprise-org',
@ -1028,7 +1018,7 @@ register(
allow_blank=True,
default='',
label=_('GitHub Enterprise Organization Name'),
help_text=_('The name of your GitHub Enterprise organization, as used in your ' 'organization\'s URL: https://github.com/<yourorg>/.'),
help_text=_('The name of your GitHub Enterprise organization, as used in your organization\'s URL: https://github.com/<yourorg>/.'),
category=_('GitHub Enterprise Organization OAuth2'),
category_slug='github-enterprise-org',
)
@ -1084,7 +1074,7 @@ register(
allow_blank=True,
default='',
label=_('GitHub Enterprise Team URL'),
help_text=_('The URL for your Github Enterprise instance, e.g.: http(s)://hostname/. Refer to Github Enterprise ' 'documentation for more details.'),
help_text=_('The URL for your Github Enterprise instance, e.g.: http(s)://hostname/. Refer to Github Enterprise documentation for more details.'),
category=_('GitHub Enterprise OAuth2'),
category_slug='github-enterprise-team',
)
@ -1096,7 +1086,7 @@ register(
default='',
label=_('GitHub Enterprise Team API URL'),
help_text=_(
'The API URL for your GitHub Enterprise instance, e.g.: http(s)://hostname/api/v3/. Refer to Github ' 'Enterprise documentation for more details.'
'The API URL for your GitHub Enterprise instance, e.g.: http(s)://hostname/api/v3/. Refer to Github Enterprise documentation for more details.'
),
category=_('GitHub Enterprise OAuth2'),
category_slug='github-enterprise-team',
@ -1131,7 +1121,7 @@ register(
allow_blank=True,
default='',
label=_('GitHub Enterprise Team ID'),
help_text=_('Find the numeric team ID using the Github Enterprise API: ' 'http://fabian-kostadinov.github.io/2015/01/16/how-to-find-a-github-team-id/.'),
help_text=_('Find the numeric team ID using the Github Enterprise API: http://fabian-kostadinov.github.io/2015/01/16/how-to-find-a-github-team-id/.'),
category=_('GitHub Enterprise Team OAuth2'),
category_slug='github-enterprise-team',
)
@ -1170,9 +1160,7 @@ register(
read_only=True,
default=SocialAuthCallbackURL('azuread-oauth2'),
label=_('Azure AD OAuth2 Callback URL'),
help_text=_(
'Provide this URL as the callback URL for your application as part' ' of your registration process. Refer to the' ' documentation for more detail. '
),
help_text=_('Provide this URL as the callback URL for your application as part of your registration process. Refer to the documentation for more detail. '),
category=_('Azure AD OAuth2'),
category_slug='azuread-oauth2',
depends_on=['TOWER_URL_BASE'],
@ -1291,7 +1279,7 @@ register(
field_class=fields.BooleanField,
default=True,
label=_('Automatically Create Organizations and Teams on SAML Login'),
help_text=_('When enabled (the default), mapped Organizations and Teams ' 'will be created automatically on successful SAML login.'),
help_text=_('When enabled (the default), mapped Organizations and Teams will be created automatically on successful SAML login.'),
category=_('SAML'),
category_slug='saml',
)
@ -1318,7 +1306,7 @@ register(
read_only=True,
default=get_saml_metadata_url,
label=_('SAML Service Provider Metadata URL'),
help_text=_('If your identity provider (IdP) allows uploading an XML ' 'metadata file, you can download one from this URL.'),
help_text=_('If your identity provider (IdP) allows uploading an XML metadata file, you can download one from this URL.'),
category=_('SAML'),
category_slug='saml',
)
@ -1346,7 +1334,7 @@ register(
required=True,
validators=[validate_certificate],
label=_('SAML Service Provider Public Certificate'),
help_text=_('Create a keypair to use as a service provider (SP) ' 'and include the certificate content here.'),
help_text=_('Create a keypair to use as a service provider (SP) and include the certificate content here.'),
category=_('SAML'),
category_slug='saml',
)
@ -1358,7 +1346,7 @@ register(
required=True,
validators=[validate_private_key],
label=_('SAML Service Provider Private Key'),
help_text=_('Create a keypair to use as a service provider (SP) ' 'and include the private key content here.'),
help_text=_('Create a keypair to use as a service provider (SP) and include the private key content here.'),
category=_('SAML'),
category_slug='saml',
encrypted=True,
@ -1369,7 +1357,7 @@ register(
field_class=SAMLOrgInfoField,
required=True,
label=_('SAML Service Provider Organization Info'),
help_text=_('Provide the URL, display name, and the name of your app. Refer to' ' the documentation for example syntax.'),
help_text=_('Provide the URL, display name, and the name of your app. Refer to the documentation for example syntax.'),
category=_('SAML'),
category_slug='saml',
placeholder=collections.OrderedDict(
@ -1383,7 +1371,7 @@ register(
allow_blank=True,
required=True,
label=_('SAML Service Provider Technical Contact'),
help_text=_('Provide the name and email address of the technical contact for' ' your service provider. Refer to the documentation' ' for example syntax.'),
help_text=_('Provide the name and email address of the technical contact for your service provider. Refer to the documentation for example syntax.'),
category=_('SAML'),
category_slug='saml',
placeholder=collections.OrderedDict([('givenName', 'Technical Contact'), ('emailAddress', 'techsup@example.com')]),
@ -1395,7 +1383,7 @@ register(
allow_blank=True,
required=True,
label=_('SAML Service Provider Support Contact'),
help_text=_('Provide the name and email address of the support contact for your' ' service provider. Refer to the documentation for' ' example syntax.'),
help_text=_('Provide the name and email address of the support contact for your service provider. Refer to the documentation for example syntax.'),
category=_('SAML'),
category_slug='saml',
placeholder=collections.OrderedDict([('givenName', 'Support Contact'), ('emailAddress', 'support@example.com')]),
@ -1457,9 +1445,7 @@ register(
allow_null=True,
default={'requestedAuthnContext': False},
label=_('SAML Security Config'),
help_text=_(
'A dict of key value pairs that are passed to the underlying' ' python-saml security setting' ' https://github.com/onelogin/python-saml#settings'
),
help_text=_('A dict of key value pairs that are passed to the underlying python-saml security setting https://github.com/onelogin/python-saml#settings'),
category=_('SAML'),
category_slug='saml',
placeholder=collections.OrderedDict(
@ -1491,7 +1477,7 @@ register(
allow_null=True,
default=None,
label=_('SAML Service Provider extra configuration data'),
help_text=_('A dict of key value pairs to be passed to the underlying' ' python-saml Service Provider configuration setting.'),
help_text=_('A dict of key value pairs to be passed to the underlying python-saml Service Provider configuration setting.'),
category=_('SAML'),
category_slug='saml',
placeholder=collections.OrderedDict(),
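
The SAML SP help texts above defer to "the documentation for example syntax". As a hedged illustration only (values are placeholders, not taken from this commit), the organization info setting registered a few hunks earlier conventionally follows the python-social-auth shape of a locale key mapping to name/displayname/url:

SOCIAL_AUTH_SAML_ORG_INFO = {
    # locale code -> organization details; every value here is an illustrative placeholder
    "en-US": {
        "name": "example",
        "displayname": "Example, Inc.",
        "url": "http://www.example.com",
    },
}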

View File

@ -390,7 +390,7 @@ class LDAPSearchUnionField(fields.ListField):
search_args = []
for i in range(len(data)):
if not isinstance(data[i], list):
raise ValidationError('In order to ultilize LDAP Union, input element No. %d' ' should be a search query array.' % (i + 1))
raise ValidationError('In order to utilize LDAP Union, input element No. %d should be a search query array.' % (i + 1))
try:
search_args.append(self.ldap_search_field_class().run_validation(data[i]))
except Exception as e:
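
For context on the validation above: each element of the union must itself be a search query array. A minimal sketch of valid input in the usual AWX LDAP settings shape; the DNs and filters are illustrative assumptions, not values from this commit:

AUTH_LDAP_USER_SEARCH = [
    # each element is one LDAP search: [base_dn, scope, filter]
    ["OU=Users,DC=example,DC=com", "SCOPE_SUBTREE", "(sAMAccountName=%(user)s)"],
    ["OU=Service Accounts,DC=example,DC=com", "SCOPE_SUBTREE", "(sAMAccountName=%(user)s)"],
]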

View File

@ -56,7 +56,7 @@ register(
field_class=fields.IntegerField,
min_value=100,
label=_('Max Job Events Retrieved by UI'),
help_text=_('Maximum number of job events for the UI to retrieve within a ' 'single request.'),
help_text=_('Maximum number of job events for the UI to retrieve within a single request.'),
category=_('UI'),
category_slug='ui',
)
@ -65,7 +65,7 @@ register(
'UI_LIVE_UPDATES_ENABLED',
field_class=fields.BooleanField,
label=_('Enable Live Updates in the UI'),
help_text=_('If disabled, the page will not refresh when events are received. ' 'Reloading the page will be required to get the latest details.'),
help_text=_('If disabled, the page will not refresh when events are received. Reloading the page will be required to get the latest details.'),
category=_('UI'),
category_slug='ui',
)

View File

@ -24,7 +24,7 @@
</script>
<meta
http-equiv="Content-Security-Policy"
content="default-src 'self'; connect-src 'self' ws: wss:; style-src 'self' 'unsafe-inline'; script-src 'self' 'nonce-{{ csp_nonce }}' *.pendo.io; img-src 'self' *.pendo.io data:; worker-src 'self' blob: ;"
content="default-src 'self'; connect-src 'self' ws: wss:; style-src 'selfunsafe-inline'; script-src 'selfnonce-{{ csp_nonce }}' *.pendo.io; img-src 'self' *.pendo.io data:; worker-src 'self' blob: ;"
/>
<link rel="shortcut icon" href="{% static 'media/favicon.ico' %}" />
<% } else { %>

View File

@ -5,7 +5,7 @@
<title data-cy="migration-title">{{ title }}</title>
<meta
http-equiv="Content-Security-Policy"
content="default-src 'self'; connect-src 'self' ws: wss:; style-src 'self' 'unsafe-inline'; script-src 'self' 'nonce-{{ csp_nonce }}' *.pendo.io; img-src 'self' *.pendo.io data:;"
content="default-src 'self'; connect-src 'self' ws: wss:; style-src 'selfunsafe-inline'; script-src 'selfnonce-{{ csp_nonce }}' *.pendo.io; img-src 'self' *.pendo.io data:;"
/>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge" />

View File

@ -13,7 +13,7 @@ class SystemJobTemplate(UnifiedJobTemplate, HasNotifications):
# return job
jobs_pg = self.get_related('jobs', id=result.json['system_job'])
assert jobs_pg.count == 1, "system_job_template launched (id:%s) but unable to find matching " "job at %s/jobs/" % (result.json['job'], self.url)
assert jobs_pg.count == 1, "system_job_template launched (id:%s) but unable to find matching job at %s/jobs/" % (result.json['job'], self.url)
return jobs_pg.results[0]

View File

@ -40,7 +40,7 @@ class UnifiedJob(HasStatus, base.Base):
Default behavior is to replace newline characters with a space, but this can be modified, including replacement
with ''. Pass replace_newlines=None to disable.
Additionally, you may replace any ' ' with another character (including ''). This is applied after the newline
Additionally, you may replace any ' ' with another character (including ''). This is applied after the newline
replacement. Default behavior is to not replace spaces.
"""
self.wait_until_completed()
@ -147,7 +147,7 @@ class UnifiedJob(HasStatus, base.Base):
if host_loc.startswith(expected_prefix):
return host_loc
raise RuntimeError(
'Could not find a controller private_data_dir for this job. ' 'Searched for volume mount to {} inside of args {}'.format(expected_prefix, job_args)
'Could not find a controller private_data_dir for this job. Searched for volume mount to {} inside of args {}'.format(expected_prefix, job_args)
)
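
The docstring in the first hunk above describes the replacement order in prose. A minimal sketch of that logic, using a hypothetical helper name and assuming the same parameter semantics:

def _normalize_stdout(stdout, replace_newlines=' ', replace_spaces=None):
    # Newlines are replaced first (pass replace_newlines=None to leave them alone),
    # then any remaining ' ' characters are optionally replaced.
    if replace_newlines is not None:
        stdout = stdout.replace('\n', replace_newlines)
    if replace_spaces is not None:
        stdout = stdout.replace(' ', replace_spaces)
    return stdout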

View File

@ -139,7 +139,7 @@ def format_jq(output, fmt):
if fmt == '.':
return output
raise ImportError(
'To use `-f jq`, you must install the optional jq dependency.\n' '`pip install jq`\n',
'To use `-f jq`, you must install the optional jq dependency.\n`pip install jq`\n',
'Note that some platforms may require additional programs to '
'build jq from source (like `libtool`).\n'
'See https://pypi.org/project/jq/ for instructions.',
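
To make the optional-dependency path above concrete, a small sketch of applying a compiled jq program to parsed output once `pip install jq` has succeeded; the compile/input/all calls assume the jq.py package's documented API, and the sample data is invented:

import json

import jq  # optional dependency; the ImportError above fires when this is missing

data = json.loads('{"results": [{"id": 1}, {"id": 2}]}')
print(jq.compile('.results[].id').input(data).all())  # prints [1, 2]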

View File

@ -55,7 +55,7 @@ def pk_or_name(v2, model_name, value, page=None):
return int(results.results[0].id)
if results.count > 1:
raise argparse.ArgumentTypeError(
'Multiple {0} exist with that {1}. ' 'To look up an ID, run:\n' 'awx {0} list --{1} "{2}" -f human'.format(model_name, identity, value)
'Multiple {0} exist with that {1}. To look up an ID, run:\nawx {0} list --{1} "{2}" -f human'.format(model_name, identity, value)
)
raise argparse.ArgumentTypeError('Could not find any {0} with that {1}.'.format(model_name, identity))
@ -119,7 +119,7 @@ class ResourceOptionsParser(object):
'--all',
dest='all_pages',
action='store_true',
help=('fetch all pages of content from the API when ' 'returning results (instead of just the first page)'),
help=('fetch all pages of content from the API when returning results (instead of just the first page)'),
)
parser.add_argument(
'--order_by',

View File

@ -22,7 +22,7 @@ class CustomAutoprogramDirective(AutoprogramDirective):
nodes[0][0].children = [heading]
# add a descriptive top synopsis of the reference guide
nodes[0].children.insert(1, paragraph(text=('This is an exhaustive guide of every available command in ' 'the awx CLI tool.')))
nodes[0].children.insert(1, paragraph(text=('This is an exhaustive guide of every available command in the awx CLI tool.')))
disclaimer = (
'The commands and parameters documented here can (and will) '
'vary based on a variety of factors, such as the AWX API '

View File

@ -33,7 +33,7 @@ def parse_args():
'--project-file',
dest='project_file',
default=os.getenv('AWXKIT_PROJECT_FILE'),
help='Path for yml project config file.' 'If not provided or set by AWXKIT_PROJECT_FILE, projects will not have default SCM_URL',
help='Path for yml project config file. If not provided or set by AWXKIT_PROJECT_FILE, projects will not have default SCM_URL',
)
parser.add_argument('-f', '--file', dest='akit_script', default=False, help='akit script file to run in interactive session.')
parser.add_argument('-x', '--non-interactive', action='store_true', dest='non_interactive', help='Do not run in interactive mode.')