mirror of https://github.com/ansible/awx.git (synced 2026-01-11 01:57:35 -03:30)

clean up unnecessary usage of the six library (awx only supports py3)

This commit is contained in:
parent 68950d56ca
commit daeeaf413a
@@ -91,16 +91,6 @@ def prepare_env():
     # Monkeypatch Django find_commands to also work with .pyc files.
     import django.core.management
     django.core.management.find_commands = find_commands
-    # Fixup sys.modules reference to django.utils.six to allow jsonfield to
-    # work when using Django 1.4.
-    import django.utils
-    try:
-        import django.utils.six
-    except ImportError:  # pragma: no cover
-        import six
-        sys.modules['django.utils.six'] = sys.modules['six']
-        django.utils.six = sys.modules['django.utils.six']
-    from django.utils import six  # noqa
     # Use the AWX_TEST_DATABASE_* environment variables to specify the test
     # database settings to use when management command is run as an external
     # program via unit tests.
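The block removed above was a py2-era shim: it registered the standalone six module under the name django.utils.six so that old django-jsonfield releases could still import it. The underlying trick is plain sys.modules aliasing; a minimal, self-contained sketch of the same mechanism (the module names here are made up for illustration):

import sys
import types
import json

# Create a parent package in memory, then alias the stdlib json module
# under it -- the same mechanism the removed shim used for `six`.
compat = types.ModuleType('compat')
sys.modules['compat'] = compat
sys.modules['compat.json'] = json
compat.json = json

from compat import json as aliased  # resolves via the alias
assert aliased is json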
@@ -5,7 +5,6 @@
 import inspect
 import logging
 import time
-import six
 import urllib.parse

 # Django

@@ -851,14 +850,14 @@ class CopyAPIView(GenericAPIView):
             return field_val
         if isinstance(field_val, dict):
             for sub_field in field_val:
-                if isinstance(sub_field, six.string_types) \
-                        and isinstance(field_val[sub_field], six.string_types):
+                if isinstance(sub_field, str) \
+                        and isinstance(field_val[sub_field], str):
                     try:
                         field_val[sub_field] = decrypt_field(obj, field_name, sub_field)
                     except AttributeError:
                         # Catching the corner case with v1 credential fields
                         field_val[sub_field] = decrypt_field(obj, sub_field)
-        elif isinstance(field_val, six.string_types):
+        elif isinstance(field_val, str):
             try:
                 field_val = decrypt_field(obj, field_name)
             except AttributeError:

@@ -916,7 +915,7 @@ class CopyAPIView(GenericAPIView):
                     obj, field.name, field_val
                 )
         new_obj = model.objects.create(**create_kwargs)
-        logger.debug(six.text_type('Deep copy: Created new object {}({})').format(
+        logger.debug('Deep copy: Created new object {}({})'.format(
             new_obj, model
         ))
         # Need to save separatedly because Djang-crum get_current_user would
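Nearly every hunk below is the same mechanical substitution, and it is worth spelling out why it is safe: on Python 3, six defines string_types as the one-element tuple (str,) and text_type as str itself, so the isinstance checks and coercions collapse to the builtin. A quick demonstration (requires six to be installed):

import six

assert six.string_types == (str,)
assert six.text_type is str

value = 'secret'
# The old and new spellings are interchangeable on py3:
assert isinstance(value, six.string_types) == isinstance(value, str)
assert six.text_type(value) == str(value)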
@@ -4,7 +4,6 @@ import json

 # Django
 from django.conf import settings
-from django.utils import six
 from django.utils.encoding import smart_str
 from django.utils.translation import ugettext_lazy as _

@@ -34,4 +33,4 @@ class JSONParser(parsers.JSONParser):
                 raise ParseError(_('JSON parse error - not a JSON object'))
             return obj
         except ValueError as exc:
-            raise ParseError(_('JSON parse error - %s\nPossible cause: trailing comma.' % six.text_type(exc)))
+            raise ParseError(_('JSON parse error - %s\nPossible cause: trailing comma.' % str(exc)))
@@ -5,8 +5,6 @@
 from rest_framework import renderers
 from rest_framework.request import override_method

-import six
-

 class BrowsableAPIRenderer(renderers.BrowsableAPIRenderer):
     '''

@@ -71,8 +69,8 @@ class PlainTextRenderer(renderers.BaseRenderer):
     format = 'txt'

     def render(self, data, media_type=None, renderer_context=None):
-        if not isinstance(data, six.string_types):
-            data = six.text_type(data)
+        if not isinstance(data, str):
+            data = str(data)
         return data.encode(self.charset)
@@ -7,7 +7,6 @@ import json
 import logging
 import operator
 import re
-import six
 import urllib.parse
 from collections import OrderedDict
 from datetime import timedelta

@@ -1046,7 +1045,7 @@ class BaseOAuth2TokenSerializer(BaseSerializer):
         return ret

     def _is_valid_scope(self, value):
-        if not value or (not isinstance(value, six.string_types)):
+        if not value or (not isinstance(value, str)):
             return False
         words = value.split()
         for word in words:
@@ -2478,8 +2477,7 @@ class CredentialTypeSerializer(BaseSerializer):


 # TODO: remove when API v1 is removed
-@six.add_metaclass(BaseSerializerMetaclass)
-class V1CredentialFields(BaseSerializer):
+class V1CredentialFields(BaseSerializer, metaclass=BaseSerializerMetaclass):

     class Meta:
         model = Credential
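The serializer hunks swap six.add_metaclass for the Python 3 class syntax. six.add_metaclass(M) is a decorator that recreates the decorated class through metaclass M; in py3-only code the metaclass= keyword in the class header does this directly. A self-contained sketch of the equivalence (UpperAttrsMeta is an illustrative stand-in for BaseSerializerMetaclass):

class UpperAttrsMeta(type):
    # Upper-case plain string class attributes, to make the effect visible.
    def __new__(mcs, name, bases, attrs):
        attrs = {k: (v.upper() if isinstance(v, str) and not k.startswith('__') else v)
                 for k, v in attrs.items()}
        return super().__new__(mcs, name, bases, attrs)

# Python 3 spelling, as adopted by this commit:
class Config(metaclass=UpperAttrsMeta):
    mode = 'fast'

assert Config.mode == 'FAST'

# The removed spelling was equivalent:
#   @six.add_metaclass(UpperAttrsMeta)
#   class Config(object):
#       mode = 'fast'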
@@ -2497,8 +2495,7 @@ class V1CredentialFields(BaseSerializer):
         return super(V1CredentialFields, self).build_field(field_name, info, model_class, nested_depth)


-@six.add_metaclass(BaseSerializerMetaclass)
-class V2CredentialFields(BaseSerializer):
+class V2CredentialFields(BaseSerializer, metaclass=BaseSerializerMetaclass):

     class Meta:
         model = Credential

@@ -2786,8 +2783,7 @@ class LabelsListMixin(object):


 # TODO: remove when API v1 is removed
-@six.add_metaclass(BaseSerializerMetaclass)
-class V1JobOptionsSerializer(BaseSerializer):
+class V1JobOptionsSerializer(BaseSerializer, metaclass=BaseSerializerMetaclass):

     class Meta:
         model = Credential

@@ -2801,8 +2797,7 @@ class V1JobOptionsSerializer(BaseSerializer):
         return super(V1JobOptionsSerializer, self).build_field(field_name, info, model_class, nested_depth)


-@six.add_metaclass(BaseSerializerMetaclass)
-class LegacyCredentialFields(BaseSerializer):
+class LegacyCredentialFields(BaseSerializer, metaclass=BaseSerializerMetaclass):

     class Meta:
         model = Credential
@@ -4387,7 +4382,7 @@ class JobLaunchSerializer(BaseSerializer):
                 errors.setdefault('credentials', []).append(_(
                     'Removing {} credential at launch time without replacement is not supported. '
                     'Provided list lacked credential(s): {}.'
-                ).format(cred.unique_hash(display=True), ', '.join([six.text_type(c) for c in removed_creds])))
+                ).format(cred.unique_hash(display=True), ', '.join([str(c) for c in removed_creds])))

         # verify that credentials (either provided or existing) don't
         # require launch-time passwords that have not been provided

@@ -4725,8 +4720,8 @@ class ScheduleSerializer(LaunchConfigurationBaseSerializer, SchedulePreviewSeria
             raise serializers.ValidationError(_('Manual Project cannot have a schedule set.'))
         elif type(value) == InventorySource and value.source == 'scm' and value.update_on_project_update:
             raise serializers.ValidationError(_(
-                six.text_type('Inventory sources with `update_on_project_update` cannot be scheduled. '
-                              'Schedule its source project `{}` instead.').format(value.source_project.name)))
+                'Inventory sources with `update_on_project_update` cannot be scheduled. '
+                'Schedule its source project `{}` instead.'.format(value.source_project.name)))
         return value

@@ -5064,6 +5059,6 @@ class FactSerializer(BaseFactSerializer):
         ret = super(FactSerializer, self).to_representation(obj)
         if obj is None:
             return ret
-        if 'facts' in ret and isinstance(ret['facts'], six.string_types):
+        if 'facts' in ret and isinstance(ret['facts'], str):
             ret['facts'] = json.loads(ret['facts'])
         return ret
@@ -12,7 +12,6 @@ import requests
 import functools
 from base64 import b64encode
 from collections import OrderedDict, Iterable
-import six


 # Django

@@ -1435,7 +1434,7 @@ class HostList(HostRelatedSearchMixin, ListCreateAPIView):
         try:
             return super(HostList, self).list(*args, **kwargs)
         except Exception as e:
-            return Response(dict(error=_(six.text_type(e))), status=status.HTTP_400_BAD_REQUEST)
+            return Response(dict(error=_(str(e))), status=status.HTTP_400_BAD_REQUEST)


 class HostDetail(RelatedJobsPreventDeleteMixin, ControlledByScmMixin, RetrieveUpdateDestroyAPIView):

@@ -1878,7 +1877,7 @@ class InventoryScriptView(RetrieveAPIView):
         show_all = bool(request.query_params.get('all', ''))
         subset = request.query_params.get('subset', '')
         if subset:
-            if not isinstance(subset, six.string_types):
+            if not isinstance(subset, str):
                 raise ParseError(_('Inventory subset argument must be a string.'))
             if subset.startswith('slice'):
                 slice_number, slice_count = Inventory.parse_slice_params(subset)
@@ -2416,11 +2415,11 @@ class JobTemplateSurveySpec(GenericAPIView):
     serializer_class = EmptySerializer

     ALLOWED_TYPES = {
-        'text': six.string_types,
-        'textarea': six.string_types,
-        'password': six.string_types,
-        'multiplechoice': six.string_types,
-        'multiselect': six.string_types,
+        'text': str,
+        'textarea': str,
+        'password': str,
+        'multiplechoice': str,
+        'multiselect': str,
         'integer': int,
         'float': float
     }
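ALLOWED_TYPES maps each survey question type to the Python type its answers must carry, so the string-ish entries become plain str. A rough sketch of how such a table drives validation (check_answer is illustrative, not AWX's actual validator):

ALLOWED_TYPES = {
    'text': str,
    'password': str,
    'integer': int,
    'float': float,
}

def check_answer(qtype, value):
    expected = ALLOWED_TYPES[qtype]
    # bool subclasses int, so reject True/False for 'integer' explicitly
    if expected is int and isinstance(value, bool):
        return False
    return isinstance(value, expected)

assert check_answer('text', 'hello')
assert not check_answer('integer', True)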
@@ -2455,8 +2454,8 @@ class JobTemplateSurveySpec(GenericAPIView):
     def _validate_spec_data(new_spec, old_spec):
         schema_errors = {}
         for field, expect_type, type_label in [
-                ('name', six.string_types, 'string'),
-                ('description', six.string_types, 'string'),
+                ('name', str, 'string'),
+                ('description', str, 'string'),
                 ('spec', list, 'list of items')]:
             if field not in new_spec:
                 schema_errors['error'] = _("Field '{}' is missing from survey spec.").format(field)

@@ -2474,7 +2473,7 @@ class JobTemplateSurveySpec(GenericAPIView):
         old_spec_dict = JobTemplate.pivot_spec(old_spec)
         for idx, survey_item in enumerate(new_spec["spec"]):
             context = dict(
-                idx=six.text_type(idx),
+                idx=str(idx),
                 survey_item=survey_item
             )
             # General element validation

@@ -2486,7 +2485,7 @@ class JobTemplateSurveySpec(GenericAPIView):
                     field_name=field_name, **context
                 )), status=status.HTTP_400_BAD_REQUEST)
             val = survey_item[field_name]
-            allow_types = six.string_types
+            allow_types = str
             type_label = 'string'
             if field_name == 'required':
                 allow_types = bool

@@ -2534,7 +2533,7 @@ class JobTemplateSurveySpec(GenericAPIView):
             )))

         # Process encryption substitution
-        if ("default" in survey_item and isinstance(survey_item['default'], six.string_types) and
+        if ("default" in survey_item and isinstance(survey_item['default'], str) and
                 survey_item['default'].startswith('$encrypted$')):
             # Submission expects the existence of encrypted DB value to replace given default
             if qtype != "password":

@@ -2546,7 +2545,7 @@ class JobTemplateSurveySpec(GenericAPIView):
             encryptedish_default_exists = False
             if 'default' in old_element:
                 old_default = old_element['default']
-                if isinstance(old_default, six.string_types):
+                if isinstance(old_default, str):
                     if old_default.startswith('$encrypted$'):
                         encryptedish_default_exists = True
                     elif old_default == "":  # unencrypted blank string is allowed as DB value as special case

@@ -3075,8 +3074,8 @@ class WorkflowJobTemplateCopy(WorkflowsEnforcementMixin, CopyAPIView):
         elif field_name in ['credentials']:
             for cred in item.all():
                 if not user.can_access(cred.__class__, 'use', cred):
-                    logger.debug(six.text_type(
-                        'Deep copy: removing {} from relationship due to permissions').format(cred))
+                    logger.debug(
+                        'Deep copy: removing {} from relationship due to permissions'.format(cred))
                     item.remove(cred.pk)
         obj.save()
@@ -10,8 +10,6 @@ from django.utils.translation import ugettext_lazy as _
 # Django REST Framework
 from rest_framework.fields import *  # noqa

-import six
-
 logger = logging.getLogger('awx.conf.fields')

 # Use DRF fields to convert/validate settings:

@@ -139,7 +137,7 @@ class KeyValueField(DictField):
     def to_internal_value(self, data):
         ret = super(KeyValueField, self).to_internal_value(data)
         for value in data.values():
-            if not isinstance(value, six.string_types + six.integer_types + (float,)):
+            if not isinstance(value, (str, int, float)):
                 if isinstance(value, OrderedDict):
                     value = dict(value)
                 self.fail('invalid_child', input=value)
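The KeyValueField change is one of the few rewrites that is more than a find-and-replace: six.string_types + six.integer_types + (float,) is tuple concatenation, and on py3 those tuples are (str,) and (int,), so the whole expression reduces to (str, int, float). (On py2 it expanded to include unicode and long, which is why six built it from pieces.) Verifiable directly:

import six

assert six.string_types + six.integer_types + (float,) == (str, int, float)
assert isinstance(3.14, (str, int, float))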
@@ -1,7 +1,6 @@
 import base64
 import hashlib

-import six
 from django.utils.encoding import smart_str

 from cryptography.hazmat.backends import default_backend

@@ -91,7 +90,7 @@ def encrypt_field(instance, field_name, ask=False, subfield=None, skip_utf8=Fals
     if skip_utf8:
         utf8 = False
     else:
-        utf8 = type(value) == six.text_type
+        utf8 = type(value) == str
     value = smart_str(value)
     key = get_encryption_key(field_name, getattr(instance, 'pk', None))
     encryptor = Cipher(AES(key), ECB(), default_backend()).encryptor()
@@ -1,8 +1,6 @@
 # Django REST Framework
 from rest_framework import serializers

-import six
-
 # Tower
 from awx.api.fields import VerbatimField
 from awx.api.serializers import BaseSerializer

@@ -47,12 +45,12 @@ class SettingFieldMixin(object):
     """Mixin to use a registered setting field class for API display/validation."""

     def to_representation(self, obj):
-        if getattr(self, 'encrypted', False) and isinstance(obj, six.string_types) and obj:
+        if getattr(self, 'encrypted', False) and isinstance(obj, str) and obj:
             return '$encrypted$'
         return obj

     def to_internal_value(self, value):
-        if getattr(self, 'encrypted', False) and isinstance(value, six.string_types) and value.startswith('$encrypted$'):
+        if getattr(self, 'encrypted', False) and isinstance(value, str) and value.startswith('$encrypted$'):
             raise serializers.SkipField()
         obj = super(SettingFieldMixin, self).to_internal_value(value)
         return super(SettingFieldMixin, self).to_representation(obj)
@@ -1,7 +1,8 @@
+import urllib.parse
+
 import pytest

 from django.core.urlresolvers import resolve
-from django.utils.six.moves.urllib.parse import urlparse
 from django.contrib.auth.models import User

 from rest_framework.test import (

@@ -33,7 +34,7 @@ def admin():
 @pytest.fixture
 def api_request(admin):
     def rf(verb, url, data=None, user=admin):
-        view, view_args, view_kwargs = resolve(urlparse(url)[2])
+        view, view_args, view_kwargs = resolve(urllib.parse.urlparse(url)[2])
         request = getattr(APIRequestFactory(), verb)(url, data=data, format='json')
         if user:
             force_authenticate(request, user=user)
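six.moves (and Django's bundled copy under django.utils.six.moves) papered over the py2/py3 stdlib reshuffle; on py3 every urllib move is just a name in the urllib.parse module, so the test fixture can import it directly. For example (the URL is illustrative):

import urllib.parse

parts = urllib.parse.urlparse('https://awx.example.org/api/v2/jobs/?page=2')
assert parts.path == '/api/v2/jobs/'
assert urllib.parse.parse_qs(parts.query) == {'page': ['2']}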
@@ -13,7 +13,6 @@ from django.core.cache.backends.locmem import LocMemCache
 from django.core.exceptions import ImproperlyConfigured
 from django.utils.translation import ugettext_lazy as _
 import pytest
-import six

 from awx.conf import models, fields
 from awx.conf.settings import SettingsWrapper, EncryptedCacheProxy, SETTING_CACHE_NOTSET

@@ -70,7 +69,7 @@ def test_cached_settings_unicode_is_auto_decoded(settings):

     value = 'Iñtërnâtiônàlizætiøn'  # this simulates what python-memcached does on cache.set()
     settings.cache.set('DEBUG', value)
-    assert settings.cache.get('DEBUG') == six.u('Iñtërnâtiônàlizætiøn')
+    assert settings.cache.get('DEBUG') == 'Iñtërnâtiônàlizætiøn'


 def test_read_only_setting(settings):
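six.u() only did work on py2, where it turned a native-string literal into unicode; on py3 it returns its argument unchanged, so the wrapper around the expected value was pure noise:

import six

expected = 'Iñtërnâtiônàlizætiøn'
assert six.u(expected) == expected  # no-op on Python 3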
@@ -6,8 +6,6 @@ import glob
 import os
 import shutil

-import six
-
 # AWX
 from awx.conf.registry import settings_registry

@@ -15,7 +13,7 @@ __all__ = ['comment_assignments', 'conf_to_dict']


 def comment_assignments(patterns, assignment_names, dry_run=True, backup_suffix='.old'):
-    if isinstance(patterns, six.string_types):
+    if isinstance(patterns, str):
         patterns = [patterns]
     diffs = []
     for pattern in patterns:

@@ -34,7 +32,7 @@ def comment_assignments(patterns, assignment_names, dry_run=True, backup_suffix=
 def comment_assignments_in_file(filename, assignment_names, dry_run=True, backup_filename=None):
     from redbaron import RedBaron, indent

-    if isinstance(assignment_names, six.string_types):
+    if isinstance(assignment_names, str):
         assignment_names = [assignment_names]
     else:
         assignment_names = assignment_names[:]
@@ -5,7 +5,6 @@
 import os
 import sys
 import logging
-import six
 from functools import reduce

 # Django

@@ -2590,7 +2589,7 @@ class RoleAccess(BaseAccess):
         if (isinstance(obj.content_object, Organization) and
                 obj.role_field in (Organization.member_role.field.parent_role + ['member_role'])):
             if not isinstance(sub_obj, User):
-                logger.error(six.text_type('Unexpected attempt to associate {} with organization role.').format(sub_obj))
+                logger.error('Unexpected attempt to associate {} with organization role.'.format(sub_obj))
                 return False
             if not UserAccess(self.user).can_admin(sub_obj, None, allow_orphans=True):
                 return False
@@ -4,7 +4,6 @@ import importlib
 import sys
 import traceback

-import six

 from awx.main.tasks import dispatch_startup, inform_cluster_of_shutdown

@@ -90,7 +89,7 @@ class TaskWorker(BaseWorker):
         try:
             if getattr(exc, 'is_awx_task_error', False):
                 # Error caused by user / tracked in job output
-                logger.warning(six.text_type("{}").format(exc))
+                logger.warning("{}".format(exc))
             else:
                 task = body['task']
                 args = body.get('args', [])
@@ -1,13 +1,12 @@
 # Copyright (c) 2018 Ansible by Red Hat
 # All Rights Reserved.

-import six


 class _AwxTaskError():
     def build_exception(self, task, message=None):
         if message is None:
-            message = six.text_type("Execution error running {}").format(task.log_format)
+            message = "Execution error running {}".format(task.log_format)
         e = Exception(message)
         e.task = task
         e.is_awx_task_error = True

@@ -15,7 +14,7 @@ class _AwxTaskError():

     def TaskCancel(self, task, rc):
         """Canceled flag caused run_pexpect to kill the job run"""
-        message=six.text_type("{} was canceled (rc={})").format(task.log_format, rc)
+        message="{} was canceled (rc={})".format(task.log_format, rc)
         e = self.build_exception(task, message)
         e.rc = rc
         e.awx_task_error_type = "TaskCancel"

@@ -23,7 +22,7 @@ class _AwxTaskError():

     def TaskError(self, task, rc):
         """Userspace error (non-zero exit code) in run_pexpect subprocess"""
-        message = six.text_type("{} encountered an error (rc={}), please see task stdout for details.").format(task.log_format, rc)
+        message = "{} encountered an error (rc={}), please see task stdout for details.".format(task.log_format, rc)
         e = self.build_exception(task, message)
         e.rc = rc
         e.awx_task_error_type = "TaskError"
@@ -6,7 +6,6 @@ import copy
 import json
 import operator
 import re
-import six
 import urllib.parse

 from jinja2 import Environment, StrictUndefined

@@ -80,7 +79,7 @@ class JSONField(upstream_JSONField):

 class JSONBField(upstream_JSONBField):
     def get_prep_lookup(self, lookup_type, value):
-        if isinstance(value, six.string_types) and value == "null":
+        if isinstance(value, str) and value == "null":
             return 'null'
         return super(JSONBField, self).get_prep_lookup(lookup_type, value)

@@ -95,7 +94,7 @@ class JSONBField(upstream_JSONBField):
     def from_db_value(self, value, expression, connection, context):
         # Work around a bug in django-jsonfield
         # https://bitbucket.org/schinckel/django-jsonfield/issues/57/cannot-use-in-the-same-project-as-djangos
-        if isinstance(value, six.string_types):
+        if isinstance(value, str):
             return json.loads(value)
         return value

@@ -411,7 +410,7 @@ class JSONSchemaField(JSONBField):
             format_checker=self.format_checker
         ).iter_errors(value):
             if error.validator == 'pattern' and 'error' in error.schema:
-                error.message = six.text_type(error.schema['error']).format(instance=error.instance)
+                error.message = error.schema['error'].format(instance=error.instance)
             elif error.validator == 'type':
                 expected_type = error.validator_value
                 if expected_type == 'object':

@@ -450,7 +449,7 @@ class JSONSchemaField(JSONBField):
     def from_db_value(self, value, expression, connection, context):
         # Work around a bug in django-jsonfield
         # https://bitbucket.org/schinckel/django-jsonfield/issues/57/cannot-use-in-the-same-project-as-djangos
-        if isinstance(value, six.string_types):
+        if isinstance(value, str):
             return json.loads(value)
         return value

@@ -547,7 +546,7 @@ class CredentialInputField(JSONSchemaField):
                 v != '$encrypted$',
                 model_instance.pk
             ]):
-                if not isinstance(getattr(model_instance, k), six.string_types):
+                if not isinstance(getattr(model_instance, k), str):
                     raise django_exceptions.ValidationError(
                         _('secret values must be of type string, not {}').format(type(v).__name__),
                         code='invalid',

@@ -564,7 +563,7 @@ class CredentialInputField(JSONSchemaField):
             format_checker=self.format_checker
         ).iter_errors(decrypted_values):
             if error.validator == 'pattern' and 'error' in error.schema:
-                error.message = six.text_type(error.schema['error']).format(instance=error.instance)
+                error.message = error.schema['error'].format(instance=error.instance)
             if error.validator == 'dependencies':
                 # replace the default error messaging w/ a better i18n string
                 # I wish there was a better way to determine the parameters of
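One nuance in the fields.py hunks: six.text_type(x).format(...) coerced x to text before formatting, while the new x.format(...) assumes x is already a str (the JSON schema 'error' values here are). When the value might be a proxy object, the explicit str() call still matters; a sketch with a stand-in for Django's lazy translation proxies:

class Lazy:
    # Minimal stand-in: stringifies on demand, but has no .format method.
    def __init__(self, template):
        self.template = template
    def __str__(self):
        return self.template

msg = Lazy('must match {instance}')
assert str(msg).format(instance='foo') == 'must match foo'
# msg.format(instance='foo') would raise AttributeError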
@@ -5,7 +5,6 @@
 import datetime
 import logging

-import six

 # Django
 from django.core.management.base import BaseCommand

@@ -43,7 +42,7 @@ class Command(BaseCommand):
         n_deleted_items = 0
         pks_to_delete = set()
         for asobj in ActivityStream.objects.iterator():
-            asobj_disp = '"%s" id: %s' % (six.text_type(asobj), asobj.id)
+            asobj_disp = '"%s" id: %s' % (str(asobj), asobj.id)
             if asobj.timestamp >= self.cutoff:
                 if self.dry_run:
                     self.logger.info("would skip %s" % asobj_disp)
@@ -5,7 +5,6 @@
 import datetime
 import logging

-import six

 # Django
 from django.core.management.base import BaseCommand, CommandError

@@ -68,7 +67,7 @@ class Command(BaseCommand):
         jobs = Job.objects.filter(created__lt=self.cutoff)
         for job in jobs.iterator():
             job_display = '"%s" (%d host summaries, %d events)' % \
-                          (six.text_type(job),
+                          (str(job),
                            job.job_host_summaries.count(), job.job_events.count())
             if job.status in ('pending', 'waiting', 'running'):
                 action_text = 'would skip' if self.dry_run else 'skipping'

@@ -89,7 +88,7 @@ class Command(BaseCommand):
         ad_hoc_commands = AdHocCommand.objects.filter(created__lt=self.cutoff)
         for ad_hoc_command in ad_hoc_commands.iterator():
             ad_hoc_command_display = '"%s" (%d events)' % \
-                                     (six.text_type(ad_hoc_command),
+                                     (str(ad_hoc_command),
                                       ad_hoc_command.ad_hoc_command_events.count())
             if ad_hoc_command.status in ('pending', 'waiting', 'running'):
                 action_text = 'would skip' if self.dry_run else 'skipping'

@@ -109,7 +108,7 @@ class Command(BaseCommand):
         skipped, deleted = 0, 0
         project_updates = ProjectUpdate.objects.filter(created__lt=self.cutoff)
         for pu in project_updates.iterator():
-            pu_display = '"%s" (type %s)' % (six.text_type(pu), six.text_type(pu.launch_type))
+            pu_display = '"%s" (type %s)' % (str(pu), str(pu.launch_type))
             if pu.status in ('pending', 'waiting', 'running'):
                 action_text = 'would skip' if self.dry_run else 'skipping'
                 self.logger.debug('%s %s project update %s', action_text, pu.status, pu_display)

@@ -132,7 +131,7 @@ class Command(BaseCommand):
         skipped, deleted = 0, 0
         inventory_updates = InventoryUpdate.objects.filter(created__lt=self.cutoff)
         for iu in inventory_updates.iterator():
-            iu_display = '"%s" (source %s)' % (six.text_type(iu), six.text_type(iu.source))
+            iu_display = '"%s" (source %s)' % (str(iu), str(iu.source))
             if iu.status in ('pending', 'waiting', 'running'):
                 action_text = 'would skip' if self.dry_run else 'skipping'
                 self.logger.debug('%s %s inventory update %s', action_text, iu.status, iu_display)

@@ -155,7 +154,7 @@ class Command(BaseCommand):
         skipped, deleted = 0, 0
         system_jobs = SystemJob.objects.filter(created__lt=self.cutoff)
         for sj in system_jobs.iterator():
-            sj_display = '"%s" (type %s)' % (six.text_type(sj), six.text_type(sj.job_type))
+            sj_display = '"%s" (type %s)' % (str(sj), str(sj.job_type))
             if sj.status in ('pending', 'waiting', 'running'):
                 action_text = 'would skip' if self.dry_run else 'skipping'
                 self.logger.debug('%s %s system_job %s', action_text, sj.status, sj_display)

@@ -185,7 +184,7 @@ class Command(BaseCommand):
         workflow_jobs = WorkflowJob.objects.filter(created__lt=self.cutoff)
         for workflow_job in workflow_jobs.iterator():
             workflow_job_display = '"{}" ({} nodes)'.format(
-                six.text_type(workflow_job),
+                str(workflow_job),
                 workflow_job.workflow_nodes.count())
             if workflow_job.status in ('pending', 'waiting', 'running'):
                 action_text = 'would skip' if self.dry_run else 'skipping'

@@ -206,7 +205,7 @@ class Command(BaseCommand):
         notifications = Notification.objects.filter(created__lt=self.cutoff)
         for notification in notifications.iterator():
             notification_display = '"{}" (started {}, {} type, {} sent)'.format(
-                six.text_type(notification), six.text_type(notification.created),
+                str(notification), str(notification.created),
                 notification.notification_type, notification.notifications_sent)
             if notification.status in ('pending',):
                 action_text = 'would skip' if self.dry_run else 'skipping'
@@ -3,7 +3,6 @@

 from awx.main.models import Instance, InstanceGroup
 from django.core.management.base import BaseCommand
-import six


 class Ungrouped(object):

@@ -42,7 +41,7 @@ class Command(BaseCommand):
             fmt += ' policy>={0.policy_instance_minimum}'
         if instance_group.controller:
             fmt += ' controller={0.controller.name}'
-        print(six.text_type(fmt + ']').format(instance_group))
+        print((fmt + ']').format(instance_group))
         for x in instance_group.instances.all():
             color = '\033[92m'
             if x.capacity == 0 or x.enabled is False:

@@ -52,5 +51,5 @@ class Command(BaseCommand):
             fmt += ' last_isolated_check="{0.last_isolated_check:%Y-%m-%d %H:%M:%S}"'
         if x.capacity:
             fmt += ' heartbeat="{0.modified:%Y-%m-%d %H:%M:%S}"'
-        print(six.text_type(fmt + '\033[0m').format(x, x.version or '?'))
+        print((fmt + '\033[0m').format(x, x.version or '?'))
         print('')
@@ -1,7 +1,6 @@
 # Copyright (c) 2017 Ansible Tower by Red Hat
 # All Rights Reserved.
 import sys
-import six

 from awx.main.utils.pglock import advisory_lock
 from awx.main.models import Instance, InstanceGroup

@@ -73,7 +72,7 @@ class Command(BaseCommand):
             if instance.exists():
                 instances.append(instance[0])
             else:
-                raise InstanceNotFound(six.text_type("Instance does not exist: {}").format(inst_name), changed)
+                raise InstanceNotFound("Instance does not exist: {}".format(inst_name), changed)

         ig.instances.add(*instances)

@@ -99,24 +98,24 @@ class Command(BaseCommand):
         if options.get('hostnames'):
             hostname_list = options.get('hostnames').split(",")

-        with advisory_lock(six.text_type('instance_group_registration_{}').format(queuename)):
+        with advisory_lock('instance_group_registration_{}'.format(queuename)):
             changed2 = False
             changed3 = False
             (ig, created, changed1) = self.get_create_update_instance_group(queuename, inst_per, inst_min)
             if created:
-                print(six.text_type("Creating instance group {}").format(ig.name))
+                print("Creating instance group {}".format(ig.name))
             elif not created:
-                print(six.text_type("Instance Group already registered {}").format(ig.name))
+                print("Instance Group already registered {}".format(ig.name))

             if ctrl:
                 (ig_ctrl, changed2) = self.update_instance_group_controller(ig, ctrl)
                 if changed2:
-                    print(six.text_type("Set controller group {} on {}.").format(ctrl, queuename))
+                    print("Set controller group {} on {}.".format(ctrl, queuename))

             try:
                 (instances, changed3) = self.add_instances_to_group(ig, hostname_list)
                 for i in instances:
-                    print(six.text_type("Added instance {} to {}").format(i.hostname, ig.name))
+                    print("Added instance {} to {}".format(i.hostname, ig.name))
             except InstanceNotFound as e:
                 instance_not_found_err = e
@@ -4,11 +4,11 @@
 import uuid
 import logging
 import threading
-import six
 import time
 import cProfile
 import pstats
 import os
+import urllib.parse

 from django.conf import settings
 from django.contrib.auth.models import User

@@ -195,7 +195,7 @@ class URLModificationMiddleware(object):

     def process_request(self, request):
         if hasattr(request, 'environ') and 'REQUEST_URI' in request.environ:
-            old_path = six.moves.urllib.parse.urlsplit(request.environ['REQUEST_URI']).path
+            old_path = urllib.parse.urlsplit(request.environ['REQUEST_URI']).path
             old_path = old_path[request.path.find(request.path_info):]
         else:
             old_path = request.path_info
@@ -1,7 +1,6 @@
 import logging

 from django.db.models import Q
-import six

 logger = logging.getLogger('awx.main.migrations')

@@ -39,8 +38,8 @@ def rename_inventory_sources(apps, schema_editor):
             Q(deprecated_group__inventory__organization=org)).distinct().all()):

         inventory = invsrc.deprecated_group.inventory if invsrc.deprecated_group else invsrc.inventory
-        name = six.text_type('{0} - {1} - {2}').format(invsrc.name, inventory.name, i)
-        logger.debug(six.text_type("Renaming InventorySource({0}) {1} -> {2}").format(
+        name = '{0} - {1} - {2}'.format(invsrc.name, inventory.name, i)
+        logger.debug("Renaming InventorySource({0}) {1} -> {2}".format(
             invsrc.pk, invsrc.name, name
         ))
         invsrc.name = name
@@ -1,7 +1,6 @@
 import logging
 import json
 from django.utils.translation import ugettext_lazy as _
-import six

 from awx.conf.migrations._reencrypt import (
     decrypt_field,
@@ -3,8 +3,6 @@ import logging
 from django.utils.timezone import now
 from django.utils.text import slugify

-import six
-
 from awx.main.models.base import PERM_INVENTORY_SCAN, PERM_INVENTORY_DEPLOY
 from awx.main import utils

@@ -26,7 +24,7 @@ def _create_fact_scan_project(ContentType, Project, org):
                    polymorphic_ctype=ct)
     proj.save()

-    slug_name = slugify(six.text_type(name)).replace(u'-', u'_')
+    slug_name = slugify(str(name)).replace(u'-', u'_')
     proj.local_path = u'_%d__%s' % (int(proj.pk), slug_name)

     proj.save()
@@ -7,7 +7,6 @@ import os
 import re
 import stat
 import tempfile
-import six

 # Jinja2
 from jinja2 import Template

@@ -418,11 +417,11 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
         type_alias = self.credential_type_id
         if self.kind == 'vault' and self.has_input('vault_id'):
             if display:
-                fmt_str = six.text_type('{} (id={})')
+                fmt_str = '{} (id={})'
             else:
-                fmt_str = six.text_type('{}_{}')
+                fmt_str = '{}_{}'
             return fmt_str.format(type_alias, self.get_input('vault_id'))
-        return six.text_type(type_alias)
+        return str(type_alias)

     @staticmethod
     def unique_dict(cred_qs):

@@ -679,9 +678,7 @@ class CredentialType(CommonModelNameNotUnique):
             try:
                 injector_field.validate_env_var_allowed(env_var)
             except ValidationError as e:
-                logger.error(six.text_type(
-                    'Ignoring prohibited env var {}, reason: {}'
-                ).format(env_var, e))
+                logger.error('Ignoring prohibited env var {}, reason: {}'.format(env_var, e))
                 continue
             env[env_var] = Template(tmpl).render(**namespace)
             safe_env[env_var] = Template(tmpl).render(**safe_namespace)
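Wrapping a format template in six.text_type, as the old unique_hash and logging code did, was a defensive py2 habit: it pinned the result to unicode no matter what mix of byte and text strings was substituted in. On py3 there is only one string type, so the wrapper is a no-op and the literal can be used directly:

type_alias = 'vault'
vault_id = 'résumé'
assert '{}_{}'.format(type_alias, vault_id) == 'vault_résumé'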
@@ -9,7 +9,6 @@ from django.utils.text import Truncator
 from django.utils.timezone import utc
 from django.utils.translation import ugettext_lazy as _
 from django.utils.encoding import force_text
-import six

 from awx.api.versioning import reverse
 from awx.main.fields import JSONField

@@ -35,7 +34,7 @@ def sanitize_event_keys(kwargs, valid_keys):
     for key in [
         'play', 'role', 'task', 'playbook'
     ]:
-        if isinstance(kwargs.get('event_data', {}).get(key), six.string_types):
+        if isinstance(kwargs.get('event_data', {}).get(key), str):
             if len(kwargs['event_data'][key]) > 1024:
                 kwargs['event_data'][key] = Truncator(kwargs['event_data'][key]).chars(1024)
@@ -9,7 +9,6 @@ import logging
 import re
 import copy
 import os.path
-import six
 from urllib.parse import urljoin

 # Django

@@ -1356,7 +1355,7 @@ class InventorySourceOptions(BaseModel):
     source_vars_dict = VarsDictProperty('source_vars')

     def clean_instance_filters(self):
-        instance_filters = six.text_type(self.instance_filters or '')
+        instance_filters = str(self.instance_filters or '')
         if self.source == 'ec2':
             invalid_filters = []
             instance_filter_re = re.compile(r'^((tag:.+)|([a-z][a-z\.-]*[a-z]))=.*$')

@@ -1382,7 +1381,7 @@ class InventorySourceOptions(BaseModel):
         return ''

     def clean_group_by(self):
-        group_by = six.text_type(self.group_by or '')
+        group_by = str(self.group_by or '')
         if self.source == 'ec2':
             get_choices = getattr(self, 'get_%s_group_by_choices' % self.source)
             valid_choices = [x[0] for x in get_choices()]

@@ -1539,7 +1538,7 @@ class InventorySource(UnifiedJobTemplate, InventorySourceOptions, RelatedJobsMix
         if '_eager_fields' not in kwargs:
             kwargs['_eager_fields'] = {}
         if 'name' not in kwargs['_eager_fields']:
-            name = six.text_type('{} - {}').format(self.inventory.name, self.name)
+            name = '{} - {}'.format(self.inventory.name, self.name)
             name_field = self._meta.get_field('name')
             if len(name) > name_field.max_length:
                 name = name[:name_field.max_length]
@@ -10,7 +10,6 @@ import time
 import json
 from urllib.parse import urljoin

-import six

 # Django
 from django.conf import settings

@@ -823,7 +822,7 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
             timeout = now() - datetime.timedelta(seconds=timeout)
             hosts = hosts.filter(ansible_facts_modified__gte=timeout)
         for host in hosts:
-            filepath = os.sep.join(map(six.text_type, [destination, host.name]))
+            filepath = os.sep.join(map(str, [destination, host.name]))
             if not os.path.realpath(filepath).startswith(destination):
                 system_tracking_logger.error('facts for host {} could not be cached'.format(smart_str(host.name)))
                 continue

@@ -840,7 +839,7 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
     def finish_job_fact_cache(self, destination, modification_times):
         destination = os.path.join(destination, 'facts')
         for host in self._get_inventory_hosts():
-            filepath = os.sep.join(map(six.text_type, [destination, host.name]))
+            filepath = os.sep.join(map(str, [destination, host.name]))
             if not os.path.realpath(filepath).startswith(destination):
                 system_tracking_logger.error('facts for host {} could not be cached'.format(smart_str(host.name)))
                 continue
@@ -3,7 +3,6 @@ import os
 import json
 from copy import copy, deepcopy

-import six

 # Django
 from django.apps import apps

@@ -167,7 +166,7 @@ class SurveyJobTemplateMixin(models.Model):
             decrypted_default = default
             if (
                 survey_element['type'] == "password" and
-                isinstance(decrypted_default, six.string_types) and
+                isinstance(decrypted_default, str) and
                 decrypted_default.startswith('$encrypted$')
             ):
                 decrypted_default = decrypt_value(get_encryption_key('value', pk=None), decrypted_default)

@@ -190,7 +189,7 @@ class SurveyJobTemplateMixin(models.Model):
             if (survey_element['type'] == "password"):
                 password_value = data.get(survey_element['variable'])
                 if (
-                    isinstance(password_value, six.string_types) and
+                    isinstance(password_value, str) and
                     password_value == '$encrypted$'
                 ):
                     if survey_element.get('default') is None and survey_element['required']:

@@ -203,7 +202,7 @@ class SurveyJobTemplateMixin(models.Model):
                     errors.append("'%s' value missing" % survey_element['variable'])
             elif survey_element['type'] in ["textarea", "text", "password"]:
                 if survey_element['variable'] in data:
-                    if not isinstance(data[survey_element['variable']], six.string_types):
+                    if not isinstance(data[survey_element['variable']], str):
                         errors.append("Value %s for '%s' expected to be a string." % (data[survey_element['variable']],
                                                                                       survey_element['variable']))
         return errors

@@ -247,7 +246,7 @@ class SurveyJobTemplateMixin(models.Model):
                     errors.append("'%s' value is expected to be a list." % survey_element['variable'])
                 else:
                     choice_list = copy(survey_element['choices'])
-                    if isinstance(choice_list, six.string_types):
+                    if isinstance(choice_list, str):
                         choice_list = choice_list.split('\n')
                     for val in data[survey_element['variable']]:
                         if val not in choice_list:

@@ -255,7 +254,7 @@ class SurveyJobTemplateMixin(models.Model):
                                                                                choice_list))
             elif survey_element['type'] == 'multiplechoice':
                 choice_list = copy(survey_element['choices'])
-                if isinstance(choice_list, six.string_types):
+                if isinstance(choice_list, str):
                     choice_list = choice_list.split('\n')
                 if survey_element['variable'] in data:
                     if data[survey_element['variable']] not in choice_list:

@@ -315,7 +314,7 @@ class SurveyJobTemplateMixin(models.Model):
             if 'prompts' not in _exclude_errors:
                 errors['extra_vars'] = [_('Variables {list_of_keys} are not allowed on launch. Check the Prompt on Launch setting '+
                                           'on the {model_name} to include Extra Variables.').format(
-                                              list_of_keys=six.text_type(', ').join([six.text_type(key) for key in extra_vars.keys()]),
+                                              list_of_keys=', '.join([str(key) for key in extra_vars.keys()]),
                                               model_name=self._meta.verbose_name.title())]

         return (accepted, rejected, errors)

@@ -386,7 +385,7 @@ class SurveyJobMixin(models.Model):
             extra_vars = json.loads(self.extra_vars)
             for key in self.survey_passwords:
                 value = extra_vars.get(key)
-                if value and isinstance(value, six.string_types) and value.startswith('$encrypted$'):
+                if value and isinstance(value, str) and value.startswith('$encrypted$'):
                     extra_vars[key] = decrypt_value(get_encryption_key('value', pk=None), value)
             return json.dumps(extra_vars)
         else:
@@ -15,7 +15,6 @@ from django.utils.text import slugify
 from django.core.exceptions import ValidationError
 from django.utils.timezone import now, make_aware, get_default_timezone

-import six

 # AWX
 from awx.api.versioning import reverse

@@ -134,7 +133,7 @@ class ProjectOptions(models.Model):
     def clean_scm_url(self):
         if self.scm_type == 'insights':
             self.scm_url = settings.INSIGHTS_URL_BASE
-        scm_url = six.text_type(self.scm_url or '')
+        scm_url = str(self.scm_url or '')
         if not self.scm_type:
             return ''
         try:

@@ -145,7 +144,7 @@ class ProjectOptions(models.Model):
         scm_url_parts = urlparse.urlsplit(scm_url)
         if self.scm_type and not any(scm_url_parts):
             raise ValidationError(_('SCM URL is required.'))
-        return six.text_type(self.scm_url or '')
+        return str(self.scm_url or '')

     def clean_credential(self):
         if not self.scm_type:

@@ -329,7 +328,7 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn
         skip_update = bool(kwargs.pop('skip_update', False))
         # Create auto-generated local path if project uses SCM.
         if self.pk and self.scm_type and not self.local_path.startswith('_'):
-            slug_name = slugify(six.text_type(self.name)).replace(u'-', u'_')
+            slug_name = slugify(str(self.name)).replace(u'-', u'_')
             self.local_path = u'_%d__%s' % (int(self.pk), slug_name)
             if 'local_path' not in update_fields:
                 update_fields.append('local_path')

@@ -544,8 +543,7 @@ class ProjectUpdate(UnifiedJob, ProjectOptions, JobNotificationMixin, TaskManage
         res = super(ProjectUpdate, self).cancel(job_explanation=job_explanation, is_chain=is_chain)
         if res and self.launch_type != 'sync':
             for inv_src in self.scm_inventory_updates.filter(status='running'):
-                inv_src.cancel(job_explanation=six.text_type(
-                    'Source project update `{}` was canceled.').format(self.name))
+                inv_src.cancel(job_explanation='Source project update `{}` was canceled.'.format(self.name))
         return res

     '''
@@ -12,7 +12,6 @@ import socket
 import subprocess
 import tempfile
 from collections import OrderedDict
-import six

 # Django
 from django.conf import settings

@@ -351,8 +350,8 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, Notificatio
         validated_kwargs = kwargs.copy()
         if unallowed_fields:
             if parent_field_name is None:
-                logger.warn(six.text_type('Fields {} are not allowed as overrides to spawn from {}.').format(
-                    six.text_type(', ').join(unallowed_fields), self
+                logger.warn('Fields {} are not allowed as overrides to spawn from {}.'.format(
+                    ', '.join(unallowed_fields), self
                 ))
             for f in unallowed_fields:
                 validated_kwargs.pop(f)

@@ -1305,9 +1304,9 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
                 'dispatcher', self.execution_node
             ).running(timeout=timeout)
         except socket.timeout:
-            logger.error(six.text_type(
-                'could not reach dispatcher on {} within {}s'
-            ).format(self.execution_node, timeout))
+            logger.error('could not reach dispatcher on {} within {}s'.format(
+                self.execution_node, timeout
+            ))
             running = False
         return running
@@ -6,7 +6,6 @@ from datetime import timedelta
 import logging
 import uuid
 import json
-import six
 import random

 # Django

@@ -131,7 +130,7 @@ class TaskManager():
                 job.job_explanation = _(
                     "Workflow Job spawned from workflow could not start because it "
                     "would result in recursion (spawn order, most recent first: {})"
-                ).format(six.text_type(', ').join([six.text_type('<{}>').format(tmp) for tmp in display_list]))
+                ).format(', '.join(['<{}>'.format(tmp) for tmp in display_list]))
             else:
                 logger.debug('Starting workflow-in-workflow id={}, wfjt={}, ancestors={}'.format(
                     job.id, spawn_node.unified_job_template.pk, [wa.pk for wa in workflow_ancestors]))

@@ -182,7 +181,7 @@ class TaskManager():
             logger.info('Marking %s as %s.', workflow_job.log_format, 'failed' if has_failed else 'successful')
             result.append(workflow_job.id)
             new_status = 'failed' if has_failed else 'successful'
-            logger.debug(six.text_type("Transitioning {} to {} status.").format(workflow_job.log_format, new_status))
+            logger.debug("Transitioning {} to {} status.".format(workflow_job.log_format, new_status))
             update_fields = ['status', 'start_args']
             workflow_job.status = new_status
             if reason:

@@ -217,7 +216,7 @@ class TaskManager():
         try:
             controller_node = rampart_group.choose_online_controller_node()
         except IndexError:
-            logger.debug(six.text_type("No controllers available in group {} to run {}").format(
+            logger.debug("No controllers available in group {} to run {}".format(
                 rampart_group.name, task.log_format))
             return

@@ -240,19 +239,19 @@ class TaskManager():
             # non-Ansible jobs on isolated instances run on controller
             task.instance_group = rampart_group.controller
             task.execution_node = random.choice(list(rampart_group.controller.instances.all().values_list('hostname', flat=True)))
-            logger.info(six.text_type('Submitting isolated {} to queue {}.').format(
+            logger.info('Submitting isolated {} to queue {}.'.format(
                 task.log_format, task.instance_group.name, task.execution_node))
         elif controller_node:
             task.instance_group = rampart_group
             task.execution_node = instance.hostname
             task.controller_node = controller_node
-            logger.info(six.text_type('Submitting isolated {} to queue {} controlled by {}.').format(
+            logger.info('Submitting isolated {} to queue {} controlled by {}.'.format(
                 task.log_format, task.execution_node, controller_node))
         else:
             task.instance_group = rampart_group
             if instance is not None:
                 task.execution_node = instance.hostname
-            logger.info(six.text_type('Submitting {} to <instance group, instance> <{},{}>.').format(
+            logger.info('Submitting {} to <instance group, instance> <{},{}>.'.format(
                 task.log_format, task.instance_group_id, task.execution_node))
         with disable_activity_stream():
             task.celery_task_id = str(uuid.uuid4())

@@ -436,7 +435,7 @@ class TaskManager():
     def process_dependencies(self, dependent_task, dependency_tasks):
         for task in dependency_tasks:
             if self.is_job_blocked(task):
-                logger.debug(six.text_type("Dependent {} is blocked from running").format(task.log_format))
+                logger.debug("Dependent {} is blocked from running".format(task.log_format))
                 continue
             preferred_instance_groups = task.preferred_instance_groups
             found_acceptable_queue = False

@@ -445,16 +444,16 @@ class TaskManager():
             if idle_instance_that_fits is None:
                 idle_instance_that_fits = rampart_group.find_largest_idle_instance()
             if self.get_remaining_capacity(rampart_group.name) <= 0:
-                logger.debug(six.text_type("Skipping group {} capacity <= 0").format(rampart_group.name))
+                logger.debug("Skipping group {} capacity <= 0".format(rampart_group.name))
                 continue

             execution_instance = rampart_group.fit_task_to_most_remaining_capacity_instance(task)
             if execution_instance:
-                logger.debug(six.text_type("Starting dependent {} in group {} instance {}").format(
+                logger.debug("Starting dependent {} in group {} instance {}".format(
                     task.log_format, rampart_group.name, execution_instance.hostname))
             elif not execution_instance and idle_instance_that_fits:
                 execution_instance = idle_instance_that_fits
-                logger.debug(six.text_type("Starting dependent {} in group {} on idle instance {}").format(
+                logger.debug("Starting dependent {} in group {} on idle instance {}".format(
                     task.log_format, rampart_group.name, execution_instance.hostname))
             if execution_instance:
                 self.graph[rampart_group.name]['graph'].add_job(task)

@@ -464,17 +463,17 @@ class TaskManager():
                 found_acceptable_queue = True
                 break
             else:
-                logger.debug(six.text_type("No instance available in group {} to run job {} w/ capacity requirement {}").format(
+                logger.debug("No instance available in group {} to run job {} w/ capacity requirement {}".format(
                     rampart_group.name, task.log_format, task.task_impact))
         if not found_acceptable_queue:
-            logger.debug(six.text_type("Dependent {} couldn't be scheduled on graph, waiting for next cycle").format(task.log_format))
+            logger.debug("Dependent {} couldn't be scheduled on graph, waiting for next cycle".format(task.log_format))

     def process_pending_tasks(self, pending_tasks):
         running_workflow_templates = set([wf.unified_job_template_id for wf in self.get_running_workflow_jobs()])
         for task in pending_tasks:
             self.process_dependencies(task, self.generate_dependencies(task))
             if self.is_job_blocked(task):
-                logger.debug(six.text_type("{} is blocked from running").format(task.log_format))
+                logger.debug("{} is blocked from running".format(task.log_format))
                 continue
             preferred_instance_groups = task.preferred_instance_groups
             found_acceptable_queue = False

@@ -482,7 +481,7 @@ class TaskManager():
             if isinstance(task, WorkflowJob):
                 if task.unified_job_template_id in running_workflow_templates:
                     if not task.allow_simultaneous:
-                        logger.debug(six.text_type("{} is blocked from running, workflow already running").format(task.log_format))
+                        logger.debug("{} is blocked from running, workflow already running".format(task.log_format))
                         continue
                 else:
                     running_workflow_templates.add(task.unified_job_template_id)

@@ -493,17 +492,17 @@ class TaskManager():
                     idle_instance_that_fits = rampart_group.find_largest_idle_instance()
                 remaining_capacity = self.get_remaining_capacity(rampart_group.name)
                 if remaining_capacity <= 0:
-                    logger.debug(six.text_type("Skipping group {}, remaining_capacity {} <= 0").format(
+                    logger.debug("Skipping group {}, remaining_capacity {} <= 0".format(
                         rampart_group.name, remaining_capacity))
                     continue

                 execution_instance = rampart_group.fit_task_to_most_remaining_capacity_instance(task)
                 if execution_instance:
-                    logger.debug(six.text_type("Starting {} in group {} instance {} (remaining_capacity={})").format(
+                    logger.debug("Starting {} in group {} instance {} (remaining_capacity={})".format(
                         task.log_format, rampart_group.name, execution_instance.hostname, remaining_capacity))
                 elif not execution_instance and idle_instance_that_fits:
                     execution_instance = idle_instance_that_fits
-                    logger.debug(six.text_type("Starting {} in group {} instance {} (remaining_capacity={})").format(
+                    logger.debug("Starting {} in group {} instance {} (remaining_capacity={})".format(
                         task.log_format, rampart_group.name, execution_instance.hostname, remaining_capacity))
                 if execution_instance:
                     self.graph[rampart_group.name]['graph'].add_job(task)

@@ -511,10 +510,10 @@ class TaskManager():
                     found_acceptable_queue = True
                     break
                 else:
-                    logger.debug(six.text_type("No instance available in group {} to run job {} w/ capacity requirement {}").format(
+                    logger.debug("No instance available in group {} to run job {} w/ capacity requirement {}".format(
                         rampart_group.name, task.log_format, task.task_impact))
             if not found_acceptable_queue:
-                logger.debug(six.text_type("{} couldn't be scheduled on graph, waiting for next cycle").format(task.log_format))
+                logger.debug("{} couldn't be scheduled on graph, waiting for next cycle".format(task.log_format))

     def calculate_capacity_consumed(self, tasks):
         self.graph = InstanceGroup.objects.capacity_values(tasks=tasks, graph=self.graph)

@@ -527,7 +526,7 @@ class TaskManager():
         return (task.task_impact + current_capacity > capacity_total)

     def consume_capacity(self, task, instance_group):
-        logger.debug(six.text_type('{} consumed {} capacity units from {} with prior total of {}').format(
+        logger.debug('{} consumed {} capacity units from {} with prior total of {}'.format(
            task.log_format, task.task_impact, instance_group,
            self.graph[instance_group]['consumed_capacity']))
        self.graph[instance_group]['consumed_capacity'] += task.task_impact
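All of the scheduler hunks are logger calls whose message no longer needs the six.text_type wrapper. As a side note (not something this commit changes): the logging module can also defer %-style formatting until a record is actually emitted, which skips the string-building work entirely when the level is disabled:

import logging

logger = logging.getLogger('awx.main.scheduler')
# Formatting happens inside logging, and only if DEBUG is enabled:
logger.debug("Transitioning %s to %s status.", 'job-42', 'successful')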
@@ -28,7 +28,6 @@ from django.utils import timezone
 from crum import get_current_request, get_current_user
 from crum.signals import current_user_getter

-import six

 # AWX
 from awx.main.models import *  # noqa

@@ -117,7 +116,7 @@ def emit_update_inventory_computed_fields(sender, **kwargs):
     elif sender == Group.inventory_sources.through:
         sender_name = 'group.inventory_sources'
     else:
-        sender_name = six.text_type(sender._meta.verbose_name)
+        sender_name = str(sender._meta.verbose_name)
     if kwargs['signal'] == post_save:
         if sender == Job:
             return

@@ -147,7 +146,7 @@ def emit_update_inventory_on_created_or_deleted(sender, **kwargs):
             pass
         else:
             return
-    sender_name = six.text_type(sender._meta.verbose_name)
+    sender_name = str(sender._meta.verbose_name)
     logger.debug("%s created or deleted, updating inventory computed fields: %r %r",
                  sender_name, sender, kwargs)
     try:

@@ -437,7 +436,7 @@ def activity_stream_create(sender, instance, created, **kwargs):
         # Special case where Job survey password variables need to be hidden
         if type(instance) == Job:
             changes['credentials'] = [
-                six.text_type('{} ({})').format(c.name, c.id)
+                '{} ({})'.format(c.name, c.id)
                 for c in instance.credentials.iterator()
             ]
             changes['labels'] = [l.name for l in instance.labels.iterator()]
@ -13,7 +13,6 @@ import logging
import os
import re
import shutil
import six
import stat
import tempfile
import time
@ -93,7 +92,7 @@ def dispatch_startup():
with disable_activity_stream():
sch.save()
except Exception:
logger.exception(six.text_type("Failed to rebuild schedule {}.").format(sch))
logger.exception("Failed to rebuild schedule {}.".format(sch))

#
# When the dispatcher starts, if the instance cannot be found in the database,
@ -125,8 +124,8 @@ def inform_cluster_of_shutdown():
reaper.reap(this_inst)
except Exception:
logger.exception('failed to reap jobs for {}'.format(this_inst.hostname))
logger.warning(six.text_type('Normal shutdown signal for instance {}, '
'removed self from capacity pool.').format(this_inst.hostname))
logger.warning('Normal shutdown signal for instance {}, '
'removed self from capacity pool.'.format(this_inst.hostname))
except Exception:
logger.exception('Encountered problem with normal shutdown signal.')

@ -164,14 +163,14 @@ def apply_cluster_membership_policies():
])
for hostname in ig.policy_instance_list:
if hostname not in instance_hostnames_map:
logger.info(six.text_type("Unknown instance {} in {} policy list").format(hostname, ig.name))
logger.info("Unknown instance {} in {} policy list".format(hostname, ig.name))
continue
inst = instance_hostnames_map[hostname]
group_actual.instances.append(inst.id)
# NOTE: arguable behavior: policy-list-group is not added to
# instance's group count for consideration in minimum-policy rules
if group_actual.instances:
logger.info(six.text_type("Policy List, adding Instances {} to Group {}").format(group_actual.instances, ig.name))
logger.info("Policy List, adding Instances {} to Group {}".format(group_actual.instances, ig.name))

if ig.controller_id is None:
actual_groups.append(group_actual)
@ -199,7 +198,7 @@ def apply_cluster_membership_policies():
i.groups.append(g.obj.id)
policy_min_added.append(i.obj.id)
if policy_min_added:
logger.info(six.text_type("Policy minimum, adding Instances {} to Group {}").format(policy_min_added, g.obj.name))
logger.info("Policy minimum, adding Instances {} to Group {}".format(policy_min_added, g.obj.name))

# Finally, process instance policy percentages
for g in sorted(actual_groups, key=lambda x: len(x.instances)):
@ -215,7 +214,7 @@ def apply_cluster_membership_policies():
i.groups.append(g.obj.id)
policy_per_added.append(i.obj.id)
if policy_per_added:
logger.info(six.text_type("Policy percentage, adding Instances {} to Group {}").format(policy_per_added, g.obj.name))
logger.info("Policy percentage, adding Instances {} to Group {}".format(policy_per_added, g.obj.name))

# Determine if any changes need to be made
needs_change = False
@ -259,15 +258,15 @@ def delete_project_files(project_path):
if os.path.exists(project_path):
try:
shutil.rmtree(project_path)
logger.info(six.text_type('Success removing project files {}').format(project_path))
logger.info('Success removing project files {}'.format(project_path))
except Exception:
logger.exception(six.text_type('Could not remove project directory {}').format(project_path))
logger.exception('Could not remove project directory {}'.format(project_path))
if os.path.exists(lock_file):
try:
os.remove(lock_file)
logger.debug(six.text_type('Success removing {}').format(lock_file))
logger.debug('Success removing {}'.format(lock_file))
except Exception:
logger.exception(six.text_type('Could not remove lock file {}').format(lock_file))
logger.exception('Could not remove lock file {}'.format(lock_file))

@task()
@ -288,7 +287,7 @@ def send_notifications(notification_list, job_id=None):
notification.status = "successful"
notification.notifications_sent = sent
except Exception as e:
logger.error(six.text_type("Send Notification Failed {}").format(e))
logger.error("Send Notification Failed {}".format(e))
notification.status = "failed"
notification.error = smart_str(e)
update_fields.append('error')
@ -296,7 +295,7 @@ def send_notifications(notification_list, job_id=None):
try:
notification.save(update_fields=update_fields)
except Exception:
logger.exception(six.text_type('Error saving notification {} result.').format(notification.id))
logger.exception('Error saving notification {} result.'.format(notification.id))


@task()
@ -327,7 +326,7 @@ def purge_old_stdout_files():
for f in os.listdir(settings.JOBOUTPUT_ROOT):
if os.path.getctime(os.path.join(settings.JOBOUTPUT_ROOT,f)) < nowtime - settings.LOCAL_STDOUT_EXPIRE_TIME:
os.unlink(os.path.join(settings.JOBOUTPUT_ROOT,f))
logger.info(six.text_type("Removing {}").format(os.path.join(settings.JOBOUTPUT_ROOT,f)))
logger.info("Removing {}".format(os.path.join(settings.JOBOUTPUT_ROOT,f)))


@task(queue=get_local_queuename)
@ -340,7 +339,7 @@ def cluster_node_heartbeat():

(changed, instance) = Instance.objects.get_or_register()
if changed:
logger.info(six.text_type("Registered tower node '{}'").format(instance.hostname))
logger.info("Registered tower node '{}'".format(instance.hostname))

for inst in list(instance_list):
if inst.hostname == settings.CLUSTER_HOST_ID:
@ -352,7 +351,7 @@ def cluster_node_heartbeat():
if this_inst:
startup_event = this_inst.is_lost(ref_time=nowtime)
if this_inst.capacity == 0 and this_inst.enabled:
logger.warning(six.text_type('Rejoining the cluster as instance {}.').format(this_inst.hostname))
logger.warning('Rejoining the cluster as instance {}.'.format(this_inst.hostname))
if this_inst.enabled:
this_inst.refresh_capacity()
elif this_inst.capacity != 0 and not this_inst.enabled:
@ -367,11 +366,12 @@ def cluster_node_heartbeat():
if other_inst.version == "":
continue
if Version(other_inst.version.split('-', 1)[0]) > Version(awx_application_version.split('-', 1)[0]) and not settings.DEBUG:
logger.error(six.text_type("Host {} reports version {}, but this node {} is at {}, shutting down")
.format(other_inst.hostname,
other_inst.version,
this_inst.hostname,
this_inst.version))
logger.error("Host {} reports version {}, but this node {} is at {}, shutting down".format(
other_inst.hostname,
other_inst.version,
this_inst.hostname,
this_inst.version
))
# Shutdown signal will set the capacity to zero to ensure no Jobs get added to this instance.
# The heartbeat task will reset the capacity to the system capacity after upgrade.
stop_local_services(communicate=False)
@ -392,17 +392,17 @@ def cluster_node_heartbeat():
if other_inst.capacity != 0 and not settings.AWX_AUTO_DEPROVISION_INSTANCES:
other_inst.capacity = 0
other_inst.save(update_fields=['capacity'])
logger.error(six.text_type("Host {} last checked in at {}, marked as lost.").format(
logger.error("Host {} last checked in at {}, marked as lost.".format(
other_inst.hostname, other_inst.modified))
elif settings.AWX_AUTO_DEPROVISION_INSTANCES:
deprovision_hostname = other_inst.hostname
other_inst.delete()
logger.info(six.text_type("Host {} Automatically Deprovisioned.").format(deprovision_hostname))
logger.info("Host {} Automatically Deprovisioned.".format(deprovision_hostname))
except DatabaseError as e:
if 'did not affect any rows' in str(e):
logger.debug(six.text_type('Another instance has marked {} as lost').format(other_inst.hostname))
logger.debug('Another instance has marked {} as lost'.format(other_inst.hostname))
else:
logger.exception(six.text_type('Error marking {} as lost').format(other_inst.hostname))
logger.exception('Error marking {} as lost'.format(other_inst.hostname))


@task(queue=get_local_queuename)
@ -429,7 +429,7 @@ def awx_isolated_heartbeat():
isolated_instance.save(update_fields=['last_isolated_check'])
# Slow pass looping over isolated IGs and their isolated instances
if len(isolated_instance_qs) > 0:
logger.debug(six.text_type("Managing isolated instances {}.").format(','.join([inst.hostname for inst in isolated_instance_qs])))
logger.debug("Managing isolated instances {}.".format(','.join([inst.hostname for inst in isolated_instance_qs])))
isolated_manager.IsolatedManager.health_check(isolated_instance_qs, awx_application_version)


@ -462,7 +462,7 @@ def awx_periodic_scheduler():
try:
job_kwargs = schedule.get_job_kwargs()
new_unified_job = schedule.unified_job_template.create_unified_job(**job_kwargs)
logger.info(six.text_type('Spawned {} from schedule {}-{}.').format(
logger.info('Spawned {} from schedule {}-{}.'.format(
new_unified_job.log_format, schedule.name, schedule.pk))

if invalid_license:
@ -575,7 +575,7 @@ def update_host_smart_inventory_memberships():
changed_inventories.add(smart_inventory)
SmartInventoryMembership.objects.bulk_create(memberships)
except IntegrityError as e:
logger.error(six.text_type("Update Host Smart Inventory Memberships failed due to an exception: {}").format(e))
logger.error("Update Host Smart Inventory Memberships failed due to an exception: {}".format(e))
return
# Update computed fields for changed inventories outside atomic action
for smart_inventory in changed_inventories:
@ -602,7 +602,7 @@ def delete_inventory(inventory_id, user_id, retries=5):
'inventories-status_changed',
{'group_name': 'inventories', 'inventory_id': inventory_id, 'status': 'deleted'}
)
logger.debug(six.text_type('Deleted inventory {} as user {}.').format(inventory_id, user_id))
logger.debug('Deleted inventory {} as user {}.'.format(inventory_id, user_id))
except Inventory.DoesNotExist:
logger.exception("Delete Inventory failed due to missing inventory: " + str(inventory_id))
return
@ -626,7 +626,7 @@ def with_path_cleanup(f):
elif os.path.exists(p):
os.remove(p)
except OSError:
logger.exception(six.text_type("Failed to remove tmp file: {}").format(p))
logger.exception("Failed to remove tmp file: {}".format(p))
self.cleanup_paths = []
return _wrapped

@ -1064,13 +1064,13 @@ class BaseTask(object):
try:
self.post_run_hook(instance, status, **kwargs)
except Exception:
logger.exception(six.text_type('{} Post run hook errored.').format(instance.log_format))
logger.exception('{} Post run hook errored.'.format(instance.log_format))
instance = self.update_model(pk)
if instance.cancel_flag:
status = 'canceled'
cancel_wait = (now() - instance.modified).seconds if instance.modified else 0
if cancel_wait > 5:
logger.warn(six.text_type('Request to cancel {} took {} seconds to complete.').format(instance.log_format, cancel_wait))
logger.warn('Request to cancel {} took {} seconds to complete.'.format(instance.log_format, cancel_wait))

instance = self.update_model(pk, status=status, result_traceback=tb,
output_replacements=output_replacements,
@ -1079,7 +1079,7 @@ class BaseTask(object):
try:
self.final_run_hook(instance, status, **kwargs)
except Exception:
logger.exception(six.text_type('{} Final run hook errored.').format(instance.log_format))
logger.exception('{} Final run hook errored.'.format(instance.log_format))
instance.websocket_emit_status(status)
if status != 'successful':
if status == 'canceled':
@ -1258,7 +1258,7 @@ class RunJob(BaseTask):
env['ANSIBLE_NET_SSH_KEYFILE'] = ssh_keyfile

authorize = network_cred.get_input('authorize', default=False)
env['ANSIBLE_NET_AUTHORIZE'] = six.text_type(int(authorize))
env['ANSIBLE_NET_AUTHORIZE'] = str(int(authorize))
if authorize:
env['ANSIBLE_NET_AUTH_PASS'] = network_cred.get_input('authorize_password', default='')

@ -1684,15 +1684,15 @@ class RunProjectUpdate(BaseTask):
if not inv_src.update_on_project_update:
continue
if inv_src.scm_last_revision == scm_revision:
logger.debug(six.text_type('Skipping SCM inventory update for `{}` because '
'project has not changed.').format(inv_src.name))
logger.debug('Skipping SCM inventory update for `{}` because '
'project has not changed.'.format(inv_src.name))
continue
logger.debug(six.text_type('Local dependent inventory update for `{}`.').format(inv_src.name))
logger.debug('Local dependent inventory update for `{}`.'.format(inv_src.name))
with transaction.atomic():
if InventoryUpdate.objects.filter(inventory_source=inv_src,
status__in=ACTIVE_STATES).exists():
logger.info(six.text_type('Skipping SCM inventory update for `{}` because '
'another update is already active.').format(inv_src.name))
logger.info('Skipping SCM inventory update for `{}` because '
'another update is already active.'.format(inv_src.name))
continue
local_inv_update = inv_src.create_inventory_update(
_eager_fields=dict(
@ -1705,8 +1705,9 @@ class RunProjectUpdate(BaseTask):
try:
inv_update_class().run(local_inv_update.id)
except Exception:
logger.exception(six.text_type('{} Unhandled exception updating dependent SCM inventory sources.')
.format(project_update.log_format))
logger.exception('{} Unhandled exception updating dependent SCM inventory sources.'.format(
project_update.log_format
))

try:
project_update.refresh_from_db()
@ -1719,10 +1720,10 @@ class RunProjectUpdate(BaseTask):
logger.warning('%s Dependent inventory update deleted during execution.', project_update.log_format)
continue
if project_update.cancel_flag:
logger.info(six.text_type('Project update {} was canceled while updating dependent inventories.').format(project_update.log_format))
logger.info('Project update {} was canceled while updating dependent inventories.'.format(project_update.log_format))
break
if local_inv_update.cancel_flag:
logger.info(six.text_type('Continuing to process project dependencies after {} was canceled').format(local_inv_update.log_format))
logger.info('Continuing to process project dependencies after {} was canceled'.format(local_inv_update.log_format))
if local_inv_update.status == 'successful':
inv_src.scm_last_revision = scm_revision
inv_src.save(update_fields=['scm_last_revision'])
@ -1731,7 +1732,7 @@ class RunProjectUpdate(BaseTask):
try:
fcntl.flock(self.lock_fd, fcntl.LOCK_UN)
except IOError as e:
logger.error(six.text_type("I/O error({0}) while trying to open lock file [{1}]: {2}").format(e.errno, instance.get_lock_file(), e.strerror))
logger.error("I/O error({0}) while trying to open lock file [{1}]: {2}".format(e.errno, instance.get_lock_file(), e.strerror))
os.close(self.lock_fd)
raise

@ -1749,7 +1750,7 @@ class RunProjectUpdate(BaseTask):
try:
self.lock_fd = os.open(lock_path, os.O_RDONLY | os.O_CREAT)
except OSError as e:
logger.error(six.text_type("I/O error({0}) while trying to open lock file [{1}]: {2}").format(e.errno, lock_path, e.strerror))
logger.error("I/O error({0}) while trying to open lock file [{1}]: {2}".format(e.errno, lock_path, e.strerror))
raise

start_time = time.time()
@ -1757,23 +1758,23 @@ class RunProjectUpdate(BaseTask):
try:
instance.refresh_from_db(fields=['cancel_flag'])
if instance.cancel_flag:
logger.info(six.text_type("ProjectUpdate({0}) was cancelled".format(instance.pk)))
logger.info("ProjectUpdate({0}) was cancelled".format(instance.pk))
return
fcntl.flock(self.lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
break
except IOError as e:
if e.errno not in (errno.EAGAIN, errno.EACCES):
os.close(self.lock_fd)
logger.error(six.text_type("I/O error({0}) while trying to aquire lock on file [{1}]: {2}").format(e.errno, lock_path, e.strerror))
logger.error("I/O error({0}) while trying to aquire lock on file [{1}]: {2}".format(e.errno, lock_path, e.strerror))
raise
else:
time.sleep(1.0)
waiting_time = time.time() - start_time

if waiting_time > 1.0:
logger.info(six.text_type(
logger.info(
'{} spent {} waiting to acquire lock for local source tree '
'for path {}.').format(instance.log_format, waiting_time, lock_path))
'for path {}.'.format(instance.log_format, waiting_time, lock_path))

def pre_run_hook(self, instance, **kwargs):
# re-create root project folder if a natural disaster has destroyed it
@ -1790,7 +1791,7 @@ class RunProjectUpdate(BaseTask):
if lines:
p.scm_revision = lines[0].strip()
else:
logger.info(six.text_type("{} Could not find scm revision in check").format(instance.log_format))
logger.info("{} Could not find scm revision in check".format(instance.log_format))
p.playbook_files = p.playbooks
p.inventory_files = p.inventories
p.save()
@ -1912,7 +1913,7 @@ class RunInventoryUpdate(BaseTask):
ec2_opts['cache_path'] = cache_path
ec2_opts.setdefault('cache_max_age', '300')
for k, v in ec2_opts.items():
cp.set(section, k, six.text_type(v))
cp.set(section, k, str(v))
# Allow custom options to vmware inventory script.
elif inventory_update.source == 'vmware':

@ -1931,7 +1932,7 @@ class RunInventoryUpdate(BaseTask):
vmware_opts.setdefault('groupby_patterns', inventory_update.group_by)

for k, v in vmware_opts.items():
cp.set(section, k, six.text_type(v))
cp.set(section, k, str(v))

elif inventory_update.source == 'satellite6':
section = 'foreman'
@ -1950,7 +1951,7 @@ class RunInventoryUpdate(BaseTask):
elif k == 'satellite6_want_hostcollections' and isinstance(v, bool):
want_hostcollections = v
else:
cp.set(section, k, six.text_type(v))
cp.set(section, k, str(v))

if credential:
cp.set(section, 'url', credential.get_input('host', default=''))
@ -2009,7 +2010,7 @@ class RunInventoryUpdate(BaseTask):

azure_rm_opts = dict(inventory_update.source_vars_dict.items())
for k, v in azure_rm_opts.items():
cp.set(section, k, six.text_type(v))
cp.set(section, k, str(v))

# Return INI content.
if cp.sections():
@ -2094,7 +2095,7 @@ class RunInventoryUpdate(BaseTask):
elif inventory_update.source in ['scm', 'custom']:
for env_k in inventory_update.source_vars_dict:
if str(env_k) not in env and str(env_k) not in settings.INV_ENV_VARIABLE_BLACKLIST:
env[str(env_k)] = six.text_type(inventory_update.source_vars_dict[env_k])
env[str(env_k)] = str(inventory_update.source_vars_dict[env_k])
elif inventory_update.source == 'tower':
env['TOWER_INVENTORY'] = inventory_update.instance_filters
env['TOWER_LICENSE_TYPE'] = get_licenser().validate()['license_type']
@ -2410,7 +2411,7 @@ class RunSystemJob(BaseTask):
'--management-jobs', '--ad-hoc-commands', '--workflow-jobs',
'--notifications'])
except Exception:
logger.exception(six.text_type("{} Failed to parse system job").format(system_job.log_format))
logger.exception("{} Failed to parse system job".format(system_job.log_format))
return args

def build_env(self, instance, **kwargs):
@ -2436,7 +2437,7 @@ def _reconstruct_relationships(copy_mapping):
setattr(new_obj, field_name, related_obj)
elif field.many_to_many:
for related_obj in getattr(old_obj, field_name).all():
logger.debug(six.text_type('Deep copy: Adding {} to {}({}).{} relationship').format(
logger.debug('Deep copy: Adding {} to {}({}).{} relationship'.format(
related_obj, new_obj, model, field_name
))
getattr(new_obj, field_name).add(copy_mapping.get(related_obj, related_obj))
@ -2448,7 +2449,7 @@ def deep_copy_model_obj(
model_module, model_name, obj_pk, new_obj_pk,
user_pk, sub_obj_list, permission_check_func=None
):
logger.info(six.text_type('Deep copy {} from {} to {}.').format(model_name, obj_pk, new_obj_pk))
logger.info('Deep copy {} from {} to {}.'.format(model_name, obj_pk, new_obj_pk))
from awx.api.generics import CopyAPIView
from awx.main.signals import disable_activity_stream
model = getattr(importlib.import_module(model_module), model_name, None)

@ -1,6 +1,5 @@
from django.contrib.auth.models import User

import six

from awx.main.models import (
Organization,
@ -150,7 +149,7 @@ def create_survey_spec(variables=None, default_type='integer', required=True, mi
vars_list = variables
else:
vars_list = [variables]
if isinstance(variables[0], six.string_types):
if isinstance(variables[0], str):
slogan = variables[0]
else:
slogan = variables[0].get('question_name', 'something')

@ -2,7 +2,6 @@ from unittest import mock
import pytest
import json

import six

from awx.api.versioning import reverse
from awx.main.utils import timestamp_apiformat
@ -107,7 +106,7 @@ def test_content(hosts, fact_scans, get, user, fact_ansible_json, monkeypatch_js

assert fact_known.host_id == response.data['host']
# TODO: Just make response.data['facts'] when we're only dealing with postgres, or if jsonfields ever fixes this bug
assert fact_ansible_json == (json.loads(response.data['facts']) if isinstance(response.data['facts'], six.text_type) else response.data['facts'])
assert fact_ansible_json == (json.loads(response.data['facts']) if isinstance(response.data['facts'], str) else response.data['facts'])
assert timestamp_apiformat(fact_known.timestamp) == response.data['timestamp']
assert fact_known.module == response.data['module']

@ -119,7 +118,7 @@ def _test_search_by_module(hosts, fact_scans, get, user, fact_json, module_name)
(fact_known, response) = setup_common(hosts, fact_scans, get, user, module_name=module_name, get_params=params)

# TODO: Just make response.data['facts'] when we're only dealing with postgres, or if jsonfields ever fixes this bug
assert fact_json == (json.loads(response.data['facts']) if isinstance(response.data['facts'], six.text_type) else response.data['facts'])
assert fact_json == (json.loads(response.data['facts']) if isinstance(response.data['facts'], str) else response.data['facts'])
assert timestamp_apiformat(fact_known.timestamp) == response.data['timestamp']
assert module_name == response.data['module']


@ -17,7 +17,6 @@ from awx.api.versioning import reverse
from awx.conf.models import Setting
from awx.main.utils.handlers import AWXProxyHandler, LoggingConnectivityException

import six

TEST_GIF_LOGO = 'data:image/gif;base64,R0lGODlhIQAjAPIAAP//////AP8AAMzMAJmZADNmAAAAAAAAACH/C05FVFNDQVBFMi4wAwEAAAAh+QQJCgAHACwAAAAAIQAjAAADo3i63P4wykmrvTjrzZsxXfR94WMQBFh6RECuixHMLyzPQ13ewZCvow9OpzEAjIBj79cJJmU+FceIVEZ3QRozxBttmyOBwPBtisdX4Bha3oxmS+llFIPHQXQKkiSEXz9PeklHBzx3hYNyEHt4fmmAhHp8Nz45KgV5FgWFOFEGmwWbGqEfniChohmoQZ+oqRiZDZhEgk81I4mwg4EKVbxzrDHBEAkAIfkECQoABwAsAAAAACEAIwAAA6V4utz+MMpJq724GpP15p1kEAQYQmOwnWjgrmxjuMEAx8rsDjZ+fJvdLWQAFAHGWo8FRM54JqIRmYTigDrDMqZTbbbMj0CgjTLHZKvPQH6CTx+a2vKR0XbbOsoZ7SphG057gjl+c0dGgzeGNiaBiSgbBQUHBV08NpOVlkMSk0FKjZuURHiiOJxQnSGfQJuoEKREejK0dFRGjoiQt7iOuLx0rgxYEQkAIfkECQoABwAsAAAAACEAIwAAA7h4utxnxslJDSGR6nrz/owxYB64QUEwlGaVqlB7vrAJscsd3Lhy+wBArGEICo3DUFH4QDqK0GMy51xOgcGlEAfJ+iAFie62chR+jYKaSAuQGOqwJp7jGQRDuol+F/jxZWsyCmoQfwYwgoM5Oyg1i2w0A2WQIW2TPYOIkleQmy+UlYygoaIPnJmapKmqKiusMmSdpjxypnALtrcHioq3ury7hGm3dnVosVpMWFmwREZbddDOSsjVswcJACH5BAkKAAcALAAAAAAhACMAAAOxeLrc/jDKSZUxNS9DCNYV54HURQwfGRlDEFwqdLVuGjOsW9/Odb0wnsUAKBKNwsMFQGwyNUHckVl8bqI4o43lA26PNkv1S9DtNuOeVirw+aTI3qWAQwnud1vhLSnQLS0GeFF+GoVKNF0fh4Z+LDQ6Bn5/MTNmL0mAl2E3j2aclTmRmYCQoKEDiaRDKFhJez6UmbKyQowHtzy1uEl8DLCnEktrQ2PBD1NxSlXKIW5hz6cJACH5BAkKAAcALAAAAAAhACMAAAOkeLrc/jDKSau9OOvNlTFd9H3hYxAEWDJfkK5LGwTq+g0zDR/GgM+10A04Cm56OANgqTRmkDTmSOiLMgFOTM9AnFJHuexzYBAIijZf2SweJ8ttbbXLmd5+wBiJosSCoGF/fXEeS1g8gHl9hxODKkh4gkwVIwUekESIhA4FlgV3PyCWG52WI2oGnR2lnUWpqhqVEF4Xi7QjhpsshpOFvLosrnpoEAkAIfkECQoABwAsAAAAACEAIwAAA6l4utz+MMpJq71YGpPr3t1kEAQXQltQnk8aBCa7bMMLy4wx1G8s072PL6SrGQDI4zBThCU/v50zCVhidIYgNPqxWZkDg0AgxB2K4vEXbBSvr1JtZ3uOext0x7FqovF6OXtfe1UzdjAxhINPM013ChtJER8FBQeVRX8GlpggFZWWfjwblTiigGZnfqRmpUKbljKxDrNMeY2eF4R8jUiSur6/Z8GFV2WBtwwJACH5BAkKAAcALAAAAAAhACMAAAO6eLrcZi3KyQwhkGpq8f6ONWQgaAxB8JTfg6YkO50pzD5xhaurhCsGAKCnEw6NucNDCAkyI8ugdAhFKpnJJdMaeiofBejowUseCr9GYa0j1GyMdVgjBxoEuPSZXWKf7gKBeHtzMms0gHgGfDIVLztmjScvNZEyk28qjT40b5aXlHCbDgOhnzedoqOOlKeopaqrCy56sgtotbYKhYW6e7e9tsHBssO6eSTIm1peV0iuFUZDyU7NJnmcuQsJACH5BAkKAAcALAAAAAAhACMAAAOteLrc/jDKSZsxNS9DCNYV54Hh4H0kdAXBgKaOwbYX/Miza1vrVe8KA2AoJL5gwiQgeZz4GMXlcHl8xozQ3kW3KTajL9zsBJ1+sV2fQfALem+XAlRApxu4ioI1UpC76zJ4fRqDBzI+LFyFhH1iiS59fkgziW07jjRAG5QDeECOLk2Tj6KjnZafW6hAej6Smgevr6yysza2tiCuMasUF2Yov2gZUUQbU8YaaqjLpQkAOw==' # NOQA
TEST_PNG_LOGO = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACEAAAAjCAYAAAAaLGNkAAAAAXNSR0IB2cksfwAAAdVpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IlhNUCBDb3JlIDUuNC4wIj4KICAgPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4KICAgICAgPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIKICAgICAgICAgICAgeG1sbnM6dGlmZj0iaHR0cDovL25zLmFkb2JlLmNvbS90aWZmLzEuMC8iPgogICAgICAgICA8dGlmZjpDb21wcmVzc2lvbj4xPC90aWZmOkNvbXByZXNzaW9uPgogICAgICAgICA8dGlmZjpQaG90b21ldHJpY0ludGVycHJldGF0aW9uPjI8L3RpZmY6UGhvdG9tZXRyaWNJbnRlcnByZXRhdGlvbj4KICAgICAgICAgPHRpZmY6T3JpZW50YXRpb24+MTwvdGlmZjpPcmllbnRhdGlvbj4KICAgICAgPC9yZGY6RGVzY3JpcHRpb24+CiAgIDwvcmRmOlJERj4KPC94OnhtcG1ldGE+Cjl0tmoAAAHVSURBVFgJ7VZRsoMgDNTOu5E9U+/Ud6Z6JssGNg2oNKD90xkHCNnNkgTbYbieKwNXBn6bgSXQ4+16xi5UDiqDN3Pecr6+1fM5DHh7n1NEIPjjoRLKzOjG3qQ5dRtEy2LCjh/Gz2wDZE2nZYKkrxdn/kY9XQQkGCGqqDY5IgJFkEKgBCzDNGXhTKEye7boFRH6IPJj5EshiNCSjV4R4eSx7zhmR2tcdIuwmWiMeao7e0JHViZEWUI5aP8a9O+rx74D6sGEiJftiX3YeueIiFXg2KrhpqzjVC3dPZFYJZ7NOwwtNwM8R0UkLfH0sT5qck+OlkMq0BucKr0iWG7gpAQksD9esM1z3Lnf6SHjLh67nnKEGxC/iomWhByTeXOQJGHHcKxwHhHKnt1HIdYtmexkIb/HOURWTSJqn2gKMDG0bDUc/D0iAseovxUBoylmQCug6IVhSv+4DIeKI94jAr4AjiSEgQ25JYB+YWT9BZ94AM8erwgFkRifaArA6U0G5KT0m//z26REZuK9okgrT6VwE1jTHjbVzyNAyRwTEPOtuiex9FVBNZCkruaA4PZqFp1u8Rpww9/6rcK5y0EkAxRiZJt79PWOVYWGRE9pbJhavMengMflGyumk0akMsQnAAAAAElFTkSuQmCC' # NOQA
@ -78,7 +77,7 @@ def test_awx_task_env_validity(get, patch, admin, value, expected):

if expected == 200:
resp = get(url, user=admin)
assert resp.data['AWX_TASK_ENV'] == dict((k, six.text_type(v)) for k, v in value.items())
assert resp.data['AWX_TASK_ENV'] == dict((k, str(v)) for k, v in value.items())


@pytest.mark.django_db

@ -3,15 +3,14 @@ import pytest
from unittest import mock
import json
import os
import six
import tempfile
import shutil
import urllib.parse
from datetime import timedelta
from unittest.mock import PropertyMock

# Django
from django.core.urlresolvers import resolve
from django.utils.six.moves.urllib.parse import urlparse
from django.utils import timezone
from django.contrib.auth.models import User
from django.core.serializers.json import DjangoJSONEncoder
@ -523,7 +522,7 @@ def _request(verb):
if 'format' not in kwargs and 'content_type' not in kwargs:
kwargs['format'] = 'json'

view, view_args, view_kwargs = resolve(urlparse(url)[2])
view, view_args, view_kwargs = resolve(urllib.parse.urlparse(url)[2])
request = getattr(APIRequestFactory(), verb)(url, **kwargs)
if isinstance(kwargs.get('cookies', None), dict):
for key, value in kwargs['cookies'].items():
@ -730,7 +729,7 @@ def get_db_prep_save(self, value, connection, **kwargs):
return None
# default values come in as strings; only non-strings should be
# run through `dumps`
if not isinstance(value, six.string_types):
if not isinstance(value, str):
value = dumps(value)

return value

@ -2,7 +2,6 @@

import pytest
from unittest import mock
import six

from django.core.exceptions import ValidationError

@ -249,7 +248,7 @@ def test_inventory_update_name(inventory, inventory_source):

@pytest.mark.django_db
def test_inventory_name_with_unicode(inventory, inventory_source):
inventory.name = six.u('オオオ')
inventory.name = 'オオオ'
inventory.save()
iu = inventory_source.update()
assert iu.name.startswith(inventory.name)

@ -1,5 +1,4 @@
import pytest
import six

from awx.main.models import JobTemplate, Job, JobHostSummary, WorkflowJob

@ -71,12 +70,12 @@ def test_job_host_summary_representation(host):
host=host, job=job,
changed=1, dark=2, failures=3, ok=4, processed=5, skipped=6
)
assert 'single-host changed=1 dark=2 failures=3 ok=4 processed=5 skipped=6' == six.text_type(jhs)
assert 'single-host changed=1 dark=2 failures=3 ok=4 processed=5 skipped=6' == str(jhs)

# Representation should be robust to deleted related items
jhs = JobHostSummary.objects.get(pk=jhs.id)
host.delete()
assert 'N/A changed=1 dark=2 failures=3 ok=4 processed=5 skipped=6' == six.text_type(jhs)
assert 'N/A changed=1 dark=2 failures=3 ok=4 processed=5 skipped=6' == str(jhs)


@pytest.mark.django_db

@ -1,6 +1,5 @@
import itertools
import pytest
import six

# CRUM
from crum import impersonate
@ -74,7 +73,7 @@ class TestCreateUnifiedJob:
new_creds = []
for cred in jt_linked.credentials.all():
new_creds.append(Credential.objects.create(
name=six.text_type(cred.name) + six.text_type('_new'),
name=str(cred.name) + '_new',
credential_type=cred.credential_type,
inputs=cred.inputs
))

@ -19,7 +19,6 @@ from django.utils.encoding import smart_str, smart_bytes
from awx.main.expect import run, isolated_manager

from django.conf import settings
import six

HERE, FILENAME = os.path.split(__file__)

@ -107,7 +106,7 @@ def test_cancel_callback_error():


@pytest.mark.timeout(3) # https://github.com/ansible/tower/issues/2391#issuecomment-401946895
@pytest.mark.parametrize('value', ['abc123', six.u('Iñtërnâtiônàlizætiøn')])
@pytest.mark.parametrize('value', ['abc123', 'Iñtërnâtiônàlizætiøn'])
def test_env_vars(value):
stdout = StringIO()
status, rc = run.run_pexpect(

@ -1,6 +1,5 @@
# -*- coding: utf-8 -*-
import pytest
import six

from django.core.exceptions import ValidationError
from rest_framework.serializers import ValidationError as DRFValidationError
@ -152,8 +151,7 @@ def test_cred_type_injectors_schema(injectors, valid):
)
field = CredentialType._meta.get_field('injectors')
if valid is False:
with pytest.raises(ValidationError, message=six.text_type(
"Injector was supposed to throw a validation error, data: {}").format(injectors)):
with pytest.raises(ValidationError, message="Injector was supposed to throw a validation error, data: {}".format(injectors)):
field.clean(injectors, type_)
else:
field.clean(injectors, type_)

@ -14,7 +14,6 @@ from backports.tempfile import TemporaryDirectory
import fcntl
from unittest import mock
import pytest
import six
import yaml

from django.conf import settings
@ -1562,7 +1561,7 @@ class TestJobCredentials(TestJobExecution):
self.task.run(self.pk)

def test_custom_environment_injectors_with_unicode_content(self):
value = six.u('Iñtërnâtiônàlizætiøn')
value = 'Iñtërnâtiônàlizætiøn'
some_cloud = CredentialType(
kind='cloud',
name='SomeCloud',

@ -9,7 +9,6 @@ from awx.main.utils.filters import SmartFilter, ExternalLoggerEnabled
# Django
from django.db.models import Q

import six


@pytest.mark.parametrize('params, logger_name, expected', [
@ -111,7 +110,7 @@ class TestSmartFilterQueryFromString():
])
def test_query_generated(self, mock_get_host_model, filter_string, q_expected):
q = SmartFilter.query_from_string(filter_string)
assert six.text_type(q) == six.text_type(q_expected)
assert str(q) == str(q_expected)

@pytest.mark.parametrize("filter_string", [
'ansible_facts__facts__facts__blank='
@ -138,7 +137,7 @@ class TestSmartFilterQueryFromString():
])
def test_unicode(self, mock_get_host_model, filter_string, q_expected):
q = SmartFilter.query_from_string(filter_string)
assert six.text_type(q) == six.text_type(q_expected)
assert str(q) == str(q_expected)

@pytest.mark.parametrize("filter_string,q_expected", [
('(a=b)', Q(**{u"a": u"b"})),
@ -154,7 +153,7 @@ class TestSmartFilterQueryFromString():
])
def test_boolean_parenthesis(self, mock_get_host_model, filter_string, q_expected):
q = SmartFilter.query_from_string(filter_string)
assert six.text_type(q) == six.text_type(q_expected)
assert str(q) == str(q_expected)

@pytest.mark.parametrize("filter_string,q_expected", [
('ansible_facts__a__b__c[]=3', Q(**{u"ansible_facts__contains": {u"a": {u"b": {u"c": [3]}}}})),
@ -177,7 +176,7 @@ class TestSmartFilterQueryFromString():
])
def test_contains_query_generated(self, mock_get_host_model, filter_string, q_expected):
q = SmartFilter.query_from_string(filter_string)
assert six.text_type(q) == six.text_type(q_expected)
assert str(q) == str(q_expected)

@pytest.mark.parametrize("filter_string,q_expected", [
#('a__b__c[]="true"', Q(**{u"a__b__c__contains": u"\"true\""})),
@ -187,7 +186,7 @@ class TestSmartFilterQueryFromString():
])
def test_contains_query_generated_unicode(self, mock_get_host_model, filter_string, q_expected):
q = SmartFilter.query_from_string(filter_string)
assert six.text_type(q) == six.text_type(q_expected)
assert str(q) == str(q_expected)

@pytest.mark.parametrize("filter_string,q_expected", [
('ansible_facts__a=null', Q(**{u"ansible_facts__contains": {u"a": None}})),
@ -195,7 +194,7 @@ class TestSmartFilterQueryFromString():
])
def test_contains_query_generated_null(self, mock_get_host_model, filter_string, q_expected):
q = SmartFilter.query_from_string(filter_string)
assert six.text_type(q) == six.text_type(q_expected)
assert str(q) == str(q_expected)


@pytest.mark.parametrize("filter_string,q_expected", [
@ -213,7 +212,7 @@ class TestSmartFilterQueryFromString():
])
def test_search_related_fields(self, mock_get_host_model, filter_string, q_expected):
q = SmartFilter.query_from_string(filter_string)
assert six.text_type(q) == six.text_type(q_expected)
assert str(q) == str(q_expected)


'''

@ -1,4 +1,3 @@
import six

from awx.main.models import Job, JobEvent

@ -15,7 +14,7 @@ def test_log_from_job_event_object():

# Check entire body of data for any exceptions from getattr on event object
for fd in data_for_log:
if not isinstance(data_for_log[fd], six.string_types):
if not isinstance(data_for_log[fd], str):
continue
assert 'Exception' not in data_for_log[fd], 'Exception delivered in data: {}'.format(data_for_log[fd])


@ -14,7 +14,6 @@ import urllib.parse
import threading
import contextlib
import tempfile
import six
import psutil
from functools import reduce, wraps
from io import StringIO
@ -82,7 +81,7 @@ def get_object_or_403(klass, *args, **kwargs):


def to_python_boolean(value, allow_none=False):
value = six.text_type(value)
value = str(value)
if value.lower() in ('true', '1', 't'):
return True
elif value.lower() in ('false', '0', 'f'):
@ -90,7 +89,7 @@ def to_python_boolean(value, allow_none=False):
elif allow_none and value.lower() in ('none', 'null'):
return None
else:
raise ValueError(_(u'Unable to convert "%s" to boolean') % six.text_type(value))
raise ValueError(_(u'Unable to convert "%s" to boolean') % value)


def region_sorting(region):
@ -339,7 +338,7 @@ def update_scm_url(scm_type, url, username=True, password=True,
netloc = u''
netloc = u'@'.join(filter(None, [netloc, parts.hostname]))
if parts.port:
netloc = u':'.join([netloc, six.text_type(parts.port)])
netloc = u':'.join([netloc, str(parts.port)])
new_url = urllib.parse.urlunsplit([parts.scheme, netloc, parts.path,
parts.query, parts.fragment])
if scp_format and parts.scheme == 'git+ssh':
@ -376,7 +375,7 @@ def _convert_model_field_for_display(obj, field_name, password_fields=None):
if password_fields is None:
password_fields = set(getattr(type(obj), 'PASSWORD_FIELDS', [])) | set(['password'])
if field_name in password_fields or (
isinstance(field_val, six.string_types) and
isinstance(field_val, str) and
field_val.startswith('$encrypted$')
):
return u'hidden'
@ -623,7 +622,7 @@ def parse_yaml_or_json(vars_str, silent_failure=True):
'''
if isinstance(vars_str, dict):
return vars_str
elif isinstance(vars_str, six.string_types) and vars_str == '""':
elif isinstance(vars_str, str) and vars_str == '""':
return {}

try:

@ -3,7 +3,6 @@ import hashlib
import logging
from collections import namedtuple

import six
from cryptography.fernet import Fernet, InvalidToken
from cryptography.hazmat.backends import default_backend
from django.utils.encoding import smart_str, smart_bytes
@ -144,6 +143,6 @@ def encrypt_dict(data, fields):


def is_encrypted(value):
if not isinstance(value, six.string_types):
if not isinstance(value, str):
return False
return value.startswith('$encrypted$') and len(value) > len('$encrypted$')

@ -10,7 +10,6 @@ from pyparsing import (
)
from logging import Filter, _nameToLevel

import six

from django.apps import apps
from django.db import models
@ -154,12 +153,12 @@ class SmartFilter(object):
self.result = Host.objects.filter(**kwargs)

def strip_quotes_traditional_logic(self, v):
if type(v) is six.text_type and v.startswith('"') and v.endswith('"'):
if type(v) is str and v.startswith('"') and v.endswith('"'):
return v[1:-1]
return v

def strip_quotes_json_logic(self, v):
if type(v) is six.text_type and v.startswith('"') and v.endswith('"') and v != u'"null"':
if type(v) is str and v.startswith('"') and v.endswith('"') and v != u'"null"':
return v[1:-1]
return v

@ -238,7 +237,7 @@ class SmartFilter(object):
# value
# ="something"
if t_len > (v_offset + 2) and t[v_offset] == "\"" and t[v_offset + 2] == "\"":
v = u'"' + six.text_type(t[v_offset + 1]) + u'"'
v = u'"' + str(t[v_offset + 1]) + u'"'
#v = t[v_offset + 1]
# empty ""
elif t_len > (v_offset + 1):
@ -307,9 +306,9 @@ class SmartFilter(object):
* handle key with __ in it
'''
filter_string_raw = filter_string
filter_string = six.text_type(filter_string)
filter_string = str(filter_string)

unicode_spaces = list(set(six.text_type(c) for c in filter_string if c.isspace()))
unicode_spaces = list(set(str(c) for c in filter_string if c.isspace()))
unicode_spaces_other = unicode_spaces + [u'(', u')', u'=', u'"']
atom = CharsNotIn(unicode_spaces_other)
atom_inside_quotes = CharsNotIn(u'"')

@ -7,7 +7,6 @@ import json
import time
import logging

import six

from django.conf import settings

@ -40,7 +39,7 @@ class LogstashFormatter(LogstashFormatterVersion1):
data = copy(raw_data['ansible_facts'])
else:
data = copy(raw_data)
if isinstance(data, six.string_types):
if isinstance(data, str):
data = json.loads(data)
data_for_log = {}


@ -8,7 +8,6 @@ import requests
import time
import socket
import select
import six
from urllib import parse as urlparse
from concurrent.futures import ThreadPoolExecutor
from requests.exceptions import RequestException
@ -211,7 +210,7 @@ def _encode_payload_for_socket(payload):
encoded_payload = payload
if isinstance(encoded_payload, dict):
encoded_payload = json.dumps(encoded_payload, ensure_ascii=False)
if isinstance(encoded_payload, six.text_type):
if isinstance(encoded_payload, str):
encoded_payload = encoded_payload.encode('utf-8')
return encoded_payload

@ -237,7 +236,7 @@ class TCPHandler(BaseHandler):
except Exception as e:
ret = SocketResult(False, "Error sending message from %s: %s" %
(TCPHandler.__name__,
' '.join(six.text_type(arg) for arg in e.args)))
' '.join(str(arg) for arg in e.args)))
logger.exception(ret.reason)
finally:
sok.close()

@ -1,5 +1,5 @@
# Python
import six
import urllib.parse
from collections import deque
# Django
from django.db import models
@ -12,7 +12,7 @@ NAMED_URL_RES_INNER_DILIMITER = "+"
NAMED_URL_RES_DILIMITER_ENCODE = "%2B"
URL_PATH_RESERVED_CHARSET = {}
for c in ';/?:@=&[]':
URL_PATH_RESERVED_CHARSET[c] = six.moves.urllib.parse.quote(c, safe='')
URL_PATH_RESERVED_CHARSET[c] = urllib.parse.quote(c, safe='')
FK_NAME = 0
NEXT_NODE = 1

@ -126,7 +126,7 @@ class GraphNode(object):
for attr_name, attr_value in zip(stack[-1].fields, named_url_parts):
attr_name = ("__%s" % attr_name) if evolving_prefix else attr_name
if isinstance(attr_value, str):
attr_value = six.moves.urllib.parse.unquote(attr_value)
attr_value = urllib.parse.unquote(attr_value)
kwargs[evolving_prefix + attr_name] = attr_value
idx += 1
if stack[-1].counter >= len(stack[-1].adj_list):

@ -1,5 +1,4 @@
import re
import six
import yaml


@ -9,7 +8,7 @@ __all__ = ['safe_dump', 'SafeLoader']
class SafeStringDumper(yaml.SafeDumper):

def represent_data(self, value):
if isinstance(value, six.string_types):
if isinstance(value, str):
return self.represent_scalar('!unsafe', value)
return super(SafeStringDumper, self).represent_data(value)

@ -17,7 +16,7 @@ class SafeStringDumper(yaml.SafeDumper):
class SafeLoader(yaml.Loader):

def construct_yaml_unsafe(self, node):
class UnsafeText(six.text_type):
class UnsafeText(str):
__UNSAFE__ = True
node = UnsafeText(self.construct_scalar(node))
return node
@ -75,7 +74,7 @@ def sanitize_jinja(arg):
"""
For some string, prevent usage of Jinja-like flags
"""
if isinstance(arg, six.string_types):
if isinstance(arg, str):
# If the argument looks like it contains Jinja expressions
# {{ x }} ...
if re.search(r'\{\{[^}]+}}', arg) is not None:

@ -4,7 +4,6 @@
import os
import re # noqa
import sys
import six
from datetime import timedelta

# global settings
@ -499,11 +498,11 @@ CELERYBEAT_SCHEDULE = {
AWX_INCONSISTENT_TASK_INTERVAL = 60 * 3

AWX_CELERY_QUEUES_STATIC = [
six.text_type(CELERY_DEFAULT_QUEUE),
CELERY_DEFAULT_QUEUE,
]

AWX_CELERY_BCAST_QUEUES_STATIC = [
six.text_type('tower_broadcast_all'),
'tower_broadcast_all',
]

ASGI_AMQP = {

@ -6,7 +6,6 @@ import logging
import uuid

import ldap
import six

# Django
from django.dispatch import receiver
@ -258,7 +257,7 @@ class TowerSAMLIdentityProvider(BaseSAMLIdentityProvider):

def get_user_permanent_id(self, attributes):
uid = attributes[self.conf.get('attr_user_permanent_id', OID_USERID)]
if isinstance(uid, six.string_types):
if isinstance(uid, str):
return uid
return uid[0]

@ -277,7 +276,7 @@ class TowerSAMLIdentityProvider(BaseSAMLIdentityProvider):
logger.warn("Could not map user detail '%s' from SAML attribute '%s'; "
"update SOCIAL_AUTH_SAML_ENABLED_IDPS['%s']['%s'] with the correct SAML attribute.",
conf_key[5:], key, self.name, conf_key)
return six.text_type(value) if value is not None else value
return str(value) if value is not None else value


class SAMLAuth(BaseSAMLAuth):
@ -330,10 +329,10 @@ def _update_m2m_from_groups(user, ldap_user, rel, opts, remove=True):
elif opts is True:
should_add = True
else:
if isinstance(opts, six.string_types):
if isinstance(opts, str):
opts = [opts]
for group_dn in opts:
if not isinstance(group_dn, six.string_types):
if not isinstance(group_dn, str):
continue
if ldap_user._get_groups().is_member_of(group_dn):
should_add = True
@ -366,9 +365,9 @@ def on_populate_user(sender, **kwargs):
field_len = len(getattr(user, field))
if field_len > max_len:
setattr(user, field, getattr(user, field)[:max_len])
logger.warn(six.text_type(
'LDAP user {} has {} > max {} characters'
).format(user.username, field, max_len))
logger.warn(
'LDAP user {} has {} > max {} characters'.format(user.username, field, max_len)
)

# Update organization membership based on group memberships.
org_map = getattr(backend.settings, 'ORGANIZATION_MAP', {})

@ -5,7 +5,6 @@
import urllib.parse

# Six
import six

# Django
from django.conf import settings
@ -83,7 +82,7 @@ class SocialAuthMiddleware(SocialAuthExceptionMiddleware):
return redirect(url)

def get_message(self, request, exception):
msg = six.text_type(exception)
msg = str(exception)
if msg and msg[-1] not in '.?!':
msg = msg + '.'
return msg

@ -5,7 +5,6 @@
import re
import logging

import six

# Python Social Auth
from social_core.exceptions import AuthException
@ -67,10 +66,10 @@ def _update_m2m_from_expression(user, rel, expr, remove=True):
elif expr is True:
should_add = True
else:
if isinstance(expr, (six.string_types, type(re.compile('')))):
if isinstance(expr, (str, type(re.compile('')))):
expr = [expr]
for ex in expr:
if isinstance(ex, six.string_types):
if isinstance(ex, str):
if user.username == ex or user.email == ex:
should_add = True
elif isinstance(ex, type(re.compile(''))):

@ -1,6 +1,6 @@
#!/usr/bin/env python
import argparse
import six
import urllib.parse

import requests

@ -10,7 +10,7 @@ NAMED_URL_RES_INNER_DILIMITER = "-"
NAMED_URL_RES_DILIMITER_ENCODE = "%2D"
URL_PATH_RESERVED_CHARSET = {}
for c in ';/?:@=&[]':
URL_PATH_RESERVED_CHARSET[c] = six.moves.urllib.parse.quote(c, safe='')
URL_PATH_RESERVED_CHARSET[c] = urllib.parse.quote(c, safe='')


def _get_named_url_graph(url, auth):

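The hunks above repeat one mechanical mapping from six shims to their Python 3 built-ins. A minimal sketch of that mapping under Python 3 (illustrative only, not code from this commit; the sample values are arbitrary):

import urllib.parse

# six.text_type(x)                  -> str(x)
# isinstance(x, six.string_types)   -> isinstance(x, str)
# six.u('literal')                  -> 'literal'  (str literals are already unicode on py3)
# six.moves.urllib.parse.quote(...) -> urllib.parse.quote(...)
assert str(5) == '5'
assert isinstance('オオオ', str)
assert urllib.parse.quote(';', safe='') == '%3B'
assert urllib.parse.unquote('%2D') == '-'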