clean up unnecessary usage of the six library (awx only supports py3)

Ryan Petrello
2019-01-24 17:57:08 -05:00
parent 68950d56ca
commit daeeaf413a
58 changed files with 238 additions and 311 deletions
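All of the changes below are mechanical applications of a small set of Python 3 equivalences. For reference, a minimal sketch of the mappings this commit applies (the URL is illustrative only):

```python
# Python 3 equivalents for the six idioms removed in this commit:
#   six.string_types            -> str
#   six.text_type               -> str
#   six.integer_types           -> int
#   six.u('...')                -> '...'
#   six.moves.urllib.parse      -> urllib.parse
#   @six.add_metaclass(Meta)    -> class C(metaclass=Meta)
import urllib.parse

assert isinstance('x', str)
assert str(42) == '42'
assert isinstance(1, (str, int, float))
assert urllib.parse.urlparse('http://h/p').path == '/p'
```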

View File

@@ -91,16 +91,6 @@ def prepare_env():
# Monkeypatch Django find_commands to also work with .pyc files.
import django.core.management
django.core.management.find_commands = find_commands
-# Fixup sys.modules reference to django.utils.six to allow jsonfield to
-# work when using Django 1.4.
-import django.utils
-try:
-    import django.utils.six
-except ImportError:  # pragma: no cover
-    import six
-    sys.modules['django.utils.six'] = sys.modules['six']
-    django.utils.six = sys.modules['django.utils.six']
-    from django.utils import six  # noqa
# Use the AWX_TEST_DATABASE_* environment variables to specify the test
# database settings to use when management command is run as an external
# program via unit tests.
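The block removed above only existed to alias `six` in as `django.utils.six` for very old Django (the comment cites 1.4) so that `jsonfield` could import it; it is dead code now. The `sys.modules` aliasing trick it relied on, in general form (the alias name here is made up for illustration):

```python
import sys
import json

# Register an existing module under a second name; any later
# `import` of that name is served straight from sys.modules.
sys.modules['json_alias'] = json

import json_alias  # noqa: E402
assert json_alias.loads('[1, 2]') == [1, 2]
```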

View File

@@ -5,7 +5,6 @@
import inspect
import logging
import time
-import six
import urllib.parse

# Django
@@ -851,14 +850,14 @@ class CopyAPIView(GenericAPIView):
            return field_val
        if isinstance(field_val, dict):
            for sub_field in field_val:
-                if isinstance(sub_field, six.string_types) \
-                        and isinstance(field_val[sub_field], six.string_types):
+                if isinstance(sub_field, str) \
+                        and isinstance(field_val[sub_field], str):
                    try:
                        field_val[sub_field] = decrypt_field(obj, field_name, sub_field)
                    except AttributeError:
                        # Catching the corner case with v1 credential fields
                        field_val[sub_field] = decrypt_field(obj, sub_field)
-        elif isinstance(field_val, six.string_types):
+        elif isinstance(field_val, str):
            try:
                field_val = decrypt_field(obj, field_name)
            except AttributeError:
@@ -916,7 +915,7 @@ class CopyAPIView(GenericAPIView):
                obj, field.name, field_val
            )
        new_obj = model.objects.create(**create_kwargs)
-        logger.debug(six.text_type('Deep copy: Created new object {}({})').format(
+        logger.debug('Deep copy: Created new object {}({})'.format(
            new_obj, model
        ))
        # Need to save separatedly because Djang-crum get_current_user would

View File

@@ -4,7 +4,6 @@ import json
# Django
from django.conf import settings
-from django.utils import six
from django.utils.encoding import smart_str
from django.utils.translation import ugettext_lazy as _
@@ -34,4 +33,4 @@ class JSONParser(parsers.JSONParser):
                raise ParseError(_('JSON parse error - not a JSON object'))
            return obj
        except ValueError as exc:
-            raise ParseError(_('JSON parse error - %s\nPossible cause: trailing comma.' % six.text_type(exc)))
+            raise ParseError(_('JSON parse error - %s\nPossible cause: trailing comma.' % str(exc)))

View File

@@ -5,8 +5,6 @@
from rest_framework import renderers
from rest_framework.request import override_method

-import six


class BrowsableAPIRenderer(renderers.BrowsableAPIRenderer):
    '''
@@ -71,8 +69,8 @@ class PlainTextRenderer(renderers.BaseRenderer):
    format = 'txt'

    def render(self, data, media_type=None, renderer_context=None):
-        if not isinstance(data, six.string_types):
-            data = six.text_type(data)
+        if not isinstance(data, str):
+            data = str(data)
        return data.encode(self.charset)

View File

@@ -7,7 +7,6 @@ import json
import logging
import operator
import re
-import six
import urllib.parse
from collections import OrderedDict
from datetime import timedelta
@@ -1046,7 +1045,7 @@ class BaseOAuth2TokenSerializer(BaseSerializer):
        return ret

    def _is_valid_scope(self, value):
-        if not value or (not isinstance(value, six.string_types)):
+        if not value or (not isinstance(value, str)):
            return False
        words = value.split()
        for word in words:
@@ -2478,8 +2477,7 @@ class CredentialTypeSerializer(BaseSerializer):
# TODO: remove when API v1 is removed
-@six.add_metaclass(BaseSerializerMetaclass)
-class V1CredentialFields(BaseSerializer):
+class V1CredentialFields(BaseSerializer, metaclass=BaseSerializerMetaclass):

    class Meta:
        model = Credential
@@ -2497,8 +2495,7 @@ class V1CredentialFields(BaseSerializer):
        return super(V1CredentialFields, self).build_field(field_name, info, model_class, nested_depth)

-@six.add_metaclass(BaseSerializerMetaclass)
-class V2CredentialFields(BaseSerializer):
+class V2CredentialFields(BaseSerializer, metaclass=BaseSerializerMetaclass):

    class Meta:
        model = Credential
@@ -2786,8 +2783,7 @@ class LabelsListMixin(object):
# TODO: remove when API v1 is removed
-@six.add_metaclass(BaseSerializerMetaclass)
-class V1JobOptionsSerializer(BaseSerializer):
+class V1JobOptionsSerializer(BaseSerializer, metaclass=BaseSerializerMetaclass):

    class Meta:
        model = Credential
@@ -2801,8 +2797,7 @@ class V1JobOptionsSerializer(BaseSerializer):
        return super(V1JobOptionsSerializer, self).build_field(field_name, info, model_class, nested_depth)

-@six.add_metaclass(BaseSerializerMetaclass)
-class LegacyCredentialFields(BaseSerializer):
+class LegacyCredentialFields(BaseSerializer, metaclass=BaseSerializerMetaclass):

    class Meta:
        model = Credential
@@ -4387,7 +4382,7 @@ class JobLaunchSerializer(BaseSerializer):
                errors.setdefault('credentials', []).append(_(
                    'Removing {} credential at launch time without replacement is not supported. '
                    'Provided list lacked credential(s): {}.'
-                ).format(cred.unique_hash(display=True), ', '.join([six.text_type(c) for c in removed_creds])))
+                ).format(cred.unique_hash(display=True), ', '.join([str(c) for c in removed_creds])))

        # verify that credentials (either provided or existing) don't
        # require launch-time passwords that have not been provided
@@ -4725,8 +4720,8 @@ class ScheduleSerializer(LaunchConfigurationBaseSerializer, SchedulePreviewSeria
            raise serializers.ValidationError(_('Manual Project cannot have a schedule set.'))
        elif type(value) == InventorySource and value.source == 'scm' and value.update_on_project_update:
            raise serializers.ValidationError(_(
-                six.text_type('Inventory sources with `update_on_project_update` cannot be scheduled. '
-                              'Schedule its source project `{}` instead.').format(value.source_project.name)))
+                'Inventory sources with `update_on_project_update` cannot be scheduled. '
+                'Schedule its source project `{}` instead.'.format(value.source_project.name)))
        return value
@@ -5064,6 +5059,6 @@ class FactSerializer(BaseFactSerializer):
        ret = super(FactSerializer, self).to_representation(obj)
        if obj is None:
            return ret
-        if 'facts' in ret and isinstance(ret['facts'], six.string_types):
+        if 'facts' in ret and isinstance(ret['facts'], str):
            ret['facts'] = json.loads(ret['facts'])
        return ret
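The four serializer rewrites above are one transformation: `six.add_metaclass` re-creates the decorated class with the given metaclass, which Python 3 spells directly with the `metaclass` keyword. A standalone sketch (the class names here are illustrative, not the AWX ones):

```python
class Registry(type):
    """Toy metaclass that records every class it constructs."""
    created = []

    def __new__(mcls, name, bases, namespace):
        cls = super().__new__(mcls, name, bases, namespace)
        Registry.created.append(name)
        return cls


# py2/py3-compatible form removed by this commit:
#     @six.add_metaclass(Registry)
#     class MySerializer(object): ...
#
# py3-only form introduced by this commit:
class MySerializer(metaclass=Registry):
    pass


assert Registry.created == ['MySerializer']
```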

View File

@@ -12,7 +12,6 @@ import requests
import functools
from base64 import b64encode
from collections import OrderedDict, Iterable
-import six


# Django
@@ -1435,7 +1434,7 @@ class HostList(HostRelatedSearchMixin, ListCreateAPIView):
        try:
            return super(HostList, self).list(*args, **kwargs)
        except Exception as e:
-            return Response(dict(error=_(six.text_type(e))), status=status.HTTP_400_BAD_REQUEST)
+            return Response(dict(error=_(str(e))), status=status.HTTP_400_BAD_REQUEST)


class HostDetail(RelatedJobsPreventDeleteMixin, ControlledByScmMixin, RetrieveUpdateDestroyAPIView):
@@ -1878,7 +1877,7 @@ class InventoryScriptView(RetrieveAPIView):
        show_all = bool(request.query_params.get('all', ''))
        subset = request.query_params.get('subset', '')
        if subset:
-            if not isinstance(subset, six.string_types):
+            if not isinstance(subset, str):
                raise ParseError(_('Inventory subset argument must be a string.'))
            if subset.startswith('slice'):
                slice_number, slice_count = Inventory.parse_slice_params(subset)
@@ -2416,11 +2415,11 @@ class JobTemplateSurveySpec(GenericAPIView):
    serializer_class = EmptySerializer

    ALLOWED_TYPES = {
-        'text': six.string_types,
-        'textarea': six.string_types,
-        'password': six.string_types,
-        'multiplechoice': six.string_types,
-        'multiselect': six.string_types,
+        'text': str,
+        'textarea': str,
+        'password': str,
+        'multiplechoice': str,
+        'multiselect': str,
        'integer': int,
        'float': float
    }
@@ -2455,8 +2454,8 @@ class JobTemplateSurveySpec(GenericAPIView):
    def _validate_spec_data(new_spec, old_spec):
        schema_errors = {}
        for field, expect_type, type_label in [
-            ('name', six.string_types, 'string'),
-            ('description', six.string_types, 'string'),
+            ('name', str, 'string'),
+            ('description', str, 'string'),
            ('spec', list, 'list of items')]:
            if field not in new_spec:
                schema_errors['error'] = _("Field '{}' is missing from survey spec.").format(field)
@@ -2474,7 +2473,7 @@ class JobTemplateSurveySpec(GenericAPIView):
        old_spec_dict = JobTemplate.pivot_spec(old_spec)
        for idx, survey_item in enumerate(new_spec["spec"]):
            context = dict(
-                idx=six.text_type(idx),
+                idx=str(idx),
                survey_item=survey_item
            )
            # General element validation
@@ -2486,7 +2485,7 @@ class JobTemplateSurveySpec(GenericAPIView):
                    field_name=field_name, **context
                )), status=status.HTTP_400_BAD_REQUEST)
            val = survey_item[field_name]
-            allow_types = six.string_types
+            allow_types = str
            type_label = 'string'
            if field_name == 'required':
                allow_types = bool
@@ -2534,7 +2533,7 @@ class JobTemplateSurveySpec(GenericAPIView):
                )))
        # Process encryption substitution
-        if ("default" in survey_item and isinstance(survey_item['default'], six.string_types) and
+        if ("default" in survey_item and isinstance(survey_item['default'], str) and
                survey_item['default'].startswith('$encrypted$')):
            # Submission expects the existence of encrypted DB value to replace given default
            if qtype != "password":
@@ -2546,7 +2545,7 @@ class JobTemplateSurveySpec(GenericAPIView):
            encryptedish_default_exists = False
            if 'default' in old_element:
                old_default = old_element['default']
-                if isinstance(old_default, six.string_types):
+                if isinstance(old_default, str):
                    if old_default.startswith('$encrypted$'):
                        encryptedish_default_exists = True
                    elif old_default == "":  # unencrypted blank string is allowed as DB value as special case
@@ -3075,8 +3074,8 @@ class WorkflowJobTemplateCopy(WorkflowsEnforcementMixin, CopyAPIView):
        elif field_name in ['credentials']:
            for cred in item.all():
                if not user.can_access(cred.__class__, 'use', cred):
-                    logger.debug(six.text_type(
-                        'Deep copy: removing {} from relationship due to permissions').format(cred))
+                    logger.debug(
+                        'Deep copy: removing {} from relationship due to permissions'.format(cred))
                    item.remove(cred.pk)
        obj.save()

View File

@@ -10,8 +10,6 @@ from django.utils.translation import ugettext_lazy as _
# Django REST Framework
from rest_framework.fields import *  # noqa

-import six

logger = logging.getLogger('awx.conf.fields')

# Use DRF fields to convert/validate settings:
@@ -139,7 +137,7 @@ class KeyValueField(DictField):
    def to_internal_value(self, data):
        ret = super(KeyValueField, self).to_internal_value(data)
        for value in data.values():
-            if not isinstance(value, six.string_types + six.integer_types + (float,)):
+            if not isinstance(value, (str, int, float)):
                if isinstance(value, OrderedDict):
                    value = dict(value)
                self.fail('invalid_child', input=value)
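The old check worked because `six.string_types` and `six.integer_types` are tuples (on py2 they also covered `basestring` and `long`), so the `+` was tuple concatenation; on py3 the whole expression collapses to a literal tuple of builtins:

```python
# Old, py2/py3: isinstance(value, six.string_types + six.integer_types + (float,))
# New, py3-only:
for value in ('a', 7, 7.5):
    assert isinstance(value, (str, int, float))
assert not isinstance(b'raw', (str, int, float))  # bytes are not strings on py3
```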

View File

@@ -1,7 +1,6 @@
import base64
import hashlib

-import six

from django.utils.encoding import smart_str
from cryptography.hazmat.backends import default_backend
@@ -91,7 +90,7 @@ def encrypt_field(instance, field_name, ask=False, subfield=None, skip_utf8=Fals
    if skip_utf8:
        utf8 = False
    else:
-        utf8 = type(value) == six.text_type
+        utf8 = type(value) == str
    value = smart_str(value)
    key = get_encryption_key(field_name, getattr(instance, 'pk', None))
    encryptor = Cipher(AES(key), ECB(), default_backend()).encryptor()

View File

@@ -1,8 +1,6 @@
# Django REST Framework
from rest_framework import serializers

-import six

# Tower
from awx.api.fields import VerbatimField
from awx.api.serializers import BaseSerializer
@@ -47,12 +45,12 @@ class SettingFieldMixin(object):
    """Mixin to use a registered setting field class for API display/validation."""

    def to_representation(self, obj):
-        if getattr(self, 'encrypted', False) and isinstance(obj, six.string_types) and obj:
+        if getattr(self, 'encrypted', False) and isinstance(obj, str) and obj:
            return '$encrypted$'
        return obj

    def to_internal_value(self, value):
-        if getattr(self, 'encrypted', False) and isinstance(value, six.string_types) and value.startswith('$encrypted$'):
+        if getattr(self, 'encrypted', False) and isinstance(value, str) and value.startswith('$encrypted$'):
            raise serializers.SkipField()
        obj = super(SettingFieldMixin, self).to_internal_value(value)
        return super(SettingFieldMixin, self).to_representation(obj)

View File

@@ -1,7 +1,8 @@
+import urllib.parse

import pytest

from django.core.urlresolvers import resolve
-from django.utils.six.moves.urllib.parse import urlparse
from django.contrib.auth.models import User

from rest_framework.test import (
@@ -33,7 +34,7 @@ def admin():
@pytest.fixture
def api_request(admin):
    def rf(verb, url, data=None, user=admin):
-        view, view_args, view_kwargs = resolve(urlparse(url)[2])
+        view, view_args, view_kwargs = resolve(urllib.parse.urlparse(url)[2])
        request = getattr(APIRequestFactory(), verb)(url, data=data, format='json')
        if user:
            force_authenticate(request, user=user)
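On py3, `six.moves.urllib.parse` was just an alias for the stdlib `urllib.parse`, so the fixture's behavior is unchanged: index 2 of the parse result is the path component that `resolve()` needs. For example (the URL is hypothetical):

```python
import urllib.parse

parts = urllib.parse.urlparse('http://testserver/api/v2/jobs/?page=2')
assert parts[2] == '/api/v2/jobs/'   # index 2 is the path...
assert parts.path == parts[2]        # ...also available as a named attribute
```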

View File

@@ -13,7 +13,6 @@ from django.core.cache.backends.locmem import LocMemCache
from django.core.exceptions import ImproperlyConfigured
from django.utils.translation import ugettext_lazy as _

import pytest
-import six

from awx.conf import models, fields
from awx.conf.settings import SettingsWrapper, EncryptedCacheProxy, SETTING_CACHE_NOTSET
@@ -70,7 +69,7 @@ def test_cached_settings_unicode_is_auto_decoded(settings):
    value = 'Iñtërnâtiônàlizætiøn'  # this simulates what python-memcached does on cache.set()
    settings.cache.set('DEBUG', value)
-    assert settings.cache.get('DEBUG') == six.u('Iñtërnâtiônàlizætiøn')
+    assert settings.cache.get('DEBUG') == 'Iñtërnâtiônàlizætiøn'


def test_read_only_setting(settings):

View File

@@ -6,8 +6,6 @@ import glob
import os
import shutil

-import six

# AWX
from awx.conf.registry import settings_registry
@@ -15,7 +13,7 @@ __all__ = ['comment_assignments', 'conf_to_dict']

def comment_assignments(patterns, assignment_names, dry_run=True, backup_suffix='.old'):
-    if isinstance(patterns, six.string_types):
+    if isinstance(patterns, str):
        patterns = [patterns]
    diffs = []
    for pattern in patterns:
@@ -34,7 +32,7 @@ def comment_assignments_in_file(filename, assignment_names, dry_run=True, backup_suffix=
def comment_assignments_in_file(filename, assignment_names, dry_run=True, backup_filename=None):
    from redbaron import RedBaron, indent
-    if isinstance(assignment_names, six.string_types):
+    if isinstance(assignment_names, str):
        assignment_names = [assignment_names]
    else:
        assignment_names = assignment_names[:]

View File

@@ -5,7 +5,6 @@
import os
import sys
import logging

-import six

from functools import reduce

# Django
@@ -2590,7 +2589,7 @@ class RoleAccess(BaseAccess):
        if (isinstance(obj.content_object, Organization) and
                obj.role_field in (Organization.member_role.field.parent_role + ['member_role'])):
            if not isinstance(sub_obj, User):
-                logger.error(six.text_type('Unexpected attempt to associate {} with organization role.').format(sub_obj))
+                logger.error('Unexpected attempt to associate {} with organization role.'.format(sub_obj))
                return False
            if not UserAccess(self.user).can_admin(sub_obj, None, allow_orphans=True):
                return False

View File

@@ -4,7 +4,6 @@ import importlib
import sys
import traceback

-import six

from awx.main.tasks import dispatch_startup, inform_cluster_of_shutdown
@@ -90,7 +89,7 @@ class TaskWorker(BaseWorker):
        try:
            if getattr(exc, 'is_awx_task_error', False):
                # Error caused by user / tracked in job output
-                logger.warning(six.text_type("{}").format(exc))
+                logger.warning("{}".format(exc))
            else:
                task = body['task']
                args = body.get('args', [])

View File

@@ -1,13 +1,12 @@
# Copyright (c) 2018 Ansible by Red Hat
# All Rights Reserved.

-import six


class _AwxTaskError():
    def build_exception(self, task, message=None):
        if message is None:
-            message = six.text_type("Execution error running {}").format(task.log_format)
+            message = "Execution error running {}".format(task.log_format)
        e = Exception(message)
        e.task = task
        e.is_awx_task_error = True
@@ -15,7 +14,7 @@ class _AwxTaskError():
    def TaskCancel(self, task, rc):
        """Canceled flag caused run_pexpect to kill the job run"""
-        message=six.text_type("{} was canceled (rc={})").format(task.log_format, rc)
+        message="{} was canceled (rc={})".format(task.log_format, rc)
        e = self.build_exception(task, message)
        e.rc = rc
        e.awx_task_error_type = "TaskCancel"
@@ -23,7 +22,7 @@ class _AwxTaskError():
    def TaskError(self, task, rc):
        """Userspace error (non-zero exit code) in run_pexpect subprocess"""
-        message = six.text_type("{} encountered an error (rc={}), please see task stdout for details.").format(task.log_format, rc)
+        message = "{} encountered an error (rc={}), please see task stdout for details.".format(task.log_format, rc)
        e = self.build_exception(task, message)
        e.rc = rc
        e.awx_task_error_type = "TaskError"
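Apart from dropping the `six.text_type` wrapper (format strings are already text on py3), the builder pattern is untouched: raise a plain `Exception` tagged with extra attributes the dispatcher can inspect. In isolation (the sample arguments are made up):

```python
def build_task_error(log_format, rc):
    # Mirrors TaskError above: a plain Exception carrying AWX metadata.
    e = Exception('{} encountered an error (rc={}), please see task stdout for details.'
                  .format(log_format, rc))
    e.rc = rc
    e.is_awx_task_error = True
    e.awx_task_error_type = 'TaskError'
    return e


err = build_task_error('job 42 (running)', 1)
assert err.is_awx_task_error and err.rc == 1
```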

View File

@@ -6,7 +6,6 @@ import copy
import json
import operator
import re
-import six
import urllib.parse

from jinja2 import Environment, StrictUndefined
@@ -80,7 +79,7 @@ class JSONField(upstream_JSONField):

class JSONBField(upstream_JSONBField):
    def get_prep_lookup(self, lookup_type, value):
-        if isinstance(value, six.string_types) and value == "null":
+        if isinstance(value, str) and value == "null":
            return 'null'
        return super(JSONBField, self).get_prep_lookup(lookup_type, value)
@@ -95,7 +94,7 @@ class JSONBField(upstream_JSONBField):
    def from_db_value(self, value, expression, connection, context):
        # Work around a bug in django-jsonfield
        # https://bitbucket.org/schinckel/django-jsonfield/issues/57/cannot-use-in-the-same-project-as-djangos
-        if isinstance(value, six.string_types):
+        if isinstance(value, str):
            return json.loads(value)
        return value
@@ -411,7 +410,7 @@ class JSONSchemaField(JSONBField):
            format_checker=self.format_checker
        ).iter_errors(value):
            if error.validator == 'pattern' and 'error' in error.schema:
-                error.message = six.text_type(error.schema['error']).format(instance=error.instance)
+                error.message = error.schema['error'].format(instance=error.instance)
            elif error.validator == 'type':
                expected_type = error.validator_value
                if expected_type == 'object':
@@ -450,7 +449,7 @@ class JSONSchemaField(JSONBField):
    def from_db_value(self, value, expression, connection, context):
        # Work around a bug in django-jsonfield
        # https://bitbucket.org/schinckel/django-jsonfield/issues/57/cannot-use-in-the-same-project-as-djangos
-        if isinstance(value, six.string_types):
+        if isinstance(value, str):
            return json.loads(value)
        return value
@@ -547,7 +546,7 @@ class CredentialInputField(JSONSchemaField):
                v != '$encrypted$',
                model_instance.pk
            ]):
-                if not isinstance(getattr(model_instance, k), six.string_types):
+                if not isinstance(getattr(model_instance, k), str):
                    raise django_exceptions.ValidationError(
                        _('secret values must be of type string, not {}').format(type(v).__name__),
                        code='invalid',
@@ -564,7 +563,7 @@ class CredentialInputField(JSONSchemaField):
            format_checker=self.format_checker
        ).iter_errors(decrypted_values):
            if error.validator == 'pattern' and 'error' in error.schema:
-                error.message = six.text_type(error.schema['error']).format(instance=error.instance)
+                error.message = error.schema['error'].format(instance=error.instance)
            if error.validator == 'dependencies':
                # replace the default error messaging w/ a better i18n string
                # I wish there was a better way to determine the parameters of
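The two `from_db_value` hunks keep the same defensive decode around the django-jsonfield bug: if the database driver hands the JSON column back as a raw string instead of a decoded object, parse it ourselves. The guard in isolation:

```python
import json

def from_db(value):
    # django-jsonfield sometimes returns the raw string; decode defensively.
    if isinstance(value, str):
        return json.loads(value)
    return value

assert from_db('{"a": 1}') == {'a': 1}
assert from_db({'a': 1}) == {'a': 1}   # already decoded: passed through
```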

View File

@@ -5,7 +5,6 @@
import datetime
import logging

-import six

# Django
from django.core.management.base import BaseCommand
@@ -43,7 +42,7 @@ class Command(BaseCommand):
        n_deleted_items = 0
        pks_to_delete = set()
        for asobj in ActivityStream.objects.iterator():
-            asobj_disp = '"%s" id: %s' % (six.text_type(asobj), asobj.id)
+            asobj_disp = '"%s" id: %s' % (str(asobj), asobj.id)
            if asobj.timestamp >= self.cutoff:
                if self.dry_run:
                    self.logger.info("would skip %s" % asobj_disp)

View File

@@ -5,7 +5,6 @@
import datetime
import logging

-import six

# Django
from django.core.management.base import BaseCommand, CommandError
@@ -68,7 +67,7 @@ class Command(BaseCommand):
        jobs = Job.objects.filter(created__lt=self.cutoff)
        for job in jobs.iterator():
            job_display = '"%s" (%d host summaries, %d events)' % \
-                          (six.text_type(job),
+                          (str(job),
                           job.job_host_summaries.count(), job.job_events.count())
            if job.status in ('pending', 'waiting', 'running'):
                action_text = 'would skip' if self.dry_run else 'skipping'
@@ -89,7 +88,7 @@ class Command(BaseCommand):
        ad_hoc_commands = AdHocCommand.objects.filter(created__lt=self.cutoff)
        for ad_hoc_command in ad_hoc_commands.iterator():
            ad_hoc_command_display = '"%s" (%d events)' % \
-                                     (six.text_type(ad_hoc_command),
+                                     (str(ad_hoc_command),
                                      ad_hoc_command.ad_hoc_command_events.count())
            if ad_hoc_command.status in ('pending', 'waiting', 'running'):
                action_text = 'would skip' if self.dry_run else 'skipping'
@@ -109,7 +108,7 @@ class Command(BaseCommand):
        skipped, deleted = 0, 0
        project_updates = ProjectUpdate.objects.filter(created__lt=self.cutoff)
        for pu in project_updates.iterator():
-            pu_display = '"%s" (type %s)' % (six.text_type(pu), six.text_type(pu.launch_type))
+            pu_display = '"%s" (type %s)' % (str(pu), str(pu.launch_type))
            if pu.status in ('pending', 'waiting', 'running'):
                action_text = 'would skip' if self.dry_run else 'skipping'
                self.logger.debug('%s %s project update %s', action_text, pu.status, pu_display)
@@ -132,7 +131,7 @@ class Command(BaseCommand):
        skipped, deleted = 0, 0
        inventory_updates = InventoryUpdate.objects.filter(created__lt=self.cutoff)
        for iu in inventory_updates.iterator():
-            iu_display = '"%s" (source %s)' % (six.text_type(iu), six.text_type(iu.source))
+            iu_display = '"%s" (source %s)' % (str(iu), str(iu.source))
            if iu.status in ('pending', 'waiting', 'running'):
                action_text = 'would skip' if self.dry_run else 'skipping'
                self.logger.debug('%s %s inventory update %s', action_text, iu.status, iu_display)
@@ -155,7 +154,7 @@ class Command(BaseCommand):
        skipped, deleted = 0, 0
        system_jobs = SystemJob.objects.filter(created__lt=self.cutoff)
        for sj in system_jobs.iterator():
-            sj_display = '"%s" (type %s)' % (six.text_type(sj), six.text_type(sj.job_type))
+            sj_display = '"%s" (type %s)' % (str(sj), str(sj.job_type))
            if sj.status in ('pending', 'waiting', 'running'):
                action_text = 'would skip' if self.dry_run else 'skipping'
                self.logger.debug('%s %s system_job %s', action_text, sj.status, sj_display)
@@ -185,7 +184,7 @@ class Command(BaseCommand):
        workflow_jobs = WorkflowJob.objects.filter(created__lt=self.cutoff)
        for workflow_job in workflow_jobs.iterator():
            workflow_job_display = '"{}" ({} nodes)'.format(
-                six.text_type(workflow_job),
+                str(workflow_job),
                workflow_job.workflow_nodes.count())
            if workflow_job.status in ('pending', 'waiting', 'running'):
                action_text = 'would skip' if self.dry_run else 'skipping'
@@ -206,7 +205,7 @@ class Command(BaseCommand):
        notifications = Notification.objects.filter(created__lt=self.cutoff)
        for notification in notifications.iterator():
            notification_display = '"{}" (started {}, {} type, {} sent)'.format(
-                six.text_type(notification), six.text_type(notification.created),
+                str(notification), str(notification.created),
                notification.notification_type, notification.notifications_sent)
            if notification.status in ('pending',):
                action_text = 'would skip' if self.dry_run else 'skipping'

View File

@@ -3,7 +3,6 @@
from awx.main.models import Instance, InstanceGroup
from django.core.management.base import BaseCommand

-import six


class Ungrouped(object):
@@ -42,7 +41,7 @@ class Command(BaseCommand):
            fmt += ' policy>={0.policy_instance_minimum}'
        if instance_group.controller:
            fmt += ' controller={0.controller.name}'
-        print(six.text_type(fmt + ']').format(instance_group))
+        print((fmt + ']').format(instance_group))
        for x in instance_group.instances.all():
            color = '\033[92m'
            if x.capacity == 0 or x.enabled is False:
@@ -52,5 +51,5 @@ class Command(BaseCommand):
                fmt += ' last_isolated_check="{0.last_isolated_check:%Y-%m-%d %H:%M:%S}"'
            if x.capacity:
                fmt += ' heartbeat="{0.modified:%Y-%m-%d %H:%M:%S}"'
-            print(six.text_type(fmt + '\033[0m').format(x, x.version or '?'))
+            print((fmt + '\033[0m').format(x, x.version or '?'))
        print('')

View File

@@ -1,7 +1,6 @@
# Copyright (c) 2017 Ansible Tower by Red Hat
# All Rights Reserved.
import sys

-import six

from awx.main.utils.pglock import advisory_lock
from awx.main.models import Instance, InstanceGroup
@@ -73,7 +72,7 @@ class Command(BaseCommand):
            if instance.exists():
                instances.append(instance[0])
            else:
-                raise InstanceNotFound(six.text_type("Instance does not exist: {}").format(inst_name), changed)
+                raise InstanceNotFound("Instance does not exist: {}".format(inst_name), changed)
        ig.instances.add(*instances)
@@ -99,24 +98,24 @@ class Command(BaseCommand):
        if options.get('hostnames'):
            hostname_list = options.get('hostnames').split(",")

-        with advisory_lock(six.text_type('instance_group_registration_{}').format(queuename)):
+        with advisory_lock('instance_group_registration_{}'.format(queuename)):
            changed2 = False
            changed3 = False
            (ig, created, changed1) = self.get_create_update_instance_group(queuename, inst_per, inst_min)
            if created:
-                print(six.text_type("Creating instance group {}").format(ig.name))
+                print("Creating instance group {}".format(ig.name))
            elif not created:
-                print(six.text_type("Instance Group already registered {}").format(ig.name))
+                print("Instance Group already registered {}".format(ig.name))

            if ctrl:
                (ig_ctrl, changed2) = self.update_instance_group_controller(ig, ctrl)
                if changed2:
-                    print(six.text_type("Set controller group {} on {}.").format(ctrl, queuename))
+                    print("Set controller group {} on {}.".format(ctrl, queuename))

            try:
                (instances, changed3) = self.add_instances_to_group(ig, hostname_list)
                for i in instances:
-                    print(six.text_type("Added instance {} to {}").format(i.hostname, ig.name))
+                    print("Added instance {} to {}".format(i.hostname, ig.name))
            except InstanceNotFound as e:
                instance_not_found_err = e

View File

@@ -4,11 +4,11 @@
import uuid
import logging
import threading
-import six
import time
import cProfile
import pstats
import os
+import urllib.parse

from django.conf import settings
from django.contrib.auth.models import User
@@ -195,7 +195,7 @@ class URLModificationMiddleware(object):
    def process_request(self, request):
        if hasattr(request, 'environ') and 'REQUEST_URI' in request.environ:
-            old_path = six.moves.urllib.parse.urlsplit(request.environ['REQUEST_URI']).path
+            old_path = urllib.parse.urlsplit(request.environ['REQUEST_URI']).path
            old_path = old_path[request.path.find(request.path_info):]
        else:
            old_path = request.path_info

View File

@@ -1,7 +1,6 @@
import logging

from django.db.models import Q

-import six

logger = logging.getLogger('awx.main.migrations')
@@ -39,8 +38,8 @@ def rename_inventory_sources(apps, schema_editor):
            Q(deprecated_group__inventory__organization=org)).distinct().all()):
        inventory = invsrc.deprecated_group.inventory if invsrc.deprecated_group else invsrc.inventory
-        name = six.text_type('{0} - {1} - {2}').format(invsrc.name, inventory.name, i)
-        logger.debug(six.text_type("Renaming InventorySource({0}) {1} -> {2}").format(
+        name = '{0} - {1} - {2}'.format(invsrc.name, inventory.name, i)
+        logger.debug("Renaming InventorySource({0}) {1} -> {2}".format(
            invsrc.pk, invsrc.name, name
        ))
        invsrc.name = name

View File

@@ -1,7 +1,6 @@
import logging
import json

from django.utils.translation import ugettext_lazy as _
-import six

from awx.conf.migrations._reencrypt import (
    decrypt_field,

View File

@@ -3,8 +3,6 @@ import logging
from django.utils.timezone import now
from django.utils.text import slugify

-import six

from awx.main.models.base import PERM_INVENTORY_SCAN, PERM_INVENTORY_DEPLOY
from awx.main import utils
@@ -26,7 +24,7 @@ def _create_fact_scan_project(ContentType, Project, org):
                   polymorphic_ctype=ct)
    proj.save()

-    slug_name = slugify(six.text_type(name)).replace(u'-', u'_')
+    slug_name = slugify(str(name)).replace(u'-', u'_')
    proj.local_path = u'_%d__%s' % (int(proj.pk), slug_name)
    proj.save()

View File

@@ -7,7 +7,6 @@ import os
import re
import stat
import tempfile

-import six

# Jinja2
from jinja2 import Template
@@ -418,11 +417,11 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
        type_alias = self.credential_type_id
        if self.kind == 'vault' and self.has_input('vault_id'):
            if display:
-                fmt_str = six.text_type('{} (id={})')
+                fmt_str = '{} (id={})'
            else:
-                fmt_str = six.text_type('{}_{}')
+                fmt_str = '{}_{}'
            return fmt_str.format(type_alias, self.get_input('vault_id'))
-        return six.text_type(type_alias)
+        return str(type_alias)

    @staticmethod
    def unique_dict(cred_qs):
@@ -679,9 +678,7 @@ class CredentialType(CommonModelNameNotUnique):
                try:
                    injector_field.validate_env_var_allowed(env_var)
                except ValidationError as e:
-                    logger.error(six.text_type(
-                        'Ignoring prohibited env var {}, reason: {}'
-                    ).format(env_var, e))
+                    logger.error('Ignoring prohibited env var {}, reason: {}'.format(env_var, e))
                    continue
            env[env_var] = Template(tmpl).render(**namespace)
            safe_env[env_var] = Template(tmpl).render(**safe_namespace)
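The `unique_hash` change above now builds its identifier with plain string formatting; only the display flag changes the shape. A standalone sketch of the same logic (simplified, not the actual method signature):

```python
def unique_hash(type_alias, vault_id=None, display=False):
    # Vault credentials are disambiguated by their vault_id.
    if vault_id is not None:
        fmt_str = '{} (id={})' if display else '{}_{}'
        return fmt_str.format(type_alias, vault_id)
    return str(type_alias)


assert unique_hash('vault', 'prod') == 'vault_prod'
assert unique_hash('vault', 'prod', display=True) == 'vault (id=prod)'
assert unique_hash(14) == '14'   # non-vault: just the alias, stringified
```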

View File

@@ -9,7 +9,6 @@ from django.utils.text import Truncator
from django.utils.timezone import utc
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import force_text

-import six

from awx.api.versioning import reverse
from awx.main.fields import JSONField
@@ -35,7 +34,7 @@ def sanitize_event_keys(kwargs, valid_keys):
    for key in [
        'play', 'role', 'task', 'playbook'
    ]:
-        if isinstance(kwargs.get('event_data', {}).get(key), six.string_types):
+        if isinstance(kwargs.get('event_data', {}).get(key), str):
            if len(kwargs['event_data'][key]) > 1024:
                kwargs['event_data'][key] = Truncator(kwargs['event_data'][key]).chars(1024)

View File

@@ -9,7 +9,6 @@ import logging
import re
import copy
import os.path

-import six

from urllib.parse import urljoin

# Django
@@ -1356,7 +1355,7 @@ class InventorySourceOptions(BaseModel):
    source_vars_dict = VarsDictProperty('source_vars')

    def clean_instance_filters(self):
-        instance_filters = six.text_type(self.instance_filters or '')
+        instance_filters = str(self.instance_filters or '')
        if self.source == 'ec2':
            invalid_filters = []
            instance_filter_re = re.compile(r'^((tag:.+)|([a-z][a-z\.-]*[a-z]))=.*$')
@@ -1382,7 +1381,7 @@ class InventorySourceOptions(BaseModel):
        return ''

    def clean_group_by(self):
-        group_by = six.text_type(self.group_by or '')
+        group_by = str(self.group_by or '')
        if self.source == 'ec2':
            get_choices = getattr(self, 'get_%s_group_by_choices' % self.source)
            valid_choices = [x[0] for x in get_choices()]
@@ -1539,7 +1538,7 @@ class InventorySource(UnifiedJobTemplate, InventorySourceOptions, RelatedJobsMix
        if '_eager_fields' not in kwargs:
            kwargs['_eager_fields'] = {}
        if 'name' not in kwargs['_eager_fields']:
-            name = six.text_type('{} - {}').format(self.inventory.name, self.name)
+            name = '{} - {}'.format(self.inventory.name, self.name)
            name_field = self._meta.get_field('name')
            if len(name) > name_field.max_length:
                name = name[:name_field.max_length]

View File

@@ -10,7 +10,6 @@ import time
import json

from urllib.parse import urljoin

-import six

# Django
from django.conf import settings
@@ -823,7 +822,7 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
            timeout = now() - datetime.timedelta(seconds=timeout)
            hosts = hosts.filter(ansible_facts_modified__gte=timeout)
        for host in hosts:
-            filepath = os.sep.join(map(six.text_type, [destination, host.name]))
+            filepath = os.sep.join(map(str, [destination, host.name]))
            if not os.path.realpath(filepath).startswith(destination):
                system_tracking_logger.error('facts for host {} could not be cached'.format(smart_str(host.name)))
                continue
@@ -840,7 +839,7 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
    def finish_job_fact_cache(self, destination, modification_times):
        destination = os.path.join(destination, 'facts')
        for host in self._get_inventory_hosts():
-            filepath = os.sep.join(map(six.text_type, [destination, host.name]))
+            filepath = os.sep.join(map(str, [destination, host.name]))
            if not os.path.realpath(filepath).startswith(destination):
                system_tracking_logger.error('facts for host {} could not be cached'.format(smart_str(host.name)))
                continue
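Both fact-cache hunks keep the path-traversal guard intact: the per-host file path is joined with `os.sep` and rejected unless its realpath still lives under the cache directory, so a host named `../../etc/passwd` cannot escape. The guard in isolation (paths are illustrative):

```python
import os

def safe_fact_path(destination, host_name):
    # Mirrors the check above: None means "refuse to cache for this host".
    filepath = os.sep.join(map(str, [destination, host_name]))
    if not os.path.realpath(filepath).startswith(destination):
        return None
    return filepath


assert safe_fact_path('/srv/facts', 'web01') == '/srv/facts/web01'
assert safe_fact_path('/srv/facts', '../../etc/passwd') is None
```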

View File

@@ -3,7 +3,6 @@ import os
import json
from copy import copy, deepcopy

-import six

# Django
from django.apps import apps
@@ -167,7 +166,7 @@ class SurveyJobTemplateMixin(models.Model):
            decrypted_default = default
            if (
                survey_element['type'] == "password" and
-                isinstance(decrypted_default, six.string_types) and
+                isinstance(decrypted_default, str) and
                decrypted_default.startswith('$encrypted$')
            ):
                decrypted_default = decrypt_value(get_encryption_key('value', pk=None), decrypted_default)
@@ -190,7 +189,7 @@ class SurveyJobTemplateMixin(models.Model):
            if (survey_element['type'] == "password"):
                password_value = data.get(survey_element['variable'])
                if (
-                    isinstance(password_value, six.string_types) and
+                    isinstance(password_value, str) and
                    password_value == '$encrypted$'
                ):
                    if survey_element.get('default') is None and survey_element['required']:
@@ -203,7 +202,7 @@ class SurveyJobTemplateMixin(models.Model):
                errors.append("'%s' value missing" % survey_element['variable'])
            elif survey_element['type'] in ["textarea", "text", "password"]:
                if survey_element['variable'] in data:
-                    if not isinstance(data[survey_element['variable']], six.string_types):
+                    if not isinstance(data[survey_element['variable']], str):
                        errors.append("Value %s for '%s' expected to be a string." % (data[survey_element['variable']],
                                                                                      survey_element['variable']))
        return errors
@@ -247,7 +246,7 @@ class SurveyJobTemplateMixin(models.Model):
                    errors.append("'%s' value is expected to be a list." % survey_element['variable'])
                else:
                    choice_list = copy(survey_element['choices'])
-                    if isinstance(choice_list, six.string_types):
+                    if isinstance(choice_list, str):
                        choice_list = choice_list.split('\n')
                    for val in data[survey_element['variable']]:
                        if val not in choice_list:
@@ -255,7 +254,7 @@ class SurveyJobTemplateMixin(models.Model):
                                          choice_list))
            elif survey_element['type'] == 'multiplechoice':
                choice_list = copy(survey_element['choices'])
-                if isinstance(choice_list, six.string_types):
+                if isinstance(choice_list, str):
                    choice_list = choice_list.split('\n')
                if survey_element['variable'] in data:
                    if data[survey_element['variable']] not in choice_list:
@@ -315,7 +314,7 @@ class SurveyJobTemplateMixin(models.Model):
                if 'prompts' not in _exclude_errors:
                    errors['extra_vars'] = [_('Variables {list_of_keys} are not allowed on launch. Check the Prompt on Launch setting '+
                                              'on the {model_name} to include Extra Variables.').format(
-                        list_of_keys=six.text_type(', ').join([six.text_type(key) for key in extra_vars.keys()]),
+                        list_of_keys=', '.join([str(key) for key in extra_vars.keys()]),
                        model_name=self._meta.verbose_name.title())]
        return (accepted, rejected, errors)
@@ -386,7 +385,7 @@ class SurveyJobMixin(models.Model):
            extra_vars = json.loads(self.extra_vars)
            for key in self.survey_passwords:
                value = extra_vars.get(key)
-                if value and isinstance(value, six.string_types) and value.startswith('$encrypted$'):
+                if value and isinstance(value, str) and value.startswith('$encrypted$'):
                    extra_vars[key] = decrypt_value(get_encryption_key('value', pk=None), value)
            return json.dumps(extra_vars)
        else:
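The survey hunks all repeat one predicate: a value participates in password handling only if it is a `str` beginning with the `$encrypted$` sentinel (a reference to a stored encrypted value rather than a literal). Reduced to a helper:

```python
def is_encrypted_sentinel(value):
    # The isinstance(value, str) check is what replaced six.string_types above.
    return isinstance(value, str) and value.startswith('$encrypted$')


assert is_encrypted_sentinel('$encrypted$abc123')
assert not is_encrypted_sentinel('plaintext')
assert not is_encrypted_sentinel(None)   # non-strings never match
```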

View File

@@ -15,7 +15,6 @@ from django.utils.text import slugify
from django.core.exceptions import ValidationError
from django.utils.timezone import now, make_aware, get_default_timezone

-import six

# AWX
from awx.api.versioning import reverse
@@ -134,7 +133,7 @@ class ProjectOptions(models.Model):
    def clean_scm_url(self):
        if self.scm_type == 'insights':
            self.scm_url = settings.INSIGHTS_URL_BASE
-        scm_url = six.text_type(self.scm_url or '')
+        scm_url = str(self.scm_url or '')
        if not self.scm_type:
            return ''
        try:
@@ -145,7 +144,7 @@ class ProjectOptions(models.Model):
        scm_url_parts = urlparse.urlsplit(scm_url)
        if self.scm_type and not any(scm_url_parts):
            raise ValidationError(_('SCM URL is required.'))
-        return six.text_type(self.scm_url or '')
+        return str(self.scm_url or '')

    def clean_credential(self):
        if not self.scm_type:
@@ -329,7 +328,7 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn
        skip_update = bool(kwargs.pop('skip_update', False))
        # Create auto-generated local path if project uses SCM.
        if self.pk and self.scm_type and not self.local_path.startswith('_'):
-            slug_name = slugify(six.text_type(self.name)).replace(u'-', u'_')
+            slug_name = slugify(str(self.name)).replace(u'-', u'_')
            self.local_path = u'_%d__%s' % (int(self.pk), slug_name)
            if 'local_path' not in update_fields:
                update_fields.append('local_path')
@@ -544,8 +543,7 @@ class ProjectUpdate(UnifiedJob, ProjectOptions, JobNotificationMixin, TaskManage
        res = super(ProjectUpdate, self).cancel(job_explanation=job_explanation, is_chain=is_chain)
        if res and self.launch_type != 'sync':
            for inv_src in self.scm_inventory_updates.filter(status='running'):
-                inv_src.cancel(job_explanation=six.text_type(
-                    'Source project update `{}` was canceled.').format(self.name))
+                inv_src.cancel(job_explanation='Source project update `{}` was canceled.'.format(self.name))
        return res

    '''

View File

@@ -12,7 +12,6 @@ import socket
 import subprocess
 import tempfile
 from collections import OrderedDict
-import six
 
 # Django
 from django.conf import settings
@@ -351,8 +350,8 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, Notificatio
         validated_kwargs = kwargs.copy()
         if unallowed_fields:
             if parent_field_name is None:
-                logger.warn(six.text_type('Fields {} are not allowed as overrides to spawn from {}.').format(
-                    six.text_type(', ').join(unallowed_fields), self
+                logger.warn('Fields {} are not allowed as overrides to spawn from {}.'.format(
+                    ', '.join(unallowed_fields), self
                 ))
             for f in unallowed_fields:
                 validated_kwargs.pop(f)
@@ -1305,9 +1304,9 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
                 'dispatcher', self.execution_node
             ).running(timeout=timeout)
         except socket.timeout:
-            logger.error(six.text_type(
-                'could not reach dispatcher on {} within {}s'
-            ).format(self.execution_node, timeout))
+            logger.error('could not reach dispatcher on {} within {}s'.format(
+                self.execution_node, timeout
+            ))
             running = False
         return running
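[review note, not part of the diff] logger.warn, kept as-is in the hunk above, is an undocumented deprecated alias for logger.warning in the stdlib logging module; worth normalizing in a follow-up pass. Sketch (logger name illustrative):

    import logging
    logger = logging.getLogger('awx.main.models.unified_jobs')
    # same behavior, non-deprecated spelling
    logger.warning('Fields %s are not allowed as overrides.', ', '.join(['limit', 'verbosity']))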

View File

@@ -6,7 +6,6 @@ from datetime import timedelta
 import logging
 import uuid
 import json
-import six
 import random
 
 # Django
@@ -131,7 +130,7 @@ class TaskManager():
                     job.job_explanation = _(
                         "Workflow Job spawned from workflow could not start because it "
                         "would result in recursion (spawn order, most recent first: {})"
-                    ).format(six.text_type(', ').join([six.text_type('<{}>').format(tmp) for tmp in display_list]))
+                    ).format(', '.join(['<{}>'.format(tmp) for tmp in display_list]))
                 else:
                     logger.debug('Starting workflow-in-workflow id={}, wfjt={}, ancestors={}'.format(
                         job.id, spawn_node.unified_job_template.pk, [wa.pk for wa in workflow_ancestors]))
@@ -182,7 +181,7 @@ class TaskManager():
                 logger.info('Marking %s as %s.', workflow_job.log_format, 'failed' if has_failed else 'successful')
                 result.append(workflow_job.id)
                 new_status = 'failed' if has_failed else 'successful'
-                logger.debug(six.text_type("Transitioning {} to {} status.").format(workflow_job.log_format, new_status))
+                logger.debug("Transitioning {} to {} status.".format(workflow_job.log_format, new_status))
                 update_fields = ['status', 'start_args']
                 workflow_job.status = new_status
                 if reason:
@@ -217,7 +216,7 @@ class TaskManager():
             try:
                 controller_node = rampart_group.choose_online_controller_node()
             except IndexError:
-                logger.debug(six.text_type("No controllers available in group {} to run {}").format(
+                logger.debug("No controllers available in group {} to run {}".format(
                     rampart_group.name, task.log_format))
                 return
@@ -240,19 +239,19 @@ class TaskManager():
                 # non-Ansible jobs on isolated instances run on controller
                 task.instance_group = rampart_group.controller
                 task.execution_node = random.choice(list(rampart_group.controller.instances.all().values_list('hostname', flat=True)))
-                logger.info(six.text_type('Submitting isolated {} to queue {}.').format(
+                logger.info('Submitting isolated {} to queue {}.'.format(
                     task.log_format, task.instance_group.name, task.execution_node))
             elif controller_node:
                 task.instance_group = rampart_group
                 task.execution_node = instance.hostname
                 task.controller_node = controller_node
-                logger.info(six.text_type('Submitting isolated {} to queue {} controlled by {}.').format(
+                logger.info('Submitting isolated {} to queue {} controlled by {}.'.format(
                     task.log_format, task.execution_node, controller_node))
             else:
                 task.instance_group = rampart_group
                 if instance is not None:
                     task.execution_node = instance.hostname
-                logger.info(six.text_type('Submitting {} to <instance group, instance> <{},{}>.').format(
+                logger.info('Submitting {} to <instance group, instance> <{},{}>.'.format(
                     task.log_format, task.instance_group_id, task.execution_node))
             with disable_activity_stream():
                 task.celery_task_id = str(uuid.uuid4())
@@ -436,7 +435,7 @@ class TaskManager():
     def process_dependencies(self, dependent_task, dependency_tasks):
         for task in dependency_tasks:
             if self.is_job_blocked(task):
-                logger.debug(six.text_type("Dependent {} is blocked from running").format(task.log_format))
+                logger.debug("Dependent {} is blocked from running".format(task.log_format))
                 continue
             preferred_instance_groups = task.preferred_instance_groups
             found_acceptable_queue = False
@@ -445,16 +444,16 @@ class TaskManager():
                 if idle_instance_that_fits is None:
                     idle_instance_that_fits = rampart_group.find_largest_idle_instance()
                 if self.get_remaining_capacity(rampart_group.name) <= 0:
-                    logger.debug(six.text_type("Skipping group {} capacity <= 0").format(rampart_group.name))
+                    logger.debug("Skipping group {} capacity <= 0".format(rampart_group.name))
                     continue
                 execution_instance = rampart_group.fit_task_to_most_remaining_capacity_instance(task)
                 if execution_instance:
-                    logger.debug(six.text_type("Starting dependent {} in group {} instance {}").format(
+                    logger.debug("Starting dependent {} in group {} instance {}".format(
                         task.log_format, rampart_group.name, execution_instance.hostname))
                 elif not execution_instance and idle_instance_that_fits:
                     execution_instance = idle_instance_that_fits
-                    logger.debug(six.text_type("Starting dependent {} in group {} on idle instance {}").format(
+                    logger.debug("Starting dependent {} in group {} on idle instance {}".format(
                         task.log_format, rampart_group.name, execution_instance.hostname))
                 if execution_instance:
                     self.graph[rampart_group.name]['graph'].add_job(task)
@@ -464,17 +463,17 @@ class TaskManager():
                     found_acceptable_queue = True
                     break
                 else:
-                    logger.debug(six.text_type("No instance available in group {} to run job {} w/ capacity requirement {}").format(
+                    logger.debug("No instance available in group {} to run job {} w/ capacity requirement {}".format(
                         rampart_group.name, task.log_format, task.task_impact))
             if not found_acceptable_queue:
-                logger.debug(six.text_type("Dependent {} couldn't be scheduled on graph, waiting for next cycle").format(task.log_format))
+                logger.debug("Dependent {} couldn't be scheduled on graph, waiting for next cycle".format(task.log_format))
 
     def process_pending_tasks(self, pending_tasks):
         running_workflow_templates = set([wf.unified_job_template_id for wf in self.get_running_workflow_jobs()])
         for task in pending_tasks:
             self.process_dependencies(task, self.generate_dependencies(task))
             if self.is_job_blocked(task):
-                logger.debug(six.text_type("{} is blocked from running").format(task.log_format))
+                logger.debug("{} is blocked from running".format(task.log_format))
                 continue
             preferred_instance_groups = task.preferred_instance_groups
             found_acceptable_queue = False
@@ -482,7 +481,7 @@ class TaskManager():
             if isinstance(task, WorkflowJob):
                 if task.unified_job_template_id in running_workflow_templates:
                     if not task.allow_simultaneous:
-                        logger.debug(six.text_type("{} is blocked from running, workflow already running").format(task.log_format))
+                        logger.debug("{} is blocked from running, workflow already running".format(task.log_format))
                         continue
                 else:
                     running_workflow_templates.add(task.unified_job_template_id)
@@ -493,17 +492,17 @@ class TaskManager():
                     idle_instance_that_fits = rampart_group.find_largest_idle_instance()
                 remaining_capacity = self.get_remaining_capacity(rampart_group.name)
                 if remaining_capacity <= 0:
-                    logger.debug(six.text_type("Skipping group {}, remaining_capacity {} <= 0").format(
+                    logger.debug("Skipping group {}, remaining_capacity {} <= 0".format(
                         rampart_group.name, remaining_capacity))
                     continue
                 execution_instance = rampart_group.fit_task_to_most_remaining_capacity_instance(task)
                 if execution_instance:
-                    logger.debug(six.text_type("Starting {} in group {} instance {} (remaining_capacity={})").format(
+                    logger.debug("Starting {} in group {} instance {} (remaining_capacity={})".format(
                         task.log_format, rampart_group.name, execution_instance.hostname, remaining_capacity))
                 elif not execution_instance and idle_instance_that_fits:
                     execution_instance = idle_instance_that_fits
-                    logger.debug(six.text_type("Starting {} in group {} instance {} (remaining_capacity={})").format(
+                    logger.debug("Starting {} in group {} instance {} (remaining_capacity={})".format(
                         task.log_format, rampart_group.name, execution_instance.hostname, remaining_capacity))
                 if execution_instance:
                     self.graph[rampart_group.name]['graph'].add_job(task)
@@ -511,10 +510,10 @@ class TaskManager():
                     found_acceptable_queue = True
                     break
                 else:
-                    logger.debug(six.text_type("No instance available in group {} to run job {} w/ capacity requirement {}").format(
+                    logger.debug("No instance available in group {} to run job {} w/ capacity requirement {}".format(
                         rampart_group.name, task.log_format, task.task_impact))
             if not found_acceptable_queue:
-                logger.debug(six.text_type("{} couldn't be scheduled on graph, waiting for next cycle").format(task.log_format))
+                logger.debug("{} couldn't be scheduled on graph, waiting for next cycle".format(task.log_format))
 
     def calculate_capacity_consumed(self, tasks):
         self.graph = InstanceGroup.objects.capacity_values(tasks=tasks, graph=self.graph)
@@ -527,7 +526,7 @@ class TaskManager():
         return (task.task_impact + current_capacity > capacity_total)
 
     def consume_capacity(self, task, instance_group):
-        logger.debug(six.text_type('{} consumed {} capacity units from {} with prior total of {}').format(
+        logger.debug('{} consumed {} capacity units from {} with prior total of {}'.format(
             task.log_format, task.task_impact, instance_group,
             self.graph[instance_group]['consumed_capacity']))
         self.graph[instance_group]['consumed_capacity'] += task.task_impact
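[review note, not part of the diff] Dropping six.text_type leaves these as eager "...".format(...) calls; logging's %-style form defers formatting until a handler actually emits the record, which matters in hot scheduler paths where DEBUG is usually off. Illustrative only:

    import logging
    logger = logging.getLogger('awx.main.scheduler')
    logger.debug("Skipping group {} capacity <= 0".format('tower'))  # formats even if DEBUG is disabled
    logger.debug("Skipping group %s capacity <= 0", 'tower')         # formats only when emitted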

View File

@@ -28,7 +28,6 @@ from django.utils import timezone
 from crum import get_current_request, get_current_user
 from crum.signals import current_user_getter
-import six
 
 # AWX
 from awx.main.models import *  # noqa
@@ -117,7 +116,7 @@ def emit_update_inventory_computed_fields(sender, **kwargs):
     elif sender == Group.inventory_sources.through:
         sender_name = 'group.inventory_sources'
     else:
-        sender_name = six.text_type(sender._meta.verbose_name)
+        sender_name = str(sender._meta.verbose_name)
     if kwargs['signal'] == post_save:
         if sender == Job:
             return
@@ -147,7 +146,7 @@ def emit_update_inventory_on_created_or_deleted(sender, **kwargs):
         pass
     else:
         return
-    sender_name = six.text_type(sender._meta.verbose_name)
+    sender_name = str(sender._meta.verbose_name)
     logger.debug("%s created or deleted, updating inventory computed fields: %r %r",
                  sender_name, sender, kwargs)
     try:
@@ -437,7 +436,7 @@ def activity_stream_create(sender, instance, created, **kwargs):
         # Special case where Job survey password variables need to be hidden
         if type(instance) == Job:
             changes['credentials'] = [
-                six.text_type('{} ({})').format(c.name, c.id)
+                '{} ({})'.format(c.name, c.id)
                 for c in instance.credentials.iterator()
             ]
             changes['labels'] = [l.name for l in instance.labels.iterator()]
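[review note, not part of the diff] sender._meta.verbose_name is often a lazy translation proxy rather than a plain string; str() forces it to concrete text, which is exactly what six.text_type() did on py3. A sketch, assuming a model declared with ugettext_lazy:

    from django.utils.translation import ugettext_lazy as _
    verbose_name = _('job template')  # lazy proxy until rendered
    sender_name = str(verbose_name)   # concrete str, safe for logging and JSON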

View File

@@ -13,7 +13,6 @@ import logging
 import os
 import re
 import shutil
-import six
 import stat
 import tempfile
 import time
@@ -93,7 +92,7 @@ def dispatch_startup():
             with disable_activity_stream():
                 sch.save()
         except Exception:
-            logger.exception(six.text_type("Failed to rebuild schedule {}.").format(sch))
+            logger.exception("Failed to rebuild schedule {}.".format(sch))
 
     #
     # When the dispatcher starts, if the instance cannot be found in the database,
@@ -125,8 +124,8 @@ def inform_cluster_of_shutdown():
                 reaper.reap(this_inst)
             except Exception:
                 logger.exception('failed to reap jobs for {}'.format(this_inst.hostname))
-        logger.warning(six.text_type('Normal shutdown signal for instance {}, '
-                       'removed self from capacity pool.').format(this_inst.hostname))
+        logger.warning('Normal shutdown signal for instance {}, '
+                       'removed self from capacity pool.'.format(this_inst.hostname))
     except Exception:
         logger.exception('Encountered problem with normal shutdown signal.')
@@ -164,14 +163,14 @@ def apply_cluster_membership_policies():
         ])
         for hostname in ig.policy_instance_list:
             if hostname not in instance_hostnames_map:
-                logger.info(six.text_type("Unknown instance {} in {} policy list").format(hostname, ig.name))
+                logger.info("Unknown instance {} in {} policy list".format(hostname, ig.name))
                 continue
             inst = instance_hostnames_map[hostname]
             group_actual.instances.append(inst.id)
             # NOTE: arguable behavior: policy-list-group is not added to
             # instance's group count for consideration in minimum-policy rules
         if group_actual.instances:
-            logger.info(six.text_type("Policy List, adding Instances {} to Group {}").format(group_actual.instances, ig.name))
+            logger.info("Policy List, adding Instances {} to Group {}".format(group_actual.instances, ig.name))
         if ig.controller_id is None:
             actual_groups.append(group_actual)
@@ -199,7 +198,7 @@ def apply_cluster_membership_policies():
                 i.groups.append(g.obj.id)
                 policy_min_added.append(i.obj.id)
         if policy_min_added:
-            logger.info(six.text_type("Policy minimum, adding Instances {} to Group {}").format(policy_min_added, g.obj.name))
+            logger.info("Policy minimum, adding Instances {} to Group {}".format(policy_min_added, g.obj.name))
 
     # Finally, process instance policy percentages
     for g in sorted(actual_groups, key=lambda x: len(x.instances)):
@@ -215,7 +214,7 @@ def apply_cluster_membership_policies():
                 i.groups.append(g.obj.id)
                 policy_per_added.append(i.obj.id)
         if policy_per_added:
-            logger.info(six.text_type("Policy percentage, adding Instances {} to Group {}").format(policy_per_added, g.obj.name))
+            logger.info("Policy percentage, adding Instances {} to Group {}".format(policy_per_added, g.obj.name))
 
     # Determine if any changes need to be made
     needs_change = False
@@ -259,15 +258,15 @@ def delete_project_files(project_path):
     if os.path.exists(project_path):
         try:
             shutil.rmtree(project_path)
-            logger.info(six.text_type('Success removing project files {}').format(project_path))
+            logger.info('Success removing project files {}'.format(project_path))
         except Exception:
-            logger.exception(six.text_type('Could not remove project directory {}').format(project_path))
+            logger.exception('Could not remove project directory {}'.format(project_path))
     if os.path.exists(lock_file):
         try:
             os.remove(lock_file)
-            logger.debug(six.text_type('Success removing {}').format(lock_file))
+            logger.debug('Success removing {}'.format(lock_file))
         except Exception:
-            logger.exception(six.text_type('Could not remove lock file {}').format(lock_file))
+            logger.exception('Could not remove lock file {}'.format(lock_file))
 
 
 @task()
@@ -288,7 +287,7 @@ def send_notifications(notification_list, job_id=None):
             notification.status = "successful"
             notification.notifications_sent = sent
         except Exception as e:
-            logger.error(six.text_type("Send Notification Failed {}").format(e))
+            logger.error("Send Notification Failed {}".format(e))
             notification.status = "failed"
             notification.error = smart_str(e)
             update_fields.append('error')
@@ -296,7 +295,7 @@ def send_notifications(notification_list, job_id=None):
         try:
             notification.save(update_fields=update_fields)
         except Exception:
-            logger.exception(six.text_type('Error saving notification {} result.').format(notification.id))
+            logger.exception('Error saving notification {} result.'.format(notification.id))
 
 
 @task()
@@ -327,7 +326,7 @@ def purge_old_stdout_files():
     for f in os.listdir(settings.JOBOUTPUT_ROOT):
         if os.path.getctime(os.path.join(settings.JOBOUTPUT_ROOT,f)) < nowtime - settings.LOCAL_STDOUT_EXPIRE_TIME:
             os.unlink(os.path.join(settings.JOBOUTPUT_ROOT,f))
-            logger.info(six.text_type("Removing {}").format(os.path.join(settings.JOBOUTPUT_ROOT,f)))
+            logger.info("Removing {}".format(os.path.join(settings.JOBOUTPUT_ROOT,f)))
 
 
 @task(queue=get_local_queuename)
@@ -340,7 +339,7 @@ def cluster_node_heartbeat():
     (changed, instance) = Instance.objects.get_or_register()
     if changed:
-        logger.info(six.text_type("Registered tower node '{}'").format(instance.hostname))
+        logger.info("Registered tower node '{}'".format(instance.hostname))
 
     for inst in list(instance_list):
         if inst.hostname == settings.CLUSTER_HOST_ID:
@@ -352,7 +351,7 @@ def cluster_node_heartbeat():
     if this_inst:
         startup_event = this_inst.is_lost(ref_time=nowtime)
         if this_inst.capacity == 0 and this_inst.enabled:
-            logger.warning(six.text_type('Rejoining the cluster as instance {}.').format(this_inst.hostname))
+            logger.warning('Rejoining the cluster as instance {}.'.format(this_inst.hostname))
         if this_inst.enabled:
             this_inst.refresh_capacity()
         elif this_inst.capacity != 0 and not this_inst.enabled:
@@ -367,11 +366,12 @@ def cluster_node_heartbeat():
         if other_inst.version == "":
             continue
         if Version(other_inst.version.split('-', 1)[0]) > Version(awx_application_version.split('-', 1)[0]) and not settings.DEBUG:
-            logger.error(six.text_type("Host {} reports version {}, but this node {} is at {}, shutting down")
-                         .format(other_inst.hostname,
-                                 other_inst.version,
-                                 this_inst.hostname,
-                                 this_inst.version))
+            logger.error("Host {} reports version {}, but this node {} is at {}, shutting down".format(
+                other_inst.hostname,
+                other_inst.version,
+                this_inst.hostname,
+                this_inst.version
+            ))
             # Shutdown signal will set the capacity to zero to ensure no Jobs get added to this instance.
             # The heartbeat task will reset the capacity to the system capacity after upgrade.
             stop_local_services(communicate=False)
@@ -392,17 +392,17 @@ def cluster_node_heartbeat():
             if other_inst.capacity != 0 and not settings.AWX_AUTO_DEPROVISION_INSTANCES:
                 other_inst.capacity = 0
                 other_inst.save(update_fields=['capacity'])
-                logger.error(six.text_type("Host {} last checked in at {}, marked as lost.").format(
+                logger.error("Host {} last checked in at {}, marked as lost.".format(
                     other_inst.hostname, other_inst.modified))
             elif settings.AWX_AUTO_DEPROVISION_INSTANCES:
                 deprovision_hostname = other_inst.hostname
                 other_inst.delete()
-                logger.info(six.text_type("Host {} Automatically Deprovisioned.").format(deprovision_hostname))
+                logger.info("Host {} Automatically Deprovisioned.".format(deprovision_hostname))
         except DatabaseError as e:
             if 'did not affect any rows' in str(e):
-                logger.debug(six.text_type('Another instance has marked {} as lost').format(other_inst.hostname))
+                logger.debug('Another instance has marked {} as lost'.format(other_inst.hostname))
             else:
-                logger.exception(six.text_type('Error marking {} as lost').format(other_inst.hostname))
+                logger.exception('Error marking {} as lost'.format(other_inst.hostname))
 
 
 @task(queue=get_local_queuename)
@@ -429,7 +429,7 @@ def awx_isolated_heartbeat():
         isolated_instance.save(update_fields=['last_isolated_check'])
     # Slow pass looping over isolated IGs and their isolated instances
     if len(isolated_instance_qs) > 0:
-        logger.debug(six.text_type("Managing isolated instances {}.").format(','.join([inst.hostname for inst in isolated_instance_qs])))
+        logger.debug("Managing isolated instances {}.".format(','.join([inst.hostname for inst in isolated_instance_qs])))
         isolated_manager.IsolatedManager.health_check(isolated_instance_qs, awx_application_version)
@@ -462,7 +462,7 @@ def awx_periodic_scheduler():
         try:
             job_kwargs = schedule.get_job_kwargs()
             new_unified_job = schedule.unified_job_template.create_unified_job(**job_kwargs)
-            logger.info(six.text_type('Spawned {} from schedule {}-{}.').format(
+            logger.info('Spawned {} from schedule {}-{}.'.format(
                 new_unified_job.log_format, schedule.name, schedule.pk))
 
             if invalid_license:
@@ -575,7 +575,7 @@ def update_host_smart_inventory_memberships():
                 changed_inventories.add(smart_inventory)
             SmartInventoryMembership.objects.bulk_create(memberships)
     except IntegrityError as e:
-        logger.error(six.text_type("Update Host Smart Inventory Memberships failed due to an exception: {}").format(e))
+        logger.error("Update Host Smart Inventory Memberships failed due to an exception: {}".format(e))
         return
     # Update computed fields for changed inventories outside atomic action
    for smart_inventory in changed_inventories:
@@ -602,7 +602,7 @@ def delete_inventory(inventory_id, user_id, retries=5):
             'inventories-status_changed',
             {'group_name': 'inventories', 'inventory_id': inventory_id, 'status': 'deleted'}
         )
-        logger.debug(six.text_type('Deleted inventory {} as user {}.').format(inventory_id, user_id))
+        logger.debug('Deleted inventory {} as user {}.'.format(inventory_id, user_id))
     except Inventory.DoesNotExist:
         logger.exception("Delete Inventory failed due to missing inventory: " + str(inventory_id))
         return
@@ -626,7 +626,7 @@ def with_path_cleanup(f):
                 elif os.path.exists(p):
                     os.remove(p)
             except OSError:
-                logger.exception(six.text_type("Failed to remove tmp file: {}").format(p))
+                logger.exception("Failed to remove tmp file: {}".format(p))
         self.cleanup_paths = []
     return _wrapped
@@ -1064,13 +1064,13 @@ class BaseTask(object):
             try:
                 self.post_run_hook(instance, status, **kwargs)
             except Exception:
-                logger.exception(six.text_type('{} Post run hook errored.').format(instance.log_format))
+                logger.exception('{} Post run hook errored.'.format(instance.log_format))
             instance = self.update_model(pk)
             if instance.cancel_flag:
                 status = 'canceled'
             cancel_wait = (now() - instance.modified).seconds if instance.modified else 0
             if cancel_wait > 5:
-                logger.warn(six.text_type('Request to cancel {} took {} seconds to complete.').format(instance.log_format, cancel_wait))
+                logger.warn('Request to cancel {} took {} seconds to complete.'.format(instance.log_format, cancel_wait))
             instance = self.update_model(pk, status=status, result_traceback=tb,
                                          output_replacements=output_replacements,
@@ -1079,7 +1079,7 @@ class BaseTask(object):
         try:
             self.final_run_hook(instance, status, **kwargs)
         except Exception:
-            logger.exception(six.text_type('{} Final run hook errored.').format(instance.log_format))
+            logger.exception('{} Final run hook errored.'.format(instance.log_format))
         instance.websocket_emit_status(status)
         if status != 'successful':
             if status == 'canceled':
@@ -1258,7 +1258,7 @@ class RunJob(BaseTask):
                 env['ANSIBLE_NET_SSH_KEYFILE'] = ssh_keyfile
                 authorize = network_cred.get_input('authorize', default=False)
-                env['ANSIBLE_NET_AUTHORIZE'] = six.text_type(int(authorize))
+                env['ANSIBLE_NET_AUTHORIZE'] = str(int(authorize))
                 if authorize:
                     env['ANSIBLE_NET_AUTH_PASS'] = network_cred.get_input('authorize_password', default='')
@@ -1684,15 +1684,15 @@ class RunProjectUpdate(BaseTask):
             if not inv_src.update_on_project_update:
                 continue
             if inv_src.scm_last_revision == scm_revision:
-                logger.debug(six.text_type('Skipping SCM inventory update for `{}` because '
-                             'project has not changed.').format(inv_src.name))
+                logger.debug('Skipping SCM inventory update for `{}` because '
+                             'project has not changed.'.format(inv_src.name))
                 continue
-            logger.debug(six.text_type('Local dependent inventory update for `{}`.').format(inv_src.name))
+            logger.debug('Local dependent inventory update for `{}`.'.format(inv_src.name))
             with transaction.atomic():
                 if InventoryUpdate.objects.filter(inventory_source=inv_src,
                                                   status__in=ACTIVE_STATES).exists():
-                    logger.info(six.text_type('Skipping SCM inventory update for `{}` because '
-                                'another update is already active.').format(inv_src.name))
+                    logger.info('Skipping SCM inventory update for `{}` because '
+                                'another update is already active.'.format(inv_src.name))
                     continue
                 local_inv_update = inv_src.create_inventory_update(
                     _eager_fields=dict(
@@ -1705,8 +1705,9 @@ class RunProjectUpdate(BaseTask):
             try:
                 inv_update_class().run(local_inv_update.id)
             except Exception:
-                logger.exception(six.text_type('{} Unhandled exception updating dependent SCM inventory sources.')
-                                 .format(project_update.log_format))
+                logger.exception('{} Unhandled exception updating dependent SCM inventory sources.'.format(
+                    project_update.log_format
+                ))
 
             try:
                 project_update.refresh_from_db()
@@ -1719,10 +1720,10 @@ class RunProjectUpdate(BaseTask):
                 logger.warning('%s Dependent inventory update deleted during execution.', project_update.log_format)
                 continue
             if project_update.cancel_flag:
-                logger.info(six.text_type('Project update {} was canceled while updating dependent inventories.').format(project_update.log_format))
+                logger.info('Project update {} was canceled while updating dependent inventories.'.format(project_update.log_format))
                 break
             if local_inv_update.cancel_flag:
-                logger.info(six.text_type('Continuing to process project dependencies after {} was canceled').format(local_inv_update.log_format))
+                logger.info('Continuing to process project dependencies after {} was canceled'.format(local_inv_update.log_format))
             if local_inv_update.status == 'successful':
                 inv_src.scm_last_revision = scm_revision
                 inv_src.save(update_fields=['scm_last_revision'])
@@ -1731,7 +1732,7 @@ class RunProjectUpdate(BaseTask):
         try:
             fcntl.flock(self.lock_fd, fcntl.LOCK_UN)
         except IOError as e:
-            logger.error(six.text_type("I/O error({0}) while trying to open lock file [{1}]: {2}").format(e.errno, instance.get_lock_file(), e.strerror))
+            logger.error("I/O error({0}) while trying to open lock file [{1}]: {2}".format(e.errno, instance.get_lock_file(), e.strerror))
             os.close(self.lock_fd)
             raise
@@ -1749,7 +1750,7 @@ class RunProjectUpdate(BaseTask):
         try:
             self.lock_fd = os.open(lock_path, os.O_RDONLY | os.O_CREAT)
         except OSError as e:
-            logger.error(six.text_type("I/O error({0}) while trying to open lock file [{1}]: {2}").format(e.errno, lock_path, e.strerror))
+            logger.error("I/O error({0}) while trying to open lock file [{1}]: {2}".format(e.errno, lock_path, e.strerror))
             raise
 
         start_time = time.time()
@@ -1757,23 +1758,23 @@ class RunProjectUpdate(BaseTask):
             try:
                 instance.refresh_from_db(fields=['cancel_flag'])
                 if instance.cancel_flag:
-                    logger.info(six.text_type("ProjectUpdate({0}) was cancelled".format(instance.pk)))
+                    logger.info("ProjectUpdate({0}) was cancelled".format(instance.pk))
                     return
                 fcntl.flock(self.lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
                 break
             except IOError as e:
                 if e.errno not in (errno.EAGAIN, errno.EACCES):
                     os.close(self.lock_fd)
-                    logger.error(six.text_type("I/O error({0}) while trying to aquire lock on file [{1}]: {2}").format(e.errno, lock_path, e.strerror))
+                    logger.error("I/O error({0}) while trying to aquire lock on file [{1}]: {2}".format(e.errno, lock_path, e.strerror))
                     raise
                 else:
                     time.sleep(1.0)
         waiting_time = time.time() - start_time
 
         if waiting_time > 1.0:
-            logger.info(six.text_type(
+            logger.info(
                 '{} spent {} waiting to acquire lock for local source tree '
-                'for path {}.').format(instance.log_format, waiting_time, lock_path))
+                'for path {}.'.format(instance.log_format, waiting_time, lock_path))
 
     def pre_run_hook(self, instance, **kwargs):
         # re-create root project folder if a natural disaster has destroyed it
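[review note, not part of the diff] The hunk above keeps the non-blocking advisory-lock loop: LOCK_EX | LOCK_NB raises instead of blocking, so the task can re-check cancel_flag between attempts. A stripped-down sketch of the pattern (the path and sleep interval are illustrative):

    import errno, fcntl, os, time

    fd = os.open('/tmp/project.lock', os.O_RDONLY | os.O_CREAT)
    while True:
        try:
            fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)  # raises OSError(EAGAIN) if held
            break
        except OSError as e:
            if e.errno not in (errno.EAGAIN, errno.EACCES):
                os.close(fd)
                raise
            time.sleep(1.0)  # lock busy: poll for cancellation, then retry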
@@ -1790,7 +1791,7 @@ class RunProjectUpdate(BaseTask):
             if lines:
                 p.scm_revision = lines[0].strip()
             else:
-                logger.info(six.text_type("{} Could not find scm revision in check").format(instance.log_format))
+                logger.info("{} Could not find scm revision in check".format(instance.log_format))
             p.playbook_files = p.playbooks
             p.inventory_files = p.inventories
             p.save()
@@ -1912,7 +1913,7 @@ class RunInventoryUpdate(BaseTask):
             ec2_opts['cache_path'] = cache_path
             ec2_opts.setdefault('cache_max_age', '300')
             for k, v in ec2_opts.items():
-                cp.set(section, k, six.text_type(v))
+                cp.set(section, k, str(v))
 
         # Allow custom options to vmware inventory script.
         elif inventory_update.source == 'vmware':
@@ -1931,7 +1932,7 @@ class RunInventoryUpdate(BaseTask):
             vmware_opts.setdefault('groupby_patterns', inventory_update.group_by)
 
             for k, v in vmware_opts.items():
-                cp.set(section, k, six.text_type(v))
+                cp.set(section, k, str(v))
 
         elif inventory_update.source == 'satellite6':
             section = 'foreman'
@@ -1950,7 +1951,7 @@ class RunInventoryUpdate(BaseTask):
                 elif k == 'satellite6_want_hostcollections' and isinstance(v, bool):
                     want_hostcollections = v
                 else:
-                    cp.set(section, k, six.text_type(v))
+                    cp.set(section, k, str(v))
 
             if credential:
                 cp.set(section, 'url', credential.get_input('host', default=''))
@@ -2009,7 +2010,7 @@ class RunInventoryUpdate(BaseTask):
             azure_rm_opts = dict(inventory_update.source_vars_dict.items())
             for k, v in azure_rm_opts.items():
-                cp.set(section, k, six.text_type(v))
+                cp.set(section, k, str(v))
 
         # Return INI content.
         if cp.sections():
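[review note, not part of the diff] The repeated cp.set(section, k, str(v)) coercion is load-bearing on py3: configparser.ConfigParser.set() raises TypeError for non-string values, where py2's ConfigParser was more forgiving. Illustrative:

    import configparser

    cp = configparser.ConfigParser()
    cp.add_section('ec2')
    cp.set('ec2', 'cache_max_age', str(300))  # passing the int 300 directly raises TypeError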
@@ -2094,7 +2095,7 @@ class RunInventoryUpdate(BaseTask):
         elif inventory_update.source in ['scm', 'custom']:
             for env_k in inventory_update.source_vars_dict:
                 if str(env_k) not in env and str(env_k) not in settings.INV_ENV_VARIABLE_BLACKLIST:
-                    env[str(env_k)] = six.text_type(inventory_update.source_vars_dict[env_k])
+                    env[str(env_k)] = str(inventory_update.source_vars_dict[env_k])
         elif inventory_update.source == 'tower':
             env['TOWER_INVENTORY'] = inventory_update.instance_filters
             env['TOWER_LICENSE_TYPE'] = get_licenser().validate()['license_type']
@@ -2410,7 +2411,7 @@ class RunSystemJob(BaseTask):
                            '--management-jobs', '--ad-hoc-commands', '--workflow-jobs',
                            '--notifications'])
         except Exception:
-            logger.exception(six.text_type("{} Failed to parse system job").format(system_job.log_format))
+            logger.exception("{} Failed to parse system job".format(system_job.log_format))
         return args
 
     def build_env(self, instance, **kwargs):
@@ -2436,7 +2437,7 @@ def _reconstruct_relationships(copy_mapping):
             setattr(new_obj, field_name, related_obj)
         elif field.many_to_many:
             for related_obj in getattr(old_obj, field_name).all():
-                logger.debug(six.text_type('Deep copy: Adding {} to {}({}).{} relationship').format(
+                logger.debug('Deep copy: Adding {} to {}({}).{} relationship'.format(
                     related_obj, new_obj, model, field_name
                 ))
                 getattr(new_obj, field_name).add(copy_mapping.get(related_obj, related_obj))
@@ -2448,7 +2449,7 @@ def deep_copy_model_obj(
         model_module, model_name, obj_pk, new_obj_pk,
         user_pk, sub_obj_list, permission_check_func=None
 ):
-    logger.info(six.text_type('Deep copy {} from {} to {}.').format(model_name, obj_pk, new_obj_pk))
+    logger.info('Deep copy {} from {} to {}.'.format(model_name, obj_pk, new_obj_pk))
     from awx.api.generics import CopyAPIView
     from awx.main.signals import disable_activity_stream
     model = getattr(importlib.import_module(model_module), model_name, None)

View File

@@ -1,6 +1,5 @@
 from django.contrib.auth.models import User
-import six
 
 from awx.main.models import (
     Organization,
@@ -150,7 +149,7 @@ def create_survey_spec(variables=None, default_type='integer', required=True, mi
         vars_list = variables
     else:
         vars_list = [variables]
-    if isinstance(variables[0], six.string_types):
+    if isinstance(variables[0], str):
         slogan = variables[0]
     else:
         slogan = variables[0].get('question_name', 'something')

View File

@@ -2,7 +2,6 @@ from unittest import mock
 import pytest
 import json
-import six
 
 from awx.api.versioning import reverse
 from awx.main.utils import timestamp_apiformat
@@ -107,7 +106,7 @@ def test_content(hosts, fact_scans, get, user, fact_ansible_json, monkeypatch_js
     assert fact_known.host_id == response.data['host']
     # TODO: Just make response.data['facts'] when we're only dealing with postgres, or if jsonfields ever fixes this bug
-    assert fact_ansible_json == (json.loads(response.data['facts']) if isinstance(response.data['facts'], six.text_type) else response.data['facts'])
+    assert fact_ansible_json == (json.loads(response.data['facts']) if isinstance(response.data['facts'], str) else response.data['facts'])
     assert timestamp_apiformat(fact_known.timestamp) == response.data['timestamp']
     assert fact_known.module == response.data['module']
@@ -119,7 +118,7 @@ def _test_search_by_module(hosts, fact_scans, get, user, fact_json, module_name)
     (fact_known, response) = setup_common(hosts, fact_scans, get, user, module_name=module_name, get_params=params)
     # TODO: Just make response.data['facts'] when we're only dealing with postgres, or if jsonfields ever fixes this bug
-    assert fact_json == (json.loads(response.data['facts']) if isinstance(response.data['facts'], six.text_type) else response.data['facts'])
+    assert fact_json == (json.loads(response.data['facts']) if isinstance(response.data['facts'], str) else response.data['facts'])
     assert timestamp_apiformat(fact_known.timestamp) == response.data['timestamp']
     assert module_name == response.data['module']

View File

@@ -17,7 +17,6 @@ from awx.api.versioning import reverse
 from awx.conf.models import Setting
 from awx.main.utils.handlers import AWXProxyHandler, LoggingConnectivityException
-import six
 
 TEST_GIF_LOGO = 'data:image/gif;base64,R0lGODlhIQAjAPIAAP//////AP8AAMzMAJmZADNmAAAAAAAAACH/C05FVFNDQVBFMi4wAwEAAAAh+QQJCgAHACwAAAAAIQAjAAADo3i63P4wykmrvTjrzZsxXfR94WMQBFh6RECuixHMLyzPQ13ewZCvow9OpzEAjIBj79cJJmU+FceIVEZ3QRozxBttmyOBwPBtisdX4Bha3oxmS+llFIPHQXQKkiSEXz9PeklHBzx3hYNyEHt4fmmAhHp8Nz45KgV5FgWFOFEGmwWbGqEfniChohmoQZ+oqRiZDZhEgk81I4mwg4EKVbxzrDHBEAkAIfkECQoABwAsAAAAACEAIwAAA6V4utz+MMpJq724GpP15p1kEAQYQmOwnWjgrmxjuMEAx8rsDjZ+fJvdLWQAFAHGWo8FRM54JqIRmYTigDrDMqZTbbbMj0CgjTLHZKvPQH6CTx+a2vKR0XbbOsoZ7SphG057gjl+c0dGgzeGNiaBiSgbBQUHBV08NpOVlkMSk0FKjZuURHiiOJxQnSGfQJuoEKREejK0dFRGjoiQt7iOuLx0rgxYEQkAIfkECQoABwAsAAAAACEAIwAAA7h4utxnxslJDSGR6nrz/owxYB64QUEwlGaVqlB7vrAJscsd3Lhy+wBArGEICo3DUFH4QDqK0GMy51xOgcGlEAfJ+iAFie62chR+jYKaSAuQGOqwJp7jGQRDuol+F/jxZWsyCmoQfwYwgoM5Oyg1i2w0A2WQIW2TPYOIkleQmy+UlYygoaIPnJmapKmqKiusMmSdpjxypnALtrcHioq3ury7hGm3dnVosVpMWFmwREZbddDOSsjVswcJACH5BAkKAAcALAAAAAAhACMAAAOxeLrc/jDKSZUxNS9DCNYV54HURQwfGRlDEFwqdLVuGjOsW9/Odb0wnsUAKBKNwsMFQGwyNUHckVl8bqI4o43lA26PNkv1S9DtNuOeVirw+aTI3qWAQwnud1vhLSnQLS0GeFF+GoVKNF0fh4Z+LDQ6Bn5/MTNmL0mAl2E3j2aclTmRmYCQoKEDiaRDKFhJez6UmbKyQowHtzy1uEl8DLCnEktrQ2PBD1NxSlXKIW5hz6cJACH5BAkKAAcALAAAAAAhACMAAAOkeLrc/jDKSau9OOvNlTFd9H3hYxAEWDJfkK5LGwTq+g0zDR/GgM+10A04Cm56OANgqTRmkDTmSOiLMgFOTM9AnFJHuexzYBAIijZf2SweJ8ttbbXLmd5+wBiJosSCoGF/fXEeS1g8gHl9hxODKkh4gkwVIwUekESIhA4FlgV3PyCWG52WI2oGnR2lnUWpqhqVEF4Xi7QjhpsshpOFvLosrnpoEAkAIfkECQoABwAsAAAAACEAIwAAA6l4utz+MMpJq71YGpPr3t1kEAQXQltQnk8aBCa7bMMLy4wx1G8s072PL6SrGQDI4zBThCU/v50zCVhidIYgNPqxWZkDg0AgxB2K4vEXbBSvr1JtZ3uOext0x7FqovF6OXtfe1UzdjAxhINPM013ChtJER8FBQeVRX8GlpggFZWWfjwblTiigGZnfqRmpUKbljKxDrNMeY2eF4R8jUiSur6/Z8GFV2WBtwwJACH5BAkKAAcALAAAAAAhACMAAAO6eLrcZi3KyQwhkGpq8f6ONWQgaAxB8JTfg6YkO50pzD5xhaurhCsGAKCnEw6NucNDCAkyI8ugdAhFKpnJJdMaeiofBejowUseCr9GYa0j1GyMdVgjBxoEuPSZXWKf7gKBeHtzMms0gHgGfDIVLztmjScvNZEyk28qjT40b5aXlHCbDgOhnzedoqOOlKeopaqrCy56sgtotbYKhYW6e7e9tsHBssO6eSTIm1peV0iuFUZDyU7NJnmcuQsJACH5BAkKAAcALAAAAAAhACMAAAOteLrc/jDKSZsxNS9DCNYV54Hh4H0kdAXBgKaOwbYX/Miza1vrVe8KA2AoJL5gwiQgeZz4GMXlcHl8xozQ3kW3KTajL9zsBJ1+sV2fQfALem+XAlRApxu4ioI1UpC76zJ4fRqDBzI+LFyFhH1iiS59fkgziW07jjRAG5QDeECOLk2Tj6KjnZafW6hAej6Smgevr6yysza2tiCuMasUF2Yov2gZUUQbU8YaaqjLpQkAOw==' # NOQA
 TEST_PNG_LOGO = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACEAAAAjCAYAAAAaLGNkAAAAAXNSR0IB2cksfwAAAdVpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IlhNUCBDb3JlIDUuNC4wIj4KICAgPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4KICAgICAgPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIKICAgICAgICAgICAgeG1sbnM6dGlmZj0iaHR0cDovL25zLmFkb2JlLmNvbS90aWZmLzEuMC8iPgogICAgICAgICA8dGlmZjpDb21wcmVzc2lvbj4xPC90aWZmOkNvbXByZXNzaW9uPgogICAgICAgICA8dGlmZjpQaG90b21ldHJpY0ludGVycHJldGF0aW9uPjI8L3RpZmY6UGhvdG9tZXRyaWNJbnRlcnByZXRhdGlvbj4KICAgICAgICAgPHRpZmY6T3JpZW50YXRpb24+MTwvdGlmZjpPcmllbnRhdGlvbj4KICAgICAgPC9yZGY6RGVzY3JpcHRpb24+CiAgIDwvcmRmOlJERj4KPC94OnhtcG1ldGE+Cjl0tmoAAAHVSURBVFgJ7VZRsoMgDNTOu5E9U+/Ud6Z6JssGNg2oNKD90xkHCNnNkgTbYbieKwNXBn6bgSXQ4+16xi5UDiqDN3Pecr6+1fM5DHh7n1NEIPjjoRLKzOjG3qQ5dRtEy2LCjh/Gz2wDZE2nZYKkrxdn/kY9XQQkGCGqqDY5IgJFkEKgBCzDNGXhTKEye7boFRH6IPJj5EshiNCSjV4R4eSx7zhmR2tcdIuwmWiMeao7e0JHViZEWUI5aP8a9O+rx74D6sGEiJftiX3YeueIiFXg2KrhpqzjVC3dPZFYJZ7NOwwtNwM8R0UkLfH0sT5qck+OlkMq0BucKr0iWG7gpAQksD9esM1z3Lnf6SHjLh67nnKEGxC/iomWhByTeXOQJGHHcKxwHhHKnt1HIdYtmexkIb/HOURWTSJqn2gKMDG0bDUc/D0iAseovxUBoylmQCug6IVhSv+4DIeKI94jAr4AjiSEgQ25JYB+YWT9BZ94AM8erwgFkRifaArA6U0G5KT0m//z26REZuK9okgrT6VwE1jTHjbVzyNAyRwTEPOtuiex9FVBNZCkruaA4PZqFp1u8Rpww9/6rcK5y0EkAxRiZJt79PWOVYWGRE9pbJhavMengMflGyumk0akMsQnAAAAAElFTkSuQmCC' # NOQA
@@ -78,7 +77,7 @@ def test_awx_task_env_validity(get, patch, admin, value, expected):
     if expected == 200:
         resp = get(url, user=admin)
-        assert resp.data['AWX_TASK_ENV'] == dict((k, six.text_type(v)) for k, v in value.items())
+        assert resp.data['AWX_TASK_ENV'] == dict((k, str(v)) for k, v in value.items())
 
 @pytest.mark.django_db
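[review note, not part of the diff] dict((k, str(v)) for k, v in value.items()) predates dict comprehensions; on py3-only code the equivalent {k: str(v) for k, v in value.items()} reads more directly. Purely stylistic, shown only as an option:

    value = {'PATH': '/usr/bin', 'MAX_RETRIES': 3}  # hypothetical task env
    assert dict((k, str(v)) for k, v in value.items()) == {k: str(v) for k, v in value.items()}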

View File

@@ -3,15 +3,14 @@ import pytest
 from unittest import mock
 import json
 import os
-import six
 import tempfile
 import shutil
+import urllib.parse
 from datetime import timedelta
 from unittest.mock import PropertyMock
 
 # Django
 from django.core.urlresolvers import resolve
-from django.utils.six.moves.urllib.parse import urlparse
 from django.utils import timezone
 from django.contrib.auth.models import User
 from django.core.serializers.json import DjangoJSONEncoder
@@ -523,7 +522,7 @@ def _request(verb):
         if 'format' not in kwargs and 'content_type' not in kwargs:
             kwargs['format'] = 'json'
-        view, view_args, view_kwargs = resolve(urlparse(url)[2])
+        view, view_args, view_kwargs = resolve(urllib.parse.urlparse(url)[2])
         request = getattr(APIRequestFactory(), verb)(url, **kwargs)
         if isinstance(kwargs.get('cookies', None), dict):
             for key, value in kwargs['cookies'].items():
@@ -730,7 +729,7 @@ def get_db_prep_save(self, value, connection, **kwargs):
         return None
     # default values come in as strings; only non-strings should be
     # run through `dumps`
-    if not isinstance(value, six.string_types):
+    if not isinstance(value, str):
         value = dumps(value)
     return value
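[review note, not part of the diff] six.moves.urllib.parse (and Django's vendored django.utils.six copy) is only an alias table; on py3 every name in it resolves to the stdlib urllib.parse module, so the import swap above is behavior-preserving. Example with a hypothetical URL:

    import urllib.parse
    assert urllib.parse.urlparse('https://awx.example.org/api/v2/jobs/')[2] == '/api/v2/jobs/'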

View File

@@ -2,7 +2,6 @@
 import pytest
 from unittest import mock
-import six
 
 from django.core.exceptions import ValidationError
@@ -249,7 +248,7 @@ def test_inventory_update_name(inventory, inventory_source):
 @pytest.mark.django_db
 def test_inventory_name_with_unicode(inventory, inventory_source):
-    inventory.name = six.u('オオオ')
+    inventory.name = 'オオオ'
     inventory.save()
     iu = inventory_source.update()
     assert iu.name.startswith(inventory.name)
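[review note, not part of the diff] six.u() exists only to emulate py2's u'' prefix and returns its argument unchanged on py3, where every string literal is already unicode text:

    name = 'オオオ'
    assert isinstance(name, str) and len(name) == 3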

View File

@@ -1,5 +1,4 @@
 import pytest
-import six
 
 from awx.main.models import JobTemplate, Job, JobHostSummary, WorkflowJob
@@ -71,12 +70,12 @@ def test_job_host_summary_representation(host):
         host=host, job=job,
         changed=1, dark=2, failures=3, ok=4, processed=5, skipped=6
     )
-    assert 'single-host changed=1 dark=2 failures=3 ok=4 processed=5 skipped=6' == six.text_type(jhs)
+    assert 'single-host changed=1 dark=2 failures=3 ok=4 processed=5 skipped=6' == str(jhs)
 
     # Representation should be robust to deleted related items
     jhs = JobHostSummary.objects.get(pk=jhs.id)
     host.delete()
-    assert 'N/A changed=1 dark=2 failures=3 ok=4 processed=5 skipped=6' == six.text_type(jhs)
+    assert 'N/A changed=1 dark=2 failures=3 ok=4 processed=5 skipped=6' == str(jhs)
 
 
 @pytest.mark.django_db


@@ -1,6 +1,5 @@
import itertools import itertools
import pytest import pytest
import six
# CRUM # CRUM
from crum import impersonate from crum import impersonate
@@ -74,7 +73,7 @@ class TestCreateUnifiedJob:
new_creds = [] new_creds = []
for cred in jt_linked.credentials.all(): for cred in jt_linked.credentials.all():
new_creds.append(Credential.objects.create( new_creds.append(Credential.objects.create(
name=six.text_type(cred.name) + six.text_type('_new'), name=str(cred.name) + '_new',
credential_type=cred.credential_type, credential_type=cred.credential_type,
inputs=cred.inputs inputs=cred.inputs
)) ))


@@ -19,7 +19,6 @@ from django.utils.encoding import smart_str, smart_bytes
from awx.main.expect import run, isolated_manager from awx.main.expect import run, isolated_manager
from django.conf import settings from django.conf import settings
import six
HERE, FILENAME = os.path.split(__file__) HERE, FILENAME = os.path.split(__file__)
@@ -107,7 +106,7 @@ def test_cancel_callback_error():
@pytest.mark.timeout(3) # https://github.com/ansible/tower/issues/2391#issuecomment-401946895 @pytest.mark.timeout(3) # https://github.com/ansible/tower/issues/2391#issuecomment-401946895
@pytest.mark.parametrize('value', ['abc123', six.u('Iñtërnâtiônàlizætiøn')]) @pytest.mark.parametrize('value', ['abc123', 'Iñtërnâtiônàlizætiøn'])
def test_env_vars(value): def test_env_vars(value):
stdout = StringIO() stdout = StringIO()
status, rc = run.run_pexpect( status, rc = run.run_pexpect(


@@ -1,6 +1,5 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
import pytest import pytest
import six
from django.core.exceptions import ValidationError from django.core.exceptions import ValidationError
from rest_framework.serializers import ValidationError as DRFValidationError from rest_framework.serializers import ValidationError as DRFValidationError
@@ -152,8 +151,7 @@ def test_cred_type_injectors_schema(injectors, valid):
) )
field = CredentialType._meta.get_field('injectors') field = CredentialType._meta.get_field('injectors')
if valid is False: if valid is False:
with pytest.raises(ValidationError, message=six.text_type( with pytest.raises(ValidationError, message="Injector was supposed to throw a validation error, data: {}".format(injectors)):
"Injector was supposed to throw a validation error, data: {}").format(injectors)):
field.clean(injectors, type_) field.clean(injectors, type_)
else: else:
field.clean(injectors, type_) field.clean(injectors, type_)
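Worth noting for future maintenance: the message= keyword to pytest.raises was deprecated in pytest 4 and removed in pytest 5. A hedged, self-contained sketch of an equivalent that keeps the custom failure text (clean below is an illustrative stand-in for field.clean):

    import pytest

    def clean(value):  # stand-in that always rejects its input
        raise ValueError('not a valid injector')

    try:
        clean({'env': {'X_TOKEN': '{{token}}'}})
    except ValueError:
        pass
    else:
        pytest.fail('Injector was supposed to throw a validation error')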


@@ -14,7 +14,6 @@ from backports.tempfile import TemporaryDirectory
import fcntl import fcntl
from unittest import mock from unittest import mock
import pytest import pytest
import six
import yaml import yaml
from django.conf import settings from django.conf import settings
@@ -1562,7 +1561,7 @@ class TestJobCredentials(TestJobExecution):
self.task.run(self.pk) self.task.run(self.pk)
def test_custom_environment_injectors_with_unicode_content(self): def test_custom_environment_injectors_with_unicode_content(self):
value = six.u('Iñtërnâtiônàlizætiøn') value = 'Iñtërnâtiônàlizætiøn'
some_cloud = CredentialType( some_cloud = CredentialType(
kind='cloud', kind='cloud',
name='SomeCloud', name='SomeCloud',


@@ -9,7 +9,6 @@ from awx.main.utils.filters import SmartFilter, ExternalLoggerEnabled
# Django # Django
from django.db.models import Q from django.db.models import Q
import six
@pytest.mark.parametrize('params, logger_name, expected', [ @pytest.mark.parametrize('params, logger_name, expected', [
@@ -111,7 +110,7 @@ class TestSmartFilterQueryFromString():
]) ])
def test_query_generated(self, mock_get_host_model, filter_string, q_expected): def test_query_generated(self, mock_get_host_model, filter_string, q_expected):
q = SmartFilter.query_from_string(filter_string) q = SmartFilter.query_from_string(filter_string)
assert six.text_type(q) == six.text_type(q_expected) assert str(q) == str(q_expected)
@pytest.mark.parametrize("filter_string", [ @pytest.mark.parametrize("filter_string", [
'ansible_facts__facts__facts__blank=' 'ansible_facts__facts__facts__blank='
@@ -138,7 +137,7 @@ class TestSmartFilterQueryFromString():
]) ])
def test_unicode(self, mock_get_host_model, filter_string, q_expected): def test_unicode(self, mock_get_host_model, filter_string, q_expected):
q = SmartFilter.query_from_string(filter_string) q = SmartFilter.query_from_string(filter_string)
assert six.text_type(q) == six.text_type(q_expected) assert str(q) == str(q_expected)
@pytest.mark.parametrize("filter_string,q_expected", [ @pytest.mark.parametrize("filter_string,q_expected", [
('(a=b)', Q(**{u"a": u"b"})), ('(a=b)', Q(**{u"a": u"b"})),
@@ -154,7 +153,7 @@ class TestSmartFilterQueryFromString():
]) ])
def test_boolean_parenthesis(self, mock_get_host_model, filter_string, q_expected): def test_boolean_parenthesis(self, mock_get_host_model, filter_string, q_expected):
q = SmartFilter.query_from_string(filter_string) q = SmartFilter.query_from_string(filter_string)
assert six.text_type(q) == six.text_type(q_expected) assert str(q) == str(q_expected)
@pytest.mark.parametrize("filter_string,q_expected", [ @pytest.mark.parametrize("filter_string,q_expected", [
('ansible_facts__a__b__c[]=3', Q(**{u"ansible_facts__contains": {u"a": {u"b": {u"c": [3]}}}})), ('ansible_facts__a__b__c[]=3', Q(**{u"ansible_facts__contains": {u"a": {u"b": {u"c": [3]}}}})),
@@ -177,7 +176,7 @@ class TestSmartFilterQueryFromString():
]) ])
def test_contains_query_generated(self, mock_get_host_model, filter_string, q_expected): def test_contains_query_generated(self, mock_get_host_model, filter_string, q_expected):
q = SmartFilter.query_from_string(filter_string) q = SmartFilter.query_from_string(filter_string)
assert six.text_type(q) == six.text_type(q_expected) assert str(q) == str(q_expected)
@pytest.mark.parametrize("filter_string,q_expected", [ @pytest.mark.parametrize("filter_string,q_expected", [
#('a__b__c[]="true"', Q(**{u"a__b__c__contains": u"\"true\""})), #('a__b__c[]="true"', Q(**{u"a__b__c__contains": u"\"true\""})),
@@ -187,7 +186,7 @@ class TestSmartFilterQueryFromString():
]) ])
def test_contains_query_generated_unicode(self, mock_get_host_model, filter_string, q_expected): def test_contains_query_generated_unicode(self, mock_get_host_model, filter_string, q_expected):
q = SmartFilter.query_from_string(filter_string) q = SmartFilter.query_from_string(filter_string)
assert six.text_type(q) == six.text_type(q_expected) assert str(q) == str(q_expected)
@pytest.mark.parametrize("filter_string,q_expected", [ @pytest.mark.parametrize("filter_string,q_expected", [
('ansible_facts__a=null', Q(**{u"ansible_facts__contains": {u"a": None}})), ('ansible_facts__a=null', Q(**{u"ansible_facts__contains": {u"a": None}})),
@@ -195,7 +194,7 @@ class TestSmartFilterQueryFromString():
]) ])
def test_contains_query_generated_null(self, mock_get_host_model, filter_string, q_expected): def test_contains_query_generated_null(self, mock_get_host_model, filter_string, q_expected):
q = SmartFilter.query_from_string(filter_string) q = SmartFilter.query_from_string(filter_string)
assert six.text_type(q) == six.text_type(q_expected) assert str(q) == str(q_expected)
@pytest.mark.parametrize("filter_string,q_expected", [ @pytest.mark.parametrize("filter_string,q_expected", [
@@ -213,7 +212,7 @@ class TestSmartFilterQueryFromString():
]) ])
def test_search_related_fields(self, mock_get_host_model, filter_string, q_expected): def test_search_related_fields(self, mock_get_host_model, filter_string, q_expected):
q = SmartFilter.query_from_string(filter_string) q = SmartFilter.query_from_string(filter_string)
assert six.text_type(q) == six.text_type(q_expected) assert str(q) == str(q_expected)
''' '''
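All of these assertions compare Django Q objects by their text form; str(q) on py3 produces the same repr six.text_type(q) did, and u'' prefixes are irrelevant because every literal is unicode. A small illustration:

    from django.db.models import Q

    # both spellings build the identical query, so their reprs match,
    # e.g. "(AND: ('name', 'x'))"
    assert str(Q(name='x')) == str(Q(**{u'name': u'x'}))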


@@ -1,4 +1,3 @@
import six
from awx.main.models import Job, JobEvent from awx.main.models import Job, JobEvent
@@ -15,7 +14,7 @@ def test_log_from_job_event_object():
# Check entire body of data for any exceptions from getattr on event object # Check entire body of data for any exceptions from getattr on event object
for fd in data_for_log: for fd in data_for_log:
if not isinstance(data_for_log[fd], six.string_types): if not isinstance(data_for_log[fd], str):
continue continue
assert 'Exception' not in data_for_log[fd], 'Exception delivered in data: {}'.format(data_for_log[fd]) assert 'Exception' not in data_for_log[fd], 'Exception delivered in data: {}'.format(data_for_log[fd])


@@ -14,7 +14,6 @@ import urllib.parse
import threading import threading
import contextlib import contextlib
import tempfile import tempfile
import six
import psutil import psutil
from functools import reduce, wraps from functools import reduce, wraps
from io import StringIO from io import StringIO
@@ -82,7 +81,7 @@ def get_object_or_403(klass, *args, **kwargs):
def to_python_boolean(value, allow_none=False): def to_python_boolean(value, allow_none=False):
value = six.text_type(value) value = str(value)
if value.lower() in ('true', '1', 't'): if value.lower() in ('true', '1', 't'):
return True return True
elif value.lower() in ('false', '0', 'f'): elif value.lower() in ('false', '0', 'f'):
@@ -90,7 +89,7 @@ def to_python_boolean(value, allow_none=False):
elif allow_none and value.lower() in ('none', 'null'): elif allow_none and value.lower() in ('none', 'null'):
return None return None
else: else:
raise ValueError(_(u'Unable to convert "%s" to boolean') % six.text_type(value)) raise ValueError(_(u'Unable to convert "%s" to boolean') % value)
def region_sorting(region): def region_sorting(region):
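to_python_boolean summarizes the pattern nicely: str(value) now normalizes ints, bools, and text uniformly, with no text_type special-casing. A self-contained sketch of the function as it reads after this hunk (the gettext wrapper around the error message is omitted):

    def to_python_boolean(value, allow_none=False):
        value = str(value)
        if value.lower() in ('true', '1', 't'):
            return True
        elif value.lower() in ('false', '0', 'f'):
            return False
        elif allow_none and value.lower() in ('none', 'null'):
            return None
        raise ValueError('Unable to convert "%s" to boolean' % value)

    assert to_python_boolean(1) is True
    assert to_python_boolean('F') is False
    assert to_python_boolean('null', allow_none=True) is None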
@@ -339,7 +338,7 @@ def update_scm_url(scm_type, url, username=True, password=True,
netloc = u'' netloc = u''
netloc = u'@'.join(filter(None, [netloc, parts.hostname])) netloc = u'@'.join(filter(None, [netloc, parts.hostname]))
if parts.port: if parts.port:
netloc = u':'.join([netloc, six.text_type(parts.port)]) netloc = u':'.join([netloc, str(parts.port)])
new_url = urllib.parse.urlunsplit([parts.scheme, netloc, parts.path, new_url = urllib.parse.urlunsplit([parts.scheme, netloc, parts.path,
parts.query, parts.fragment]) parts.query, parts.fragment])
if scp_format and parts.scheme == 'git+ssh': if scp_format and parts.scheme == 'git+ssh':
@@ -376,7 +375,7 @@ def _convert_model_field_for_display(obj, field_name, password_fields=None):
if password_fields is None: if password_fields is None:
password_fields = set(getattr(type(obj), 'PASSWORD_FIELDS', [])) | set(['password']) password_fields = set(getattr(type(obj), 'PASSWORD_FIELDS', [])) | set(['password'])
if field_name in password_fields or ( if field_name in password_fields or (
isinstance(field_val, six.string_types) and isinstance(field_val, str) and
field_val.startswith('$encrypted$') field_val.startswith('$encrypted$')
): ):
return u'hidden' return u'hidden'
@@ -623,7 +622,7 @@ def parse_yaml_or_json(vars_str, silent_failure=True):
''' '''
if isinstance(vars_str, dict): if isinstance(vars_str, dict):
return vars_str return vars_str
elif isinstance(vars_str, six.string_types) and vars_str == '""': elif isinstance(vars_str, str) and vars_str == '""':
return {} return {}
try: try:


@@ -3,7 +3,6 @@ import hashlib
import logging import logging
from collections import namedtuple from collections import namedtuple
import six
from cryptography.fernet import Fernet, InvalidToken from cryptography.fernet import Fernet, InvalidToken
from cryptography.hazmat.backends import default_backend from cryptography.hazmat.backends import default_backend
from django.utils.encoding import smart_str, smart_bytes from django.utils.encoding import smart_str, smart_bytes
@@ -144,6 +143,6 @@ def encrypt_dict(data, fields):
def is_encrypted(value): def is_encrypted(value):
if not isinstance(value, six.string_types): if not isinstance(value, str):
return False return False
return value.startswith('$encrypted$') and len(value) > len('$encrypted$') return value.startswith('$encrypted$') and len(value) > len('$encrypted$')
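Because bytes and str are distinct types on Python 3, the new isinstance(value, str) check deliberately rejects bytes, exactly as six.string_types did on py3. A sketch of the function as it reads after this hunk:

    def is_encrypted(value):
        if not isinstance(value, str):
            return False
        return value.startswith('$encrypted$') and len(value) > len('$encrypted$')

    assert is_encrypted('$encrypted$AESCBC$payload') is True
    assert is_encrypted('$encrypted$') is False       # no payload after the prefix
    assert is_encrypted(b'$encrypted$x') is False     # bytes are not str on py3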


@@ -10,7 +10,6 @@ from pyparsing import (
) )
from logging import Filter, _nameToLevel from logging import Filter, _nameToLevel
import six
from django.apps import apps from django.apps import apps
from django.db import models from django.db import models
@@ -154,12 +153,12 @@ class SmartFilter(object):
self.result = Host.objects.filter(**kwargs) self.result = Host.objects.filter(**kwargs)
def strip_quotes_traditional_logic(self, v): def strip_quotes_traditional_logic(self, v):
if type(v) is six.text_type and v.startswith('"') and v.endswith('"'): if type(v) is str and v.startswith('"') and v.endswith('"'):
return v[1:-1] return v[1:-1]
return v return v
def strip_quotes_json_logic(self, v): def strip_quotes_json_logic(self, v):
if type(v) is six.text_type and v.startswith('"') and v.endswith('"') and v != u'"null"': if type(v) is str and v.startswith('"') and v.endswith('"') and v != u'"null"':
return v[1:-1] return v[1:-1]
return v return v
@@ -238,7 +237,7 @@ class SmartFilter(object):
# value # value
# ="something" # ="something"
if t_len > (v_offset + 2) and t[v_offset] == "\"" and t[v_offset + 2] == "\"": if t_len > (v_offset + 2) and t[v_offset] == "\"" and t[v_offset + 2] == "\"":
v = u'"' + six.text_type(t[v_offset + 1]) + u'"' v = u'"' + str(t[v_offset + 1]) + u'"'
#v = t[v_offset + 1] #v = t[v_offset + 1]
# empty "" # empty ""
elif t_len > (v_offset + 1): elif t_len > (v_offset + 1):
@@ -307,9 +306,9 @@ class SmartFilter(object):
* handle key with __ in it * handle key with __ in it
''' '''
filter_string_raw = filter_string filter_string_raw = filter_string
filter_string = six.text_type(filter_string) filter_string = str(filter_string)
unicode_spaces = list(set(six.text_type(c) for c in filter_string if c.isspace())) unicode_spaces = list(set(str(c) for c in filter_string if c.isspace()))
unicode_spaces_other = unicode_spaces + [u'(', u')', u'=', u'"'] unicode_spaces_other = unicode_spaces + [u'(', u')', u'=', u'"']
atom = CharsNotIn(unicode_spaces_other) atom = CharsNotIn(unicode_spaces_other)
atom_inside_quotes = CharsNotIn(u'"') atom_inside_quotes = CharsNotIn(u'"')


@@ -7,7 +7,6 @@ import json
import time import time
import logging import logging
import six
from django.conf import settings from django.conf import settings
@@ -40,7 +39,7 @@ class LogstashFormatter(LogstashFormatterVersion1):
data = copy(raw_data['ansible_facts']) data = copy(raw_data['ansible_facts'])
else: else:
data = copy(raw_data) data = copy(raw_data)
if isinstance(data, six.string_types): if isinstance(data, str):
data = json.loads(data) data = json.loads(data)
data_for_log = {} data_for_log = {}
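The formatter accepts either an already-decoded dict or a JSON string, so a plain str check is all that is needed on py3. An illustrative round trip:

    import json

    raw = '{"ansible_facts": {"os_family": "RedHat"}}'
    # decode only when the payload arrives as text
    data = json.loads(raw) if isinstance(raw, str) else raw
    assert data['ansible_facts']['os_family'] == 'RedHat'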


@@ -8,7 +8,6 @@ import requests
import time import time
import socket import socket
import select import select
import six
from urllib import parse as urlparse from urllib import parse as urlparse
from concurrent.futures import ThreadPoolExecutor from concurrent.futures import ThreadPoolExecutor
from requests.exceptions import RequestException from requests.exceptions import RequestException
@@ -211,7 +210,7 @@ def _encode_payload_for_socket(payload):
encoded_payload = payload encoded_payload = payload
if isinstance(encoded_payload, dict): if isinstance(encoded_payload, dict):
encoded_payload = json.dumps(encoded_payload, ensure_ascii=False) encoded_payload = json.dumps(encoded_payload, ensure_ascii=False)
if isinstance(encoded_payload, six.text_type): if isinstance(encoded_payload, str):
encoded_payload = encoded_payload.encode('utf-8') encoded_payload = encoded_payload.encode('utf-8')
return encoded_payload return encoded_payload
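This is the one spot in the hunk where the py3 str/bytes split genuinely matters: text must become UTF-8 bytes before it is written to a socket. A self-contained sketch of the helper:

    import json

    def _encode_payload_for_socket(payload):
        # dicts are serialized to JSON text first
        if isinstance(payload, dict):
            payload = json.dumps(payload, ensure_ascii=False)
        # text is encoded to bytes; bytes pass through untouched
        if isinstance(payload, str):
            payload = payload.encode('utf-8')
        return payload

    assert _encode_payload_for_socket({'msg': 'Iñtërnâtiônàl'}) == \
        '{"msg": "Iñtërnâtiônàl"}'.encode('utf-8')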
@@ -237,7 +236,7 @@ class TCPHandler(BaseHandler):
except Exception as e: except Exception as e:
ret = SocketResult(False, "Error sending message from %s: %s" % ret = SocketResult(False, "Error sending message from %s: %s" %
(TCPHandler.__name__, (TCPHandler.__name__,
' '.join(six.text_type(arg) for arg in e.args))) ' '.join(str(arg) for arg in e.args)))
logger.exception(ret.reason) logger.exception(ret.reason)
finally: finally:
sok.close() sok.close()


@@ -1,5 +1,5 @@
# Python # Python
import six import urllib.parse
from collections import deque from collections import deque
# Django # Django
from django.db import models from django.db import models
@@ -12,7 +12,7 @@ NAMED_URL_RES_INNER_DILIMITER = "+"
NAMED_URL_RES_DILIMITER_ENCODE = "%2B" NAMED_URL_RES_DILIMITER_ENCODE = "%2B"
URL_PATH_RESERVED_CHARSET = {} URL_PATH_RESERVED_CHARSET = {}
for c in ';/?:@=&[]': for c in ';/?:@=&[]':
URL_PATH_RESERVED_CHARSET[c] = six.moves.urllib.parse.quote(c, safe='') URL_PATH_RESERVED_CHARSET[c] = urllib.parse.quote(c, safe='')
FK_NAME = 0 FK_NAME = 0
NEXT_NODE = 1 NEXT_NODE = 1
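six.moves.urllib.parse.quote is exactly urllib.parse.quote on Python 3, so the reserved-character table is unchanged in content:

    import urllib.parse

    table = {c: urllib.parse.quote(c, safe='') for c in ';/?:@=&[]'}
    assert table['='] == '%3D' and table['/'] == '%2F'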
@@ -126,7 +126,7 @@ class GraphNode(object):
for attr_name, attr_value in zip(stack[-1].fields, named_url_parts): for attr_name, attr_value in zip(stack[-1].fields, named_url_parts):
attr_name = ("__%s" % attr_name) if evolving_prefix else attr_name attr_name = ("__%s" % attr_name) if evolving_prefix else attr_name
if isinstance(attr_value, str): if isinstance(attr_value, str):
attr_value = six.moves.urllib.parse.unquote(attr_value) attr_value = urllib.parse.unquote(attr_value)
kwargs[evolving_prefix + attr_name] = attr_value kwargs[evolving_prefix + attr_name] = attr_value
idx += 1 idx += 1
if stack[-1].counter >= len(stack[-1].adj_list): if stack[-1].counter >= len(stack[-1].adj_list):


@@ -1,5 +1,4 @@
import re import re
import six
import yaml import yaml
@@ -9,7 +8,7 @@ __all__ = ['safe_dump', 'SafeLoader']
class SafeStringDumper(yaml.SafeDumper): class SafeStringDumper(yaml.SafeDumper):
def represent_data(self, value): def represent_data(self, value):
if isinstance(value, six.string_types): if isinstance(value, str):
return self.represent_scalar('!unsafe', value) return self.represent_scalar('!unsafe', value)
return super(SafeStringDumper, self).represent_data(value) return super(SafeStringDumper, self).represent_data(value)
@@ -17,7 +16,7 @@ class SafeStringDumper(yaml.SafeDumper):
class SafeLoader(yaml.Loader): class SafeLoader(yaml.Loader):
def construct_yaml_unsafe(self, node): def construct_yaml_unsafe(self, node):
class UnsafeText(six.text_type): class UnsafeText(str):
__UNSAFE__ = True __UNSAFE__ = True
node = UnsafeText(self.construct_scalar(node)) node = UnsafeText(self.construct_scalar(node))
return node return node
@@ -75,7 +74,7 @@ def sanitize_jinja(arg):
""" """
For some string, prevent usage of Jinja-like flags For some string, prevent usage of Jinja-like flags
""" """
if isinstance(arg, six.string_types): if isinstance(arg, str):
# If the argument looks like it contains Jinja expressions # If the argument looks like it contains Jinja expressions
# {{ x }} ... # {{ x }} ...
if re.search(r'\{\{[^}]+}}', arg) is not None: if re.search(r'\{\{[^}]+}}', arg) is not None:
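Two small patterns in this file: subclassing str directly replaces the old six.text_type subclass, and the Jinja guard now only needs isinstance(arg, str). A sketch of the marker type (the literal is illustrative):

    class UnsafeText(str):
        __UNSAFE__ = True

    t = UnsafeText('{{ lookup("env", "HOME") }}')
    assert isinstance(t, str) and t.__UNSAFE__ is True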


@@ -4,7 +4,6 @@
import os import os
import re # noqa import re # noqa
import sys import sys
import six
from datetime import timedelta from datetime import timedelta
# global settings # global settings
@@ -499,11 +498,11 @@ CELERYBEAT_SCHEDULE = {
AWX_INCONSISTENT_TASK_INTERVAL = 60 * 3 AWX_INCONSISTENT_TASK_INTERVAL = 60 * 3
AWX_CELERY_QUEUES_STATIC = [ AWX_CELERY_QUEUES_STATIC = [
six.text_type(CELERY_DEFAULT_QUEUE), CELERY_DEFAULT_QUEUE,
] ]
AWX_CELERY_BCAST_QUEUES_STATIC = [ AWX_CELERY_BCAST_QUEUES_STATIC = [
six.text_type('tower_broadcast_all'), 'tower_broadcast_all',
] ]
ASGI_AMQP = { ASGI_AMQP = {


@@ -6,7 +6,6 @@ import logging
import uuid import uuid
import ldap import ldap
import six
# Django # Django
from django.dispatch import receiver from django.dispatch import receiver
@@ -258,7 +257,7 @@ class TowerSAMLIdentityProvider(BaseSAMLIdentityProvider):
def get_user_permanent_id(self, attributes): def get_user_permanent_id(self, attributes):
uid = attributes[self.conf.get('attr_user_permanent_id', OID_USERID)] uid = attributes[self.conf.get('attr_user_permanent_id', OID_USERID)]
if isinstance(uid, six.string_types): if isinstance(uid, str):
return uid return uid
return uid[0] return uid[0]
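As the code above shows, a SAML attribute value may arrive as a bare string or as a list of strings, hence the isinstance(uid, str) branch. Sketch with an illustrative attribute key:

    def permanent_id(attributes, key):  # illustrative stand-in
        uid = attributes[key]
        if isinstance(uid, str):
            return uid
        return uid[0]

    assert permanent_id({'uid': 'u123'}, 'uid') == 'u123'
    assert permanent_id({'uid': ['u123', 'u456']}, 'uid') == 'u123'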
@@ -277,7 +276,7 @@ class TowerSAMLIdentityProvider(BaseSAMLIdentityProvider):
logger.warn("Could not map user detail '%s' from SAML attribute '%s'; " logger.warn("Could not map user detail '%s' from SAML attribute '%s'; "
"update SOCIAL_AUTH_SAML_ENABLED_IDPS['%s']['%s'] with the correct SAML attribute.", "update SOCIAL_AUTH_SAML_ENABLED_IDPS['%s']['%s'] with the correct SAML attribute.",
conf_key[5:], key, self.name, conf_key) conf_key[5:], key, self.name, conf_key)
return six.text_type(value) if value is not None else value return str(value) if value is not None else value
class SAMLAuth(BaseSAMLAuth): class SAMLAuth(BaseSAMLAuth):
@@ -330,10 +329,10 @@ def _update_m2m_from_groups(user, ldap_user, rel, opts, remove=True):
elif opts is True: elif opts is True:
should_add = True should_add = True
else: else:
if isinstance(opts, six.string_types): if isinstance(opts, str):
opts = [opts] opts = [opts]
for group_dn in opts: for group_dn in opts:
if not isinstance(group_dn, six.string_types): if not isinstance(group_dn, str):
continue continue
if ldap_user._get_groups().is_member_of(group_dn): if ldap_user._get_groups().is_member_of(group_dn):
should_add = True should_add = True
@@ -366,9 +365,9 @@ def on_populate_user(sender, **kwargs):
field_len = len(getattr(user, field)) field_len = len(getattr(user, field))
if field_len > max_len: if field_len > max_len:
setattr(user, field, getattr(user, field)[:max_len]) setattr(user, field, getattr(user, field)[:max_len])
logger.warn(six.text_type( logger.warn(
'LDAP user {} has {} > max {} characters' 'LDAP user {} has {} > max {} characters'.format(user.username, field, max_len)
).format(user.username, field, max_len)) )
# Update organization membership based on group memberships. # Update organization membership based on group memberships.
org_map = getattr(backend.settings, 'ORGANIZATION_MAP', {}) org_map = getattr(backend.settings, 'ORGANIZATION_MAP', {})


@@ -5,7 +5,6 @@
import urllib.parse import urllib.parse
# Six # Six
import six
# Django # Django
from django.conf import settings from django.conf import settings
@@ -83,7 +82,7 @@ class SocialAuthMiddleware(SocialAuthExceptionMiddleware):
return redirect(url) return redirect(url)
def get_message(self, request, exception): def get_message(self, request, exception):
msg = six.text_type(exception) msg = str(exception)
if msg and msg[-1] not in '.?!': if msg and msg[-1] not in '.?!':
msg = msg + '.' msg = msg + '.'
return msg return msg
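str(exception) is the py3 spelling of six.text_type(exception); the trailing-punctuation normalization is unchanged. Self-contained sketch of the method body after this hunk:

    def get_message(exception):
        msg = str(exception)
        if msg and msg[-1] not in '.?!':
            msg = msg + '.'
        return msg

    assert get_message(ValueError('bad token')) == 'bad token.'
    assert get_message(ValueError('expired?')) == 'expired?'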


@@ -5,7 +5,6 @@
import re import re
import logging import logging
import six
# Python Social Auth # Python Social Auth
from social_core.exceptions import AuthException from social_core.exceptions import AuthException
@@ -67,10 +66,10 @@ def _update_m2m_from_expression(user, rel, expr, remove=True):
elif expr is True: elif expr is True:
should_add = True should_add = True
else: else:
if isinstance(expr, (six.string_types, type(re.compile('')))): if isinstance(expr, (str, type(re.compile('')))):
expr = [expr] expr = [expr]
for ex in expr: for ex in expr:
if isinstance(ex, six.string_types): if isinstance(ex, str):
if user.username == ex or user.email == ex: if user.username == ex or user.email == ex:
should_add = True should_add = True
elif isinstance(ex, type(re.compile(''))): elif isinstance(ex, type(re.compile(''))):
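An expression here may be True/False, a plain string, a compiled regex, or a list of those; on py3 the text test is isinstance(ex, str), and the compiled-pattern type spelled type(re.compile('')) is exposed directly as re.Pattern from Python 3.8 on:

    import re

    ex = re.compile(r'^admin-.*')
    assert isinstance(ex, type(re.compile('')))   # the diff's spelling
    assert isinstance(ex, re.Pattern)             # equivalent on Python 3.8+
    assert ex.match('admin-bob') is not None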


@@ -1,6 +1,6 @@
#!/usr/bin/env python #!/usr/bin/env python
import argparse import argparse
import six import urllib.parse
import requests import requests
@@ -10,7 +10,7 @@ NAMED_URL_RES_INNER_DILIMITER = "-"
NAMED_URL_RES_DILIMITER_ENCODE = "%2D" NAMED_URL_RES_DILIMITER_ENCODE = "%2D"
URL_PATH_RESERVED_CHARSET = {} URL_PATH_RESERVED_CHARSET = {}
for c in ';/?:@=&[]': for c in ';/?:@=&[]':
URL_PATH_RESERVED_CHARSET[c] = six.moves.urllib.parse.quote(c, safe='') URL_PATH_RESERVED_CHARSET[c] = urllib.parse.quote(c, safe='')
def _get_named_url_graph(url, auth): def _get_named_url_graph(url, auth):