Mirror of https://github.com/ansible/awx.git
Modernize Python 2 code to get ready for Python 3
parent e982f6ed06
commit e18838a4b7
@@ -33,7 +33,7 @@ class OrderedDictLoader(yaml.SafeLoader):
             key = self.construct_object(key_node, deep=deep)
             try:
                 hash(key)
-            except TypeError, exc:
+            except TypeError as exc:
                 raise yaml.constructor.ConstructorError(
                     "while constructing a mapping", node.start_mark,
                     "found unacceptable key (%s)" % exc, key_node.start_mark
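
The change above is the core pattern of this commit: `except ExceptionType, name:` is Python 2-only syntax that Python 3 rejects outright, while `except ExceptionType as name:` parses on Python 2.6+ and Python 3 alike. A minimal standalone sketch of the portable spelling (the unhashable-dict example is illustrative, not AWX code):

try:
    hash({})                  # dicts are unhashable, so this raises TypeError
except TypeError as exc:      # 'except TypeError, exc:' would be a SyntaxError on Python 3
    print('found unacceptable key (%s)' % exc)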
@@ -1173,7 +1173,7 @@ class InventorySerializer(BaseSerializerWithVariables):
         if host_filter:
             try:
                 SmartFilter().query_from_string(host_filter)
-            except RuntimeError, e:
+            except RuntimeError as e:
                 raise models.base.ValidationError(e)
         return host_filter
@@ -2042,7 +2042,7 @@ class InventoryDetail(ControlledByScmMixin, RetrieveUpdateDestroyAPIView):
         try:
             obj.schedule_deletion(getattr(request.user, 'id', None))
             return Response(status=status.HTTP_202_ACCEPTED)
-        except RuntimeError, e:
+        except RuntimeError as e:
             return Response(dict(error=_("{0}".format(e))), status=status.HTTP_400_BAD_REQUEST)
@@ -275,7 +275,7 @@ class SettingsWrapper(UserSettingsHolder):
                 setting_ids[setting.key] = setting.id
                 try:
                     value = decrypt_field(setting, 'value')
-                except ValueError, e:
+                except ValueError as e:
                     #TODO: Remove in Tower 3.3
                     logger.debug('encountered error decrypting field: %s - attempting fallback to old', e)
                     value = old_decrypt_field(setting, 'value')
@@ -47,7 +47,7 @@ def open_fifo_write(path, data):
     This blocks the thread until an external process (such as ssh-agent)
     reads data from the pipe.
     '''
-    os.mkfifo(path, 0600)
+    os.mkfifo(path, 0o600)
     thread.start_new_thread(lambda p, d: open(p, 'w').write(d), (path, data))
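
Bare leading-zero integer literals such as 0600 are octal in Python 2 but a SyntaxError in Python 3; the 0o prefix is accepted by Python 2.6+ and Python 3 and denotes the same value. A small sanity check, purely illustrative:

import stat

# 0o600 is read/write for the owner only; the old spelling 0600
# would not even parse under a Python 3 interpreter.
assert 0o600 == 384
assert 0o600 == stat.S_IRUSR | stat.S_IWUSR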
@@ -356,7 +356,7 @@ class SmartFilterField(models.TextField):
         value = urllib.unquote(value)
         try:
             SmartFilter().query_from_string(value)
-        except RuntimeError, e:
+        except RuntimeError as e:
             raise models.base.ValidationError(e)
         return super(SmartFilterField, self).get_prep_value(value)
@@ -17,7 +17,7 @@ class Command(BaseCommand):
 
     def handle(self, *args, **kwargs):
         if getattr(settings, 'AWX_ISOLATED_PRIVATE_KEY', False):
-            print settings.AWX_ISOLATED_PUBLIC_KEY
+            print(settings.AWX_ISOLATED_PUBLIC_KEY)
             return
 
         key = rsa.generate_private_key(
@@ -41,4 +41,4 @@ class Command(BaseCommand):
             ) + " generated-by-awx@%s" % datetime.datetime.utcnow().isoformat()
         )
         pemfile.save()
-        print pemfile.value
+        print(pemfile.value)
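
Both hunks in this management command switch from the Python 2 print statement to the print() function, the only form Python 3 accepts. For code that still has to run under Python 2, a common compatibility idiom (not something this commit adds, shown only as a hedged sketch) is the __future__ import:

from __future__ import print_function   # makes print() a function on Python 2.6+ as well

value = 'ssh-rsa AAAA... example'       # illustrative stand-in for pemfile.value
print(value)
print('public key:', value, end='\n')   # keyword arguments only exist in the function form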
@@ -41,10 +41,9 @@ class Command(BaseCommand):
             run.open_fifo_write(ssh_key_path, settings.AWX_ISOLATED_PRIVATE_KEY)
             args = run.wrap_args_with_ssh_agent(args, ssh_key_path, ssh_auth_sock)
             try:
-                print ' '.join(args)
+                print(' '.join(args))
                 subprocess.check_call(args)
             except subprocess.CalledProcessError as e:
                 sys.exit(e.returncode)
             finally:
                 shutil.rmtree(path)
@@ -46,6 +46,6 @@ class CallbackQueueDispatcher(object):
                     delivery_mode="persistent" if settings.PERSISTENT_CALLBACK_MESSAGES else "transient",
                     routing_key=self.connection_queue)
                 return
-            except Exception, e:
+            except Exception as e:
                 self.logger.info('Publish Job Event Exception: %r, retry=%d', e,
                                  retry_count, exc_info=True)
@@ -577,5 +577,5 @@ def delete_inventory_for_org(sender, instance, **kwargs):
     for inventory in inventories:
         try:
             inventory.schedule_deletion(user_id=getattr(user, 'id', None))
-        except RuntimeError, e:
+        except RuntimeError as e:
             logger.debug(e)
@@ -17,19 +17,19 @@ class Migration(DataMigration):
                 obj1 = eval(obj_type + ".objects.get(id=" + str(activity_stream_object.object1_id) + ")")
                 if hasattr(activity_stream_object, activity_stream_object.object1):
                     getattr(activity_stream_object, activity_stream_object.object1).add(obj1)
-            except ObjectDoesNotExist, e:
+            except ObjectDoesNotExist as e:
                 print("Object 1 for AS id=%s does not exist. (Object Type: %s, id: %s" % (str(activity_stream_object.id),
                                                                                            activity_stream_object.object1_type,
                                                                                            str(activity_stream_object.object1_id)))
                 continue
             if activity_stream_object.operation in ('associate', 'disassociate'):
                 try:
                     obj_type = "orm." + activity_stream_object.object2_type.split(".")[-1]
                     if obj_type == 'orm.User':
                         obj_type = 'orm["auth.User"]'
                     obj2 = eval(obj_type + ".objects.get(id=" + str(activity_stream_object.object2_id) + ")")
                     getattr(activity_stream_object, activity_stream_object.object2).add(obj2)
-                except ObjectDoesNotExist, e:
+                except ObjectDoesNotExist as e:
                     print("Object 2 for AS id=%s does not exist. (Object Type: %s, id: %s" % (str(activity_stream_object.id),
                                                                                                activity_stream_object.object2_type,
                                                                                                str(activity_stream_object.object2_id)))
@@ -1135,7 +1135,7 @@ class RunJob(BaseTask):
         # job and visible inside the proot environment (when enabled).
         cp_dir = os.path.join(kwargs['private_data_dir'], 'cp')
         if not os.path.exists(cp_dir):
-            os.mkdir(cp_dir, 0700)
+            os.mkdir(cp_dir, 0o700)
         env['ANSIBLE_SSH_CONTROL_PATH'] = os.path.join(cp_dir, '%%h%%p%%r')
 
         # Allow the inventory script to include host variables inline via ['_meta']['hostvars'].
@@ -172,7 +172,7 @@ def test_utc_until_in_the_past(job_template):
 
 
 @pytest.mark.django_db
-@mock.patch('awx.main.models.schedules.now', lambda: datetime(2030, 03, 05, tzinfo=pytz.utc))
+@mock.patch('awx.main.models.schedules.now', lambda: datetime(2030, 3, 5, tzinfo=pytz.utc))
 def test_dst_phantom_hour(job_template):
     # The DST period in the United States begins at 02:00 (2 am) local time, so
     # the hour from 2:00:00 to 2:59:59 does not exist in the night of the
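
This hunk fixes a related literal problem: in Python 2, 03 and 05 are bare-leading-zero (octal-style) literals that happen to equal 3 and 5, so the test worked, but Python 3 removes that form and refuses to parse it. A self-contained check of the replacement spelling (pytz is assumed to be installed, as in the patched test):

from datetime import datetime

import pytz

# datetime(2030, 03, 05, ...) is a SyntaxError on Python 3; the unpadded
# arguments mean the same date on both interpreters.
moment = datetime(2030, 3, 5, tzinfo=pytz.utc)
assert (moment.year, moment.month, moment.day) == (2030, 3, 5)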
@@ -69,7 +69,7 @@ def parse_configuration():
         errors.append("Missing TOWER_INVENTORY in environment")
     if errors:
         raise RuntimeError("\n".join(errors))
 
     return dict(tower_host=host_name,
                 tower_user=username,
                 tower_pass=password,
@@ -103,9 +103,9 @@ def read_tower_inventory(tower_host, tower_user, tower_pass, inventory, license_
             return response.json()
         json_reason = response.json()
         reason = json_reason.get('detail', 'Retrieving Tower Inventory Failed')
-    except requests.ConnectionError, e:
+    except requests.ConnectionError as e:
         reason = "Connection to remote host failed: {}".format(e)
-    except json.JSONDecodeError, e:
+    except json.JSONDecodeError as e:
         reason = "Failed to parse json from host: {}".format(e)
     raise RuntimeError(reason)
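
One hedged caveat on the second handler: json.JSONDecodeError only exists in the standard-library json module from Python 3.5 onward (Python 2's json raises a plain ValueError), and on Python 3 it is itself a ValueError subclass. Where JSONDecodeError may be unavailable, catching ValueError covers both cases; a minimal sketch, not the script's actual code:

import json

def describe_failure(text):
    # ValueError covers Python 2's json module and, because
    # json.JSONDecodeError subclasses ValueError, Python 3.5+ as well.
    try:
        return json.loads(text)
    except ValueError as e:
        return "Failed to parse json from host: {}".format(e)

print(describe_failure('{"detail": "ok"}'))
print(describe_failure('not json'))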
@@ -89,7 +89,7 @@ options = vars(options)
 
 
 if options['preset']:
-    print ' Using preset data numbers set ' + str(options['preset'])
+    print(' Using preset data numbers set ' + str(options['preset']))
     # Read the numbers of resources from presets file, if provided
     presets_filename = os.path.abspath(os.path.join(
         os.path.dirname(os.path.abspath(__file__)), 'presets.tsv'))
tools/rdb.py
@@ -183,7 +183,7 @@ def listen():
     def _consume(queue):
         sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
         sock.bind(('0.0.0.0', 6899))
-        print 'listening for rdb notifications on :6899...'
+        print('listening for rdb notifications on :6899...')
         while True:
             r, w, x = select.select([sock], [], [])
             for i in r:
@@ -201,13 +201,13 @@ def listen():
             if port == 'q':
                 break
             port = int(port)
-            print 'opening telnet session at localhost:%d...' % port
+            print('opening telnet session at localhost:%d...' % port)
             telnet(port)
-            print 'listening for rdb notifications on :6899...'
+            print('listening for rdb notifications on :6899...')
         except Empty:
             pass
         except KeyboardInterrupt:
-            print 'got Ctrl-C'
+            print('got Ctrl-C')
             queue.put('q')
@@ -218,18 +218,18 @@ def telnet(port):
     try:
         s.connect(('0.0.0.0', port))
     except Exception:
-        print 'unable to connect'
+        print('unable to connect')
         return
-    print 'connected to 0.0.0.0:%d' % port
+    print('connected to 0.0.0.0:%d' % port)
 
     while True:
         socket_list = [sys.stdin, s]
-        r, w, e = select.select(socket_list , [], [])
+        r, w, e = select.select(socket_list, [], [])
         for sock in r:
             if sock == s:
                 data = sock.recv(4096)
                 if not data:
-                    print 'connection closed'
+                    print('connection closed')
                     return
                 else:
                     sys.stdout.write(data)