Merge pull request #601 from ryanpetrello/flake8-fixes

backport a few fixes from awx to address busted ci
Ryan Petrello 2017-12-01 12:48:06 -05:00 committed by GitHub
commit 8d162f9044
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
18 changed files with 34 additions and 35 deletions
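
Nearly every hunk below makes the same one-line change: a bare "except:" becomes "except Exception:". flake8 rejects bare excepts (the E722 check in pycodestyle), which is presumably what broke CI; the narrower form also stops these handlers from swallowing SystemExit and KeyboardInterrupt. A minimal illustrative sketch of the difference (not code from this repository):

# Illustrative sketch only, not code from this repository.
# A bare "except:" also catches SystemExit and KeyboardInterrupt, so Ctrl-C
# during the call would be silently swallowed; "except Exception:" lets those
# control-flow exceptions propagate while still catching ordinary errors.
import json

def parse_or_default(raw):
    try:
        return json.loads(raw)
    except Exception:   # was: "except:", which flake8 flags as E722
        return {}

print(parse_or_default('{"ok": true}'))   # {'ok': True}
print(parse_or_default('not json'))       # {}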

View File

@@ -3816,7 +3816,7 @@ class ActivityStreamSerializer(BaseSerializer):
def get_object_association(self, obj):
try:
return obj.object_relationship_type.split(".")[-1].split("_")[1]
- except:
+ except Exception:
pass
return ""

View File

@@ -362,7 +362,7 @@ class ApiV1ConfigView(APIView):
try:
settings.LICENSE = {}
return Response(status=status.HTTP_204_NO_CONTENT)
- except:
+ except Exception:
# FIX: Log
return Response({"error": _("Failed to remove license (%s)") % has_error}, status=status.HTTP_400_BAD_REQUEST)
@@ -3275,7 +3275,7 @@ class WorkflowJobTemplateNodeDetail(WorkflowsEnforcementMixin, RetrieveUpdateDes
try:
obj = self.get_object()
data.update(obj.char_prompts)
- except:
+ except Exception:
pass
return super(WorkflowJobTemplateNodeDetail, self).update_raw_data(data)

View File

@@ -84,7 +84,7 @@ class URLField(CharField):
else:
netloc = '{}@{}' % (url_parts.username, netloc)
value = urlparse.urlunsplit([url_parts.scheme, netloc, url_parts.path, url_parts.query, url_parts.fragment])
- except:
+ except Exception:
raise # If something fails here, just fall through and let the validators check it.
super(URLField, self).run_validators(value)

View File

@@ -162,14 +162,14 @@ class SettingsRegistry(object):
if category_slug == 'user' and for_user:
try:
field_instance.default = original_field_instance.to_representation(getattr(self.settings, setting))
- except:
+ except Exception:
logger.warning('Unable to retrieve default value for user setting "%s".', setting, exc_info=True)
elif not field_instance.read_only or field_instance.default is empty or field_instance.defined_in_file:
try:
field_instance.default = original_field_instance.to_representation(self.settings._awx_conf_settings._get_default(setting))
except AttributeError:
pass
- except:
+ except Exception:
logger.warning('Unable to retrieve default value for setting "%s".', setting, exc_info=True)
# `PENDO_TRACKING_STATE` is disabled for the open source awx license

View File

@@ -366,7 +366,7 @@ class SettingsWrapper(UserSettingsHolder):
return internal_value
else:
return field.run_validation(value)
- except:
+ except Exception:
logger.warning(
'The current value "%r" for setting "%s" is invalid.',
value, name, exc_info=True)

View File

@@ -16,7 +16,7 @@ class argv_placeholder(object):
def __del__(self):
try:
argv_ready(sys.argv)
- except:
+ except Exception:
pass

View File

@@ -23,9 +23,9 @@ with mock.patch.dict(os.environ, {'ANSIBLE_STDOUT_CALLBACK': CALLBACK,
'ANSIBLE_CALLBACK_PLUGINS': PLUGINS}):
from ansible.cli.playbook import PlaybookCLI
from ansible.executor.playbook_executor import PlaybookExecutor
- from ansible.inventory import Inventory
+ from ansible.inventory.manager import InventoryManager
from ansible.parsing.dataloader import DataLoader
- from ansible.vars import VariableManager
+ from ansible.vars.manager import VariableManager
# Add awx/lib to sys.path so we can use the plugin
path = os.path.abspath(os.path.join(PLUGINS, '..', '..'))
@@ -62,9 +62,8 @@ def executor(tmpdir_factory, request):
cli.parse()
options = cli.parser.parse_args(['-v'])[0]
loader = DataLoader()
- variable_manager = VariableManager()
- inventory = Inventory(loader=loader, variable_manager=variable_manager,
- host_list=['localhost'])
+ variable_manager = VariableManager(loader=loader)
+ inventory = InventoryManager(loader=loader, sources='localhost,')
variable_manager.set_inventory(inventory)
return PlaybookExecutor(playbooks=playbook_files, inventory=inventory,
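
The two hunks above adapt the callback test fixture to Ansible 2.4's reorganized Python API, where ansible.inventory.Inventory and ansible.vars.VariableManager were replaced by ansible.inventory.manager.InventoryManager and ansible.vars.manager.VariableManager. A rough sketch of the new-style wiring, assuming an Ansible 2.4.x install (standalone example, not the fixture itself):

# Sketch of the Ansible 2.4-style objects the updated fixture builds
# (assumes ansible>=2.4 is importable; run outside the test suite).
from ansible.inventory.manager import InventoryManager
from ansible.parsing.dataloader import DataLoader
from ansible.vars.manager import VariableManager

loader = DataLoader()
# The trailing comma marks the string as an inline host list rather than
# a path to an inventory file.
inventory = InventoryManager(loader=loader, sources='localhost,')
variable_manager = VariableManager(loader=loader)
variable_manager.set_inventory(inventory)
print(inventory.get_hosts())   # [localhost]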

View File

@@ -105,7 +105,7 @@ def _load_default_license_from_file():
license_data = json.load(open(license_file))
logger.debug('Read license data from "%s".', license_file)
return license_data
- except:
+ except Exception:
logger.warning('Could not read license from "%s".', license_file, exc_info=True)
return {}

View File

@@ -122,7 +122,7 @@ def run_pexpect(args, cwd, env, logfile,
if cancelled_callback:
try:
canceled = cancelled_callback()
- except:
+ except Exception:
logger.exception('Could not check cancel callback - canceling immediately')
if isinstance(extra_update_fields, dict):
extra_update_fields['job_explanation'] = "System error during job execution, check system logs"

View File

@@ -168,7 +168,7 @@ class AnsibleInventoryLoader(object):
data = json.loads(stdout)
if not isinstance(data, dict):
raise TypeError('Returned JSON must be a dictionary, got %s instead' % str(type(data)))
- except:
+ except Exception:
logger.error('Failed to load JSON from: %s', stdout)
raise
return data

View File

@@ -105,7 +105,7 @@ class Schedule(CommonModel):
if not isinstance(extra_data, dict):
try:
extra_data = json.loads(self.extra_data)
- except:
+ except Exception:
raise ValidationError(_("Expected JSON"))
if extra_data and 'days' in extra_data:

View File

@@ -820,7 +820,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
def result_stdout_size(self):
try:
return os.stat(self.result_stdout_file).st_size
- except:
+ except Exception:
return len(self.result_stdout)
def _result_stdout_raw_limited(self, start_line=0, end_line=None, redact_sensitive=True, escape_ascii=False):
@@ -1082,7 +1082,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
update_fields.append('job_explanation')
instance.save(update_fields=update_fields)
self.websocket_emit_status("canceled")
- except: # FIXME: Log this exception!
+ except Exception: # FIXME: Log this exception!
if settings.DEBUG:
raise

View File

@@ -24,7 +24,7 @@ import yaml
import fcntl
try:
import psutil
- except:
+ except Exception:
psutil = None
# Celery
@@ -101,7 +101,7 @@ def celery_startup(conf=None, **kwargs):
from awx.main.signals import disable_activity_stream
with disable_activity_stream():
sch.save()
- except:
+ except Exception:
logger.exception("Failed to rebuild schedule {}.".format(sch))
@@ -110,7 +110,7 @@ def task_set_logger_pre_run(*args, **kwargs):
try:
cache.close()
configure_external_logger(settings, is_startup=False)
- except:
+ except Exception:
# General exception because LogErrorsTask not used with celery signals
logger.exception('Encountered error on initial log configuration.')
@@ -123,7 +123,7 @@ def inform_cluster_of_shutdown(*args, **kwargs):
this_inst.save(update_fields=['capacity', 'modified'])
logger.warning('Normal shutdown signal for instance {}, '
'removed self from capacity pool.'.format(this_inst.hostname))
- except:
+ except Exception:
# General exception because LogErrorsTask not used with celery signals
logger.exception('Encountered problem with normal shutdown signal.')
@@ -318,7 +318,7 @@ def _send_notification_templates(instance, status_str):
raise ValueError(_("status_str must be either succeeded or failed"))
try:
notification_templates = instance.get_notification_templates()
- except:
+ except Exception:
logger.warn("No notification template defined for emitting notification")
notification_templates = None
if notification_templates:
@@ -449,7 +449,7 @@ def delete_inventory(self, inventory_id, user_id):
else:
try:
user = User.objects.get(id=user_id)
- except:
+ except Exception:
user = None
with ignore_inventory_computed_fields(), ignore_inventory_group_removal(), impersonate(user):
try:
@@ -917,7 +917,7 @@ class BaseTask(LogErrorsTask):
**extra_update_fields)
try:
self.final_run_hook(instance, status, **kwargs)
- except:
+ except Exception:
logger.exception('%s Final run hook errored.', instance.log_format)
instance.websocket_emit_status(status)
if status != 'successful' and not hasattr(settings, 'CELERY_UNIT_TEST'):
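
Several of the hunks above pair the broadened except clause with logger.exception(...) or exc_info=True, which is what keeps these broad catches debuggable: the full traceback still lands in the logs. A small sketch of that idiom ("awx.example" is a hypothetical logger name, not one from this repository):

# Illustrative sketch; inside an except block, logger.exception() logs the
# message at ERROR level and appends the active traceback, while
# logger.warning(..., exc_info=True) does the same at WARNING level.
import logging

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger('awx.example')

def final_run_hook():
    raise RuntimeError('hook blew up')   # stand-in for a failing hook

try:
    final_run_hook()
except Exception:
    logger.exception('Final run hook errored.')   # message plus traceback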

View File

@@ -437,7 +437,7 @@ def group_factory(inventory):
def g(name):
try:
return Group.objects.get(name=name, inventory=inventory)
- except:
+ except Exception:
return Group.objects.create(inventory=inventory, name=name)
return g
@@ -478,7 +478,7 @@ def inventory_source_factory(inventory_factory):
source = 'file'
try:
return inventory.inventory_sources.get(name=name)
except:
except Exception:
return inventory.inventory_sources.create(name=name, source=source)
return invsrc

View File

@@ -159,7 +159,7 @@ def get_ansible_version():
stdout=subprocess.PIPE)
result = proc.communicate()[0]
return result.split('\n')[0].replace('ansible', '').strip()
- except:
+ except Exception:
return 'unknown'
@@ -173,7 +173,7 @@ def get_ssh_version():
stderr=subprocess.PIPE)
result = proc.communicate()[1]
return result.split(" ")[0].split("_")[1]
- except:
+ except Exception:
return 'unknown'
@@ -185,7 +185,7 @@ def get_awx_version():
try:
import pkg_resources
return pkg_resources.require('awx')[0].version
- except:
+ except Exception:
return __version__

View File

@@ -152,7 +152,7 @@ class BaseHandler(logging.Handler):
return self._format_and_send_record(record)
except (KeyboardInterrupt, SystemExit):
raise
- except:
+ except Exception:
self.handleError(record)
def _get_host(self, scheme='', hostname_only=False):

View File

@@ -41,7 +41,7 @@ from ansible import constants as C
try:
from ansible.cache.base import BaseCacheModule
- except:
+ except Exception:
from ansible.plugins.cache.base import BaseCacheModule

View File

@@ -158,7 +158,7 @@ class CustomPdb(Rdb):
try:
socket.gethostbyname('docker.for.mac.localhost')
host = 'docker.for.mac.localhost'
- except:
+ except Exception:
host = os.popen('ip route').read().split(' ')[2]
sock, port = Rdb.get_avail_port(self, *args, **kwargs)
socket.socket(socket.AF_INET, socket.SOCK_DGRAM).sendto(
@@ -217,7 +217,7 @@ def telnet(port):
try:
s.connect(('0.0.0.0', port))
- except:
+ except Exception:
print 'unable to connect'
return
print 'connected to 0.0.0.0:%d' % port