Merge pull request #4031 from jangsutsr/4023_fix_flake8_E305

Fix flake8 E305 errors.
Authored by Aaron Tan on 2016-11-15 17:24:19 -05:00; committed by GitHub
commit 929528b9cc
34 changed files with 47 additions and 1 deletion
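
For context, flake8's E305 check ("expected 2 blank lines after class or function definition") fires when module-level code follows a def or class with fewer than two blank lines separating them. The usual fix, which appears to be what the mostly one-line additions in the hunks below do, is to insert a blank line so that two blank lines precede the module-level statement. A minimal sketch of the pattern, borrowing names from one of the hunks below (the class body here is abbreviated, not the real one):

    # Before: E305 -- only one blank line between the class body and the
    # module-level statement that follows it
    class SettingsRegistry(object):
        pass  # methods omitted for brevity

    settings_registry = SettingsRegistry()


    # After: two blank lines before the module-level statement satisfy E305
    class SettingsRegistry(object):
        pass  # methods omitted for brevity


    settings_registry = SettingsRegistry()

Running flake8 with --select=E305 over the source tree is a quick way to confirm that no occurrences remain.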

@@ -469,6 +469,7 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):
else:
return self.attach(request, *args, **kwargs)
'''
Models for which you want the last instance to be deleted from the database
when the last disassociate is called should inherit from this class. Further,

@@ -1736,6 +1736,7 @@ class GroupList(ListCreateAPIView):
serializer_class = GroupSerializer
capabilities_prefetch = ['inventory.admin', 'inventory.adhoc', 'inventory.update']
'''
Useful when you have a self-refering ManyToManyRelationship.
* Tower uses a shallow (2-deep only) url pattern. For example:
@@ -4013,7 +4014,6 @@ class RoleChildrenList(SubListAPIView):
return Role.filter_visible_roles(self.request.user, role.children.all())
# Create view functions for all of the class-based views to simplify inclusion
# in URL patterns and reverse URL lookups, converting CamelCase names to
# lowercase_with_underscore (e.g. MyView.as_view() becomes my_view).

@@ -60,6 +60,7 @@ register(
def _get_read_only_ansible_cow_selection_default():
return getattr(settings, 'ANSIBLE_COW_SELECTION', 'No default cow!')
register(
'READONLY_ANSIBLE_COW_SELECTION',
field_class=fields.CharField,

@@ -118,4 +118,5 @@ class SettingsRegistry(object):
logger.warning('Unable to retrieve default value for setting "%s".', setting, exc_info=True)
return field_instance
settings_registry = SettingsRegistry()

@@ -67,6 +67,7 @@ def with_verbosity(f):
return f(*args, **kwargs)
return wrapper
Display.verbose = with_verbosity(Display.verbose)
@@ -89,4 +90,5 @@ def display_with_context(f):
event_context.remove_local(uuid=None)
return wrapper
Display.display = display_with_context(Display.display)

@@ -134,4 +134,5 @@ class EventContext(object):
def dump_end(self, fileobj):
self.dump(fileobj, self.get_end_dict())
event_context = EventContext()

@@ -88,6 +88,7 @@ def _load_default_license_from_file():
logger.warning('Could not read license from "%s".', license_file, exc_info=True)
return {}
register(
'LICENSE',
field_class=fields.DictField,

@@ -124,6 +124,7 @@ class SocketController(object):
self.server = server
return server
socketController = SocketController(SocketSessionManager())
#

@@ -34,8 +34,11 @@ def _new_handle_m2m_field(self, obj, field):
except AttributeError:
return
return _original_handle_m2m_field(self, obj, field)
_PythonSerializer.handle_m2m_field = _new_handle_m2m_field
# Add custom methods to User model for permissions checks.
from django.contrib.auth.models import User # noqa
from awx.main.access import * # noqa
@@ -58,6 +61,7 @@ def user_get_admin_of_organizations(user):
def user_get_auditor_of_organizations(user):
return Organization.objects.filter(auditor_role__members=user)
User.add_to_class('organizations', user_get_organizations)
User.add_to_class('admin_of_organizations', user_get_admin_of_organizations)
User.add_to_class('auditor_of_organizations', user_get_auditor_of_organizations)
@@ -74,6 +78,7 @@ def user_is_system_auditor(user, tf):
else:
Role.singleton('system_auditor').members.remove(user)
User.add_to_class('is_system_auditor', user_is_system_auditor)
# Import signal handlers only after models have been defined.

@@ -191,6 +191,7 @@ class Profile(CreatedModifiedModel):
default='',
)
"""
Since expiration and session expiration is event driven a token could be
invalidated for both reasons. Further, we only support a single reason for a

@@ -39,4 +39,5 @@ class ActivityStreamRegistrar(object):
m2m_attr = getattr(model, m2mfield.name)
m2m_changed.disconnect(dispatch_uid=str(self.__class__) + str(m2m_attr.through) + "_associate")
activity_stream_registrar = ActivityStreamRegistrar()

@@ -200,6 +200,7 @@ def cleanup_detached_labels_on_deleted_parent(sender, instance, **kwargs):
if l.is_candidate_for_detach():
l.delete()
post_save.connect(emit_update_inventory_on_created_or_deleted, sender=Host)
post_delete.connect(emit_update_inventory_on_created_or_deleted, sender=Host)
post_save.connect(emit_update_inventory_on_created_or_deleted, sender=Group)
@@ -307,6 +308,7 @@ class ActivityStreamEnabled(threading.local):
def __nonzero__(self):
return bool(self.enabled and getattr(settings, 'ACTIVITY_STREAM_ENABLED', True))
activity_stream_enabled = ActivityStreamEnabled()
@contextlib.contextmanager

@@ -89,6 +89,7 @@ class MockCommonlySlowTestMixin(object):
mock.patch.object(generics, 'get_view_description', return_value=None).start()
super(MockCommonlySlowTestMixin, self).__init__(*args, **kwargs)
ansible_version = get_ansible_version()
class BaseTestMixin(MockCommonlySlowTestMixin):
'''

@@ -40,6 +40,7 @@ def organization_resource_creator(organization, user):
return organization
return rf
COUNTS_PRIMES = {
'users': 11,
'admins': 5,

@@ -35,6 +35,7 @@ def test_cleanup_granularity(fact_scans, hosts):
deleted_count = cleanup_facts.cleanup(timestamp_future, granularity)
assert 60 == deleted_count
'''
Delete half of the scans
'''

@@ -57,6 +57,7 @@ def test_process_fact_message_services(fact_msg_services):
check_process_fact_message_module(fact_returned, fact_msg_services, 'services')
'''
We pickypack our fact sending onto the Ansible fact interface.
The interface is <hostname, facts>. Where facts is a json blob of all the facts.

@@ -51,6 +51,7 @@ def disable_signals():
mocked = mock.patch('django.dispatch.Signal.send', autospec=True)
mocked.start()
'''
FIXME: Not sure how "far" just setting the BROKER_URL will get us.
We may need to incluence CELERY's configuration like we do in the old unit tests (see base.py)

@@ -20,6 +20,7 @@ def test_newest_scan_exact(hosts, fact_scans):
assert fact_found == fact_known
'''
Show me the most recent state of the sytem at any point of time.
or, said differently
@@ -48,6 +49,7 @@ def test_newest_scan_less_than(hosts, fact_scans):
assert fact_found == fact_known
'''
Tests query Fact that is in the middle of the fact scan timeline, but not an exact timestamp.
'''
@@ -69,6 +71,7 @@ def test_query_middle_of_timeline(hosts, fact_scans):
assert fact_found == fact_known
'''
Query time less than any fact scan. Should return None
'''
@@ -83,6 +86,7 @@ def test_query_result_empty(hosts, fact_scans):
assert fact_found is None
'''
Query by fact module other than 'ansible'
'''

@@ -36,5 +36,6 @@ def do_init():
jt_never= JobTemplate.objects.get(id=7)
do_init_workflow(jt_success, jt_fail, jt_never)
if __name__ == "__main__":
do_init()

@@ -41,5 +41,6 @@ def do_init():
jt_parallel.append(JobTemplate.objects.get(id=18))
do_init_workflow(jt_success, jt_fail, jt_never, jt_parallel)
if __name__ == "__main__":
do_init()

@@ -74,6 +74,7 @@ class AuthTokenLimitTest(BaseTest):
response = self.get(user_me_url, expect=401, auth=auth_token1)
self.assertEqual(AuthToken.reason_long('limit_reached'), response['detail'])
'''
Ensure ips from the X-Forwarded-For get honored and used in auth tokens
'''

@@ -105,6 +105,7 @@ def jt_ask(job_template_factory):
def project_unit():
return Project(name='example-proj')
example_prompts = dict(job_type='check', job_tags='quack', limit='duck', skip_tags='oink')
@pytest.fixture

@@ -185,6 +185,7 @@ def successful_inventory_update(epoch, inventory_update_factory):
inventory_update['status'] = 'successful'
return inventory_update
'''
Job
'''
@@ -219,6 +220,7 @@ def running_job(job_factory):
job['status'] = 'running'
return job
'''
Inventory id -> [InventorySourceDict, ...]
'''

@@ -48,6 +48,7 @@ def dag_simple_edge_labels():
return dag
'''
class TestSimpleDAG(object):
def test_get_root_nodes(self, dag_root):

@@ -662,6 +662,7 @@ def getattrd(obj, name, default=NoDefaultProvided):
return default
raise
current_apps = apps
def set_current_apps(apps):
global current_apps

@@ -161,4 +161,5 @@ def main():
results = dict(ansible_facts=dict(files=files))
module.exit_json(**results)
main()

@@ -85,4 +85,5 @@ def main():
results = dict(skipped=True, msg="Unsupported Distribution")
module.exit_json(**results)
main()

@@ -190,4 +190,5 @@ def main():
results['msg'] = "WARNING: Could not find status for all services. Sometimes this is due to insufficient privileges."
module.exit_json(**results)
main()

@@ -37,6 +37,7 @@ def is_testing(argv=None):
def IS_TESTING(argv=None):
return is_testing(argv)
DEBUG = True
TEMPLATE_DEBUG = DEBUG
SQL_DEBUG = DEBUG

@@ -18,6 +18,7 @@ def xmlsec_initialize(*args, **kwargs):
original_xmlsec_initialize(*args, **kwargs)
xmlsec_initialized = True
dm.xmlsec.binding.initialize = xmlsec_initialize

@@ -23,6 +23,7 @@ class SocialAuthCallbackURL(object):
path = reverse('social:complete', args=(self.provider,))
return urlparse.urljoin(settings.TOWER_URL_BASE, path)
SOCIAL_AUTH_ORGANIZATION_MAP_HELP_TEXT = _('''\
Mapping to organization admins/users from social auth accounts. This setting
controls which users are placed into which Tower organizations based on
@@ -799,6 +800,7 @@ register(
def get_saml_metadata_url():
return urlparse.urljoin(settings.TOWER_URL_BASE, reverse('sso:saml_metadata'))
register(
'SOCIAL_AUTH_SAML_CALLBACK_URL',
field_class=fields.CharField,

@@ -33,6 +33,7 @@ class BaseRedirectView(RedirectView):
else:
return url
sso_error = BaseRedirectView.as_view()
sso_inactive = BaseRedirectView.as_view()
@@ -67,6 +68,7 @@ class CompleteView(BaseRedirectView):
response.set_cookie('current_user', current_user)
return response
sso_complete = CompleteView.as_view()
@@ -86,4 +88,5 @@ class MetadataView(View):
else:
return HttpResponse(content=str(errors), content_type='text/plain')
saml_metadata = MetadataView.as_view()

@@ -77,6 +77,7 @@ def proc_data_files(data_files):
#####################################################################
setup(
name='ansible-tower',
version=__version__.split("-")[0], # FIXME: Should keep full version here?

@@ -101,6 +101,7 @@ def spread(n, m):
ret[0] += n
return ret
ids = defaultdict(lambda: 0)