diff --git a/awx/api/filters.py b/awx/api/filters.py
index f4e65a8d82..55155224c4 100644
--- a/awx/api/filters.py
+++ b/awx/api/filters.py
@@ -58,7 +58,7 @@ class TypeFilterBackend(BaseFilterBackend):
                 else:
                     queryset = queryset.none()
             return queryset
-        except FieldError, e:
+        except FieldError as e:
             # Return a 400 for invalid field names.
             raise ParseError(*e.args)

@@ -139,7 +139,7 @@
         elif new_lookup.endswith('__regex') or new_lookup.endswith('__iregex'):
             try:
                 re.compile(value)
-            except re.error, e:
+            except re.error as e:
                 raise ValueError(e.args[0])
         else:
             value = self.value_to_python_for_field(field, value)
@@ -221,9 +221,9 @@
                     queryset = queryset.filter(q)
             queryset = queryset.filter(*args).distinct()
             return queryset
-        except (FieldError, FieldDoesNotExist, ValueError), e:
+        except (FieldError, FieldDoesNotExist, ValueError) as e:
             raise ParseError(e.args[0])
-        except ValidationError, e:
+        except ValidationError as e:
             raise ParseError(e.messages)

 class OrderByBackend(BaseFilterBackend):
@@ -261,6 +261,6 @@
                 new_order_by.append(field)
             queryset = queryset.order_by(*new_order_by)
             return queryset
-        except FieldError, e:
+        except FieldError as e:
             # Return a 400 for invalid field names.
             raise ParseError(*e.args)
diff --git a/awx/api/permissions.py b/awx/api/permissions.py
index e02dab3e60..6e1320e2d8 100644
--- a/awx/api/permissions.py
+++ b/awx/api/permissions.py
@@ -127,7 +127,7 @@ class ModelAccessPermission(permissions.BasePermission):
                      view.__class__.__name__, obj)
         try:
             response = self.check_permissions(request, view, obj)
-        except Exception, e:
+        except Exception as e:
             logger.debug('has_permission raised %r', e, exc_info=True)
             raise
         else:
diff --git a/awx/api/serializers.py b/awx/api/serializers.py
index 8803d3301f..2d2a43c51a 100644
--- a/awx/api/serializers.py
+++ b/awx/api/serializers.py
@@ -219,8 +219,8 @@ class BaseSerializer(serializers.ModelSerializer):
     class Meta:
         fields = ('id', 'type', 'url', 'related', 'summary_fields', 'created',
                   'modified', 'name', 'description')
-        summary_fields = () # FIXME: List of field names from this serializer that should be used when included as part of another's summary_fields.
-        summarizable_fields = () # FIXME: List of field names on this serializer that should be included in summary_fields.
+        summary_fields = ()
+        summarizable_fields = ()

     # add the URL and related resources
     type = serializers.SerializerMethodField()
@@ -669,11 +669,6 @@
         else:
             return super(UnifiedJobStdoutSerializer, self).get_types()

-    # TODO: Needed?
-    #def to_representation(self, obj):
-    #    ret = super(UnifiedJobStdoutSerializer, self).to_representation(obj)
-    #    return ret.get('result_stdout', '')
-


 class UserSerializer(BaseSerializer):
@@ -1310,7 +1305,6 @@ class InventorySourceOptionsSerializer(BaseSerializer):

     def validate_source_vars(self, value):
         # source_env must be blank, a valid JSON or YAML dict, or ...
-        # FIXME: support key=value pairs.
         try:
             json.loads((value or '').strip() or '{}')
             return value
@@ -1336,9 +1330,9 @@
                 try:
                     if source_script.organization != self.instance.inventory.organization:
                         errors['source_script'] = "The 'source_script' does not belong to the same organization as the inventory."
-                except Exception:
-                    # TODO: Log
+                except Exception as exc:
                     errors['source_script'] = "'source_script' doesn't exist."
+                    logger.error(str(exc))

         if errors:
             raise serializers.ValidationError(errors)
@@ -1611,8 +1605,6 @@ class ResourceAccessListElementSerializer(UserSerializer):

 class CredentialSerializer(BaseSerializer):

-    # FIXME: may want to make some fields filtered based on user accessing
-
     class Meta:
         model = Credential
         fields = ('*', 'kind', 'cloud', 'host', 'username',
@@ -1887,7 +1879,6 @@ class JobTemplateSerializer(UnifiedJobTemplateSerializer, JobOptionsSerializer):

     def validate_extra_vars(self, value):
         # extra_vars must be blank, a valid JSON or YAML dict, or ...
-        # FIXME: support key=value pairs.
         try:
             json.loads((value or '').strip() or '{}')
             return value
@@ -2575,7 +2566,6 @@ class ScheduleSerializer(BaseSerializer):
         try:
             rrule.rrulestr(rrule_value)
         except Exception:
-            # TODO: Log
             raise serializers.ValidationError("rrule parsing failed validation.")
         return value

@@ -2610,7 +2600,6 @@ class ActivityStreamSerializer(BaseSerializer):
         try:
             return json.loads(obj.changes)
         except Exception:
-            # TODO: Log
             logger.warn("Error deserializing activity stream json changes")
             return {}

diff --git a/awx/api/views.py b/awx/api/views.py
index 60357cf347..eb9ba6f642 100644
--- a/awx/api/views.py
+++ b/awx/api/views.py
@@ -277,7 +277,7 @@ class ApiV1ConfigView(APIView):
         for fname in (TEMPORARY_TASK_FILE, TASK_FILE):
             try:
                 os.remove(fname)
-            except OSError, e:
+            except OSError as e:
                 if e.errno != errno.ENOENT:
                     has_error = e.errno
                     break
@@ -1742,33 +1742,6 @@ class GroupChildrenList(SubListCreateAttachDetachAPIView):
         parent.delete()
         return Response(status=status.HTTP_204_NO_CONTENT)

-    def _unattach(self, request, *args, **kwargs): # FIXME: Disabled for now for UI support.
-        '''
-        Special case for disassociating a child group from the parent. If the
-        child group has no more parents, then automatically mark it inactive.
-        '''
-        sub_id = request.data.get('id', None)
-        if not sub_id:
-            data = dict(msg="'id' is required to disassociate.")
-            return Response(data, status=status.HTTP_400_BAD_REQUEST)
-
-        parent = self.get_parent_object()
-        # TODO: flake8 warns, pending removal if unneeded
-        # parent_key = getattr(self, 'parent_key', None)
-        relationship = getattr(parent, self.relationship)
-        sub = get_object_or_400(self.model, pk=sub_id)
-
-        if not request.user.can_access(self.parent_model, 'unattach', parent,
-                                       sub, self.relationship, request.data):
-            raise PermissionDenied()
-
-        if sub.parents.exclude(pk=parent.pk).count() == 0:
-            sub.delete()
-        else:
-            relationship.remove(sub)
-
-        return Response(status=status.HTTP_204_NO_CONTENT)
-
 class GroupPotentialChildrenList(SubListAPIView):

     model = Group
@@ -2328,7 +2301,6 @@ class JobTemplateSurveySpec(GenericAPIView):
         try:
             obj.survey_spec = json.dumps(request.data)
         except ValueError:
-            # TODO: Log
             return Response(dict(error="Invalid JSON when parsing survey spec."), status=status.HTTP_400_BAD_REQUEST)
         if "name" not in obj.survey_spec:
             return Response(dict(error="'name' missing from survey spec."), status=status.HTTP_400_BAD_REQUEST)
@@ -2481,7 +2453,6 @@ class JobTemplateCallback(GenericAPIView):
             ansible_ssh_host = host.variables_dict.get('ansible_ssh_host', '')
             if ansible_ssh_host in remote_hosts:
                 matches.add(host)
-            # FIXME: Not entirely sure if this statement will ever be needed?
             if host.name != ansible_ssh_host and host.name in remote_hosts:
                 matches.add(host)
         if len(matches) == 1:
@@ -2551,17 +2522,14 @@
         # Check matching hosts.
         if not matching_hosts:
             data = dict(msg='No matching host could be found!')
-            # FIXME: Log!
             return Response(data, status=status.HTTP_400_BAD_REQUEST)
         elif len(matching_hosts) > 1:
             data = dict(msg='Multiple hosts matched the request!')
-            # FIXME: Log!
             return Response(data, status=status.HTTP_400_BAD_REQUEST)
         else:
             host = list(matching_hosts)[0]
         if not job_template.can_start_without_user_input():
             data = dict(msg='Cannot start automatically, user input required!')
-            # FIXME: Log!
             return Response(data, status=status.HTTP_400_BAD_REQUEST)

         limit = host.name
@@ -3451,7 +3419,7 @@ class UnifiedJobStdout(RetrieveAPIView):
                 response = HttpResponse(FileWrapper(content_fd), content_type='text/plain')
                 response["Content-Disposition"] = 'attachment; filename="job_%s.txt"' % str(unified_job.id)
                 return response
-            except Exception, e:
+            except Exception as e:
                 return Response({"error": "Error generating stdout download file: %s" % str(e)}, status=status.HTTP_400_BAD_REQUEST)
         elif request.accepted_renderer.format == 'txt':
             return Response(unified_job.result_stdout)
@@ -3690,7 +3658,7 @@ class RoleUsersList(SubListCreateAttachDetachAPIView):
         return super(RoleUsersList, self).post(request, *args, **kwargs)


-class RoleTeamsList(ListAPIView):
+class RoleTeamsList(SubListAPIView):

     model = Team
     serializer_class = TeamSerializer
@@ -3700,8 +3668,8 @@
     new_in_300 = True

     def get_queryset(self):
-        # TODO: Check
-        role = get_object_or_404(Role, pk=self.kwargs['pk'])
+        role = self.get_parent_object()
+        self.check_parent_access(role)
         return Team.objects.filter(member_role__children=role)

     def post(self, request, pk, *args, **kwargs):
@@ -3742,10 +3710,9 @@ class RoleParentsList(SubListAPIView):
     new_in_300 = True

     def get_queryset(self):
-        # XXX: This should be the intersection between the roles of the user
-        # and the roles that the requesting user has access to see
         role = Role.objects.get(pk=self.kwargs['pk'])
-        return role.parents.all()
+        return Role.filter_visible_roles(self.request.user, role.parents.all())
+


 class RoleChildrenList(SubListAPIView):
@@ -3757,10 +3724,8 @@
     new_in_300 = True

     def get_queryset(self):
-        # XXX: This should be the intersection between the roles of the user
-        # and the roles that the requesting user has access to see
         role = Role.objects.get(pk=self.kwargs['pk'])
-        return role.children.all()
+        return Role.filter_visible_roles(self.request.user, role.children.all())


diff --git a/awx/main/management/commands/inventory_import.py b/awx/main/management/commands/inventory_import.py
index 60d86a9cfd..73bb0f99f4 100644
--- a/awx/main/management/commands/inventory_import.py
+++ b/awx/main/management/commands/inventory_import.py
@@ -78,7 +78,7 @@ class MemObject(object):
                 v = yaml.safe_load(file(path, 'r').read())
                 if hasattr(v, 'items'): # is a dict
                     all_vars.update(v)
-            except yaml.YAMLError, e:
+            except yaml.YAMLError as e:
                 if hasattr(e, 'problem_mark'):
                     logger.error('Invalid YAML in %s:%s col %s', path,
                                  e.problem_mark.line + 1,
@@ -1329,7 +1329,7 @@ class Command(NoArgsCommand):
                 self.logger.warning('Inventory import required %d queries '
                                     'taking %0.3fs', len(queries_this_import),
                                     sqltime)
-        except Exception, e:
+        except Exception as e:
             if isinstance(e, KeyboardInterrupt):
                 status = 'canceled'
                 exc = e
diff --git a/awx/main/management/commands/run_callback_receiver.py b/awx/main/management/commands/run_callback_receiver.py
index 790257e4e6..e6080fa419 100644
--- a/awx/main/management/commands/run_callback_receiver.py
+++ b/awx/main/management/commands/run_callback_receiver.py
@@ -173,7 +173,7 @@ class CallbackReceiver(object):
         # If for any reason there's a problem, just use 0.
         try:
             verbose = Job.objects.get(id=data['job_id']).verbosity
-        except Exception, e:
+        except Exception as e:
             verbose = 0

         # Convert the datetime for the job event's creation appropriately,
@@ -191,7 +191,7 @@

         # Print the data to stdout if we're in DEBUG mode.
         if settings.DEBUG:
-            print data
+            print(data)

         # Sanity check: Don't honor keys that we don't recognize.
         for key in data.keys():
@@ -234,7 +234,7 @@
         # If for any reason there's a problem, just use 0.
         try:
             verbose = AdHocCommand.objects.get(id=data['ad_hoc_command_id']).verbosity
-        except Exception, e:
+        except Exception as e:
             verbose = 0

         # Convert the datetime for the job event's creation appropriately,
@@ -252,7 +252,7 @@

         # Print the data to stdout if we're in DEBUG mode.
         if settings.DEBUG:
-            print data
+            print(data)

         # Sanity check: Don't honor keys that we don't recognize.
         for key in data.keys():
@@ -288,7 +288,7 @@
                 message = queue_actual.get(block=True, timeout=1)
             except QueueEmpty:
                 continue
-            except Exception, e:
+            except Exception as e:
                 logger.error("Exception on listen socket, restarting: " + str(e))
                 break
             self.process_job_event(message)
diff --git a/awx/main/management/commands/run_fact_cache_receiver.py b/awx/main/management/commands/run_fact_cache_receiver.py
index 2e3159ba44..4241a2000c 100644
--- a/awx/main/management/commands/run_fact_cache_receiver.py
+++ b/awx/main/management/commands/run_fact_cache_receiver.py
@@ -59,7 +59,7 @@ class FactCacheReceiver(object):
         except Fact.MultipleObjectsReturned:
             logger.warn('Database inconsistent. Multiple Hosts found for <%s, %s>.' % (hostname, inventory_id))
             return None
-        except Exception, e:
+        except Exception as e:
             logger.error("Exception communicating with Fact Cache Database: %s" % str(e))
             return None

diff --git a/awx/main/management/commands/run_socketio_service.py b/awx/main/management/commands/run_socketio_service.py
index 56d8cc5b1c..0e3df4ccaf 100644
--- a/awx/main/management/commands/run_socketio_service.py
+++ b/awx/main/management/commands/run_socketio_service.py
@@ -96,7 +96,7 @@ class SocketController(object):
             if socket_session and socket_session.is_valid():
                 try:
                     socket.send_packet(packet)
-                except Exception, e:
+                except Exception as e:
                     logger.error("Error sending client packet to %s: %s" % (str(session_id), str(packet)))
                     logger.error("Error was: " + str(e))

@@ -116,7 +116,7 @@
             if socket:
                 try:
                     socket.send_packet(packet)
-                except Exception, e:
+                except Exception as e:
                     logger.error("Error sending client packet to %s: %s" % (str(socket_session.session_id), str(packet)))
                     logger.error("Error was: " + str(e))

@@ -129,18 +129,18 @@ socketController = SocketController(SocketSessionManager())
 #
 # Socket session is attached to self.session['socket_session']
 # self.session and self.socket.session point to the same dict
-# 
+#
 class TowerBaseNamespace(BaseNamespace):

     def get_allowed_methods(self):
         return ['recv_disconnect']
-    
+
     def get_initial_acl(self):
         request_token = self._get_request_token()
         if request_token:
-            # (1) This is the first time the socket has been seen (first 
+            # (1) This is the first time the socket has been seen (first
             # namespace joined).
-            # (2) This socket has already been seen (already joined and maybe 
+            # (2) This socket has already been seen (already joined and maybe
             # left a namespace)
             #
             # Note: Assume that the user token is valid if the session is found
@@ -168,7 +168,7 @@ class TowerBaseNamespace(BaseNamespace):
                 if k == "Token":
                     token_actual = urllib.unquote_plus(v).decode().replace("\"","")
                     return token_actual
-        except Exception, e:
+        except Exception as e:
             logger.error("Exception validating user: " + str(e))
             return False
         return False
diff --git a/awx/main/migrations/_old_access.py b/awx/main/migrations/_old_access.py
index 72bda1ee8d..ce952461ac 100644
--- a/awx/main/migrations/_old_access.py
+++ b/awx/main/migrations/_old_access.py
@@ -880,7 +880,6 @@ class JobTemplateAccess(BaseAccess):

         team_ids = Team.objects.filter(deprecated_users__in=[self.user])

-        # TODO: I think the below queries can be combined
         deploy_permissions_ids = Permission.objects.filter(
             Q(user=self.user) | Q(team_id__in=team_ids),
             permission_type__in=allowed_deploy,
@@ -1094,7 +1093,6 @@ class JobAccess(BaseAccess):
         allowed_check = [PERM_JOBTEMPLATE_CREATE, PERM_INVENTORY_DEPLOY, PERM_INVENTORY_CHECK]

         team_ids = Team.objects.filter(deprecated_users__in=[self.user])
-        # TODO: I think the below queries can be combined
         deploy_permissions_ids = Permission.objects.filter(
             Q(user=self.user) | Q(team__in=team_ids),
             permission_type__in=allowed_deploy,
diff --git a/awx/main/models/base.py b/awx/main/models/base.py
index 67b2511d72..c4914cdd20 100644
--- a/awx/main/models/base.py
+++ b/awx/main/models/base.py
@@ -137,7 +137,7 @@ class BaseModel(models.Model):
         errors = {}
         try:
             super(BaseModel, self).clean_fields(exclude)
-        except ValidationError, e:
+        except ValidationError as e:
             errors = e.update_error_dict(errors)
         for f in self._meta.fields:
             if f.name in exclude:
@@ -145,7 +145,7 @@
             if hasattr(self, 'clean_%s' % f.name):
                 try:
                     setattr(self, f.name, getattr(self, 'clean_%s' % f.name)())
-                except ValidationError, e:
+                except ValidationError as e:
                     errors[f.name] = e.messages
         if errors:
             raise ValidationError(errors)
diff --git a/awx/main/models/jobs.py b/awx/main/models/jobs.py
index 6f223b0819..4b46666337 100644
--- a/awx/main/models/jobs.py
+++ b/awx/main/models/jobs.py
@@ -701,7 +701,7 @@ class Job(UnifiedJob, JobOptions):
             return
         try:
             extra_vars = json.loads(extra_data)
-        except Exception, e:
+        except Exception as e:
             logger.warn("Exception deserializing extra vars: " + str(e))
         evars = self.extra_vars_dict
         evars.update(extra_vars)
@@ -1316,7 +1316,7 @@ class SystemJob(UnifiedJob, SystemJobOptions):
             return
         try:
             extra_vars = json.loads(extra_data)
-        except Exception, e:
+        except Exception as e:
             logger.warn("Exception deserializing extra vars: " + str(e))
         evars = self.extra_vars_dict
         evars.update(extra_vars)
diff --git a/awx/main/models/projects.py b/awx/main/models/projects.py
index 457bed302b..93c4a42e36 100644
--- a/awx/main/models/projects.py
+++ b/awx/main/models/projects.py
@@ -115,7 +115,7 @@ class ProjectOptions(models.Model):
         try:
             scm_url = update_scm_url(self.scm_type, scm_url,
                                      check_special_cases=False)
-        except ValueError, e:
+        except ValueError as e:
             raise ValidationError((e.args or ('Invalid SCM URL.',))[0])
         scm_url_parts = urlparse.urlsplit(scm_url)
         if self.scm_type and not any(scm_url_parts):
@@ -142,7 +142,7 @@
             try:
                 update_scm_url(self.scm_type, self.scm_url, scm_username,
                                scm_password)
-            except ValueError, e:
+            except ValueError as e:
                 raise ValidationError((e.args or ('Invalid credential.',))[0])
         except ValueError:
             pass
diff --git a/awx/main/redact.py b/awx/main/redact.py
index 10f7877faa..0e9ad0b5d1 100644
--- a/awx/main/redact.py
+++ b/awx/main/redact.py
@@ -29,12 +29,10 @@ class UriCleaner(object):
             username = o.username
             password = o.password

-            # Given a python MatchObject, with respect to redactedtext, find and 
+            # Given a python MatchObject, with respect to redactedtext, find and
             # replace the first occurance of username and the first and second
             # occurance of password
-            # TODO: Ideally, we would replace username and password using the index
-            # that they were found at.

             uri_str = redactedtext[match.start():match.end()]
             if username:
                 uri_str = uri_str.replace(username, UriCleaner.REPLACE_STR, 1)
diff --git a/awx/main/signals.py b/awx/main/signals.py
index a728f09a64..6138ee17bd 100644
--- a/awx/main/signals.py
+++ b/awx/main/signals.py
@@ -339,7 +339,6 @@ def activity_stream_create(sender, instance, created, **kwargs):
     # Skip recording any inventory source directly associated with a group.
     if isinstance(instance, InventorySource) and instance.group:
         return
-    # TODO: Rethink details of the new instance
     object1 = camelcase_to_underscore(instance.__class__.__name__)
     changes = model_to_dict(instance, model_serializer_mapping)
     # Special case where Job survey password variables need to be hidden
diff --git a/awx/main/tasks.py b/awx/main/tasks.py
index 558e99291a..f174ffe37f 100644
--- a/awx/main/tasks.py
+++ b/awx/main/tasks.py
@@ -77,7 +77,7 @@ def celery_startup(conf=None, **kwargs):
         try:
             sch.update_computed_fields()
             sch.save()
-        except Exception, e:
+        except Exception as e:
             logger.error("Failed to rebuild schedule {}: {}".format(sch, e))

 @task()
@@ -142,8 +142,8 @@ def tower_periodic_scheduler(self):
         try:
             last_run = dateutil.parser.parse(fd.read())
             return last_run
-        except Exception:
-            #TODO: LOG
+        except Exception as exc:
+            logger.error("get_last_run failed: {}".format(exc))
             return None

     def write_last_run(last_run):
@@ -1722,7 +1722,7 @@ class RunSystemJob(BaseTask):
             args.extend(['--older_than', str(json_vars['older_than'])])
             if 'granularity' in json_vars:
                 args.extend(['--granularity', str(json_vars['granularity'])])
-        except Exception, e:
+        except Exception as e:
             logger.error("Failed to parse system job: " + str(e))
         return args

diff --git a/awx/main/tests/old/jobs/jobs_monolithic.py b/awx/main/tests/old/jobs/jobs_monolithic.py
index fbb80fd786..b7321bb611 100644
--- a/awx/main/tests/old/jobs/jobs_monolithic.py
+++ b/awx/main/tests/old/jobs/jobs_monolithic.py
@@ -1058,7 +1058,7 @@ class JobTransactionTest(BaseJobTestMixin, django.test.LiveServerTestCase):
                     data = json.loads(response.content)
                     if data.get('status', '') not in ('new', 'pending', 'running'):
                         break
-                except Exception, e:
+                except Exception as e:
                     errors.append(e)
                     break

diff --git a/awx/main/utils.py b/awx/main/utils.py
index f1d85f72b2..f1a71fbcfc 100644
--- a/awx/main/utils.py
+++ b/awx/main/utils.py
@@ -44,9 +44,9 @@ def get_object_or_400(klass, *args, **kwargs):
     queryset = _get_queryset(klass)
     try:
         return queryset.get(*args, **kwargs)
-    except queryset.model.DoesNotExist, e:
+    except queryset.model.DoesNotExist as e:
         raise ParseError(*e.args)
-    except queryset.model.MultipleObjectsReturned, e:
+    except queryset.model.MultipleObjectsReturned as e:
         raise ParseError(*e.args)


@@ -59,9 +59,9 @@
     queryset = _get_queryset(klass)
     try:
         return queryset.get(*args, **kwargs)
-    except queryset.model.DoesNotExist, e:
+    except queryset.model.DoesNotExist as e:
         raise PermissionDenied(*e.args)
-    except queryset.model.MultipleObjectsReturned, e:
+    except queryset.model.MultipleObjectsReturned as e:
         raise PermissionDenied(*e.args)

 def to_python_boolean(value, allow_none=False):
diff --git a/awx/settings/production.py b/awx/settings/production.py
index 5460061bc8..1f33f2bbbd 100644
--- a/awx/settings/production.py
+++ b/awx/settings/production.py
@@ -127,7 +127,7 @@ except IOError:
     try:
         e = None
         open(settings_file)
-    except IOError, e:
+    except IOError as e:
         pass
     if e and e.errno == errno.EACCES:
         SECRET_KEY = 'permission-denied'