makes censor characters consistent

This commit is contained in:
adamscmRH
2018-04-10 10:52:59 -04:00
parent ea900b6f95
commit 8529f2b5eb
4 changed files with 13 additions and 12 deletions

View File

@@ -40,7 +40,7 @@ from polymorphic.models import PolymorphicModel
# AWX # AWX
from awx.main.constants import SCHEDULEABLE_PROVIDERS, ANSI_SGR_PATTERN from awx.main.constants import SCHEDULEABLE_PROVIDERS, ANSI_SGR_PATTERN
from awx.main.models import * # noqa from awx.main.models import * # noqa
from awx.main.constants import ACTIVE_STATES from awx.main.constants import ACTIVE_STATES, TOKEN_CENSOR
from awx.main.models.base import NEW_JOB_TYPE_CHOICES from awx.main.models.base import NEW_JOB_TYPE_CHOICES
from awx.main.access import get_user_capabilities from awx.main.access import get_user_capabilities
from awx.main.fields import ImplicitRoleField from awx.main.fields import ImplicitRoleField
@@ -999,7 +999,7 @@ class UserAuthorizedTokenSerializer(BaseSerializer):
if request.method == 'POST': if request.method == 'POST':
return obj.token return obj.token
else: else:
return '*************' return TOKEN_CENSOR
except ObjectDoesNotExist: except ObjectDoesNotExist:
return '' return ''
@@ -1009,7 +1009,7 @@ class UserAuthorizedTokenSerializer(BaseSerializer):
if request.method == 'POST': if request.method == 'POST':
return getattr(obj.refresh_token, 'token', '') return getattr(obj.refresh_token, 'token', '')
else: else:
return '**************' return TOKEN_CENSOR
except ObjectDoesNotExist: except ObjectDoesNotExist:
return '' return ''
@@ -1075,7 +1075,7 @@ class OAuth2ApplicationSerializer(BaseSerializer):
return ret return ret
def _summary_field_tokens(self, obj): def _summary_field_tokens(self, obj):
token_list = [{'id': x.pk, 'token': '**************', 'scope': x.scope} for x in obj.oauth2accesstoken_set.all()[:10]] token_list = [{'id': x.pk, 'token': TOKEN_CENSOR, 'scope': x.scope} for x in obj.oauth2accesstoken_set.all()[:10]]
if has_model_field_prefetched(obj, 'oauth2accesstoken_set'): if has_model_field_prefetched(obj, 'oauth2accesstoken_set'):
token_count = len(obj.oauth2accesstoken_set.all()) token_count = len(obj.oauth2accesstoken_set.all())
else: else:
@@ -1133,7 +1133,7 @@ class OAuth2TokenSerializer(BaseSerializer):
if request.method == 'POST': if request.method == 'POST':
return obj.token return obj.token
else: else:
return '*************' return TOKEN_CENSOR
except ObjectDoesNotExist: except ObjectDoesNotExist:
return '' return ''
@@ -1143,7 +1143,7 @@ class OAuth2TokenSerializer(BaseSerializer):
if request.method == 'POST': if request.method == 'POST':
return getattr(obj.refresh_token, 'token', '') return getattr(obj.refresh_token, 'token', '')
else: else:
return '**************' return TOKEN_CENSOR
except ObjectDoesNotExist: except ObjectDoesNotExist:
return '' return ''
@@ -1214,7 +1214,7 @@ class OAuth2AuthorizedTokenSerializer(BaseSerializer):
if request.method == 'POST': if request.method == 'POST':
return obj.token return obj.token
else: else:
return '*************' return TOKEN_CENSOR
except ObjectDoesNotExist: except ObjectDoesNotExist:
return '' return ''
@@ -1224,7 +1224,7 @@ class OAuth2AuthorizedTokenSerializer(BaseSerializer):
if request.method == 'POST': if request.method == 'POST':
return getattr(obj.refresh_token, 'token', '') return getattr(obj.refresh_token, 'token', '')
else: else:
return '**************' return TOKEN_CENSOR
except ObjectDoesNotExist: except ObjectDoesNotExist:
return '' return ''
@@ -1289,7 +1289,7 @@ class OAuth2PersonalTokenSerializer(BaseSerializer):
if request.method == 'POST': if request.method == 'POST':
return obj.token return obj.token
else: else:
return '*************' return TOKEN_CENSOR
except ObjectDoesNotExist: except ObjectDoesNotExist:
return '' return ''

View File

@@ -19,3 +19,4 @@ PRIVILEGE_ESCALATION_METHODS = [
ANSI_SGR_PATTERN = re.compile(r'\x1b\[[0-9;]*m') ANSI_SGR_PATTERN = re.compile(r'\x1b\[[0-9;]*m')
CAN_CANCEL = ('new', 'pending', 'waiting', 'running') CAN_CANCEL = ('new', 'pending', 'waiting', 'running')
ACTIVE_STATES = CAN_CANCEL ACTIVE_STATES = CAN_CANCEL
TOKEN_CENSOR = '************'

View File

@@ -115,7 +115,7 @@ def test_oauth_token_create(oauth_application, get, post, admin):
) )
assert response.data['summary_fields']['tokens']['count'] == 1 assert response.data['summary_fields']['tokens']['count'] == 1
assert response.data['summary_fields']['tokens']['results'][0] == { assert response.data['summary_fields']['tokens']['results'][0] == {
'id': token.pk, 'scope': token.scope, 'token': '**************' 'id': token.pk, 'scope': token.scope, 'token': '************'
} }

View File

@@ -41,7 +41,7 @@ Individual applications will be accessible via their primary keys:
"results": [ "results": [
{ {
"scope": "read", "scope": "read",
"token": "**************", "token": "************",
"id": 2 "id": 2
} }
] ]
@@ -114,7 +114,7 @@ Individual tokens will be accessible via their primary keys:
"description": "App Token Test", "description": "App Token Test",
"user": 1, "user": 1,
"token": "*************", "token": "************",
"refresh_token": "**************", "refresh_token": "************",
"application": 1, "application": 1,
"expires": "2018-02-24T00:39:32.618279Z", "expires": "2018-02-24T00:39:32.618279Z",
"scope": "read" "scope": "read"