Cap page_size in pagination urls

Currently, even with a `max_page_size` of n, pagination can
produce URLs with `page_size` > n. The API still caps the
number of results it returns, but the URL remains invalid.
This is a somewhat messy solution: perform a string replacement
in the URL when the query param exceeds `max_page_size`.

Signed-off-by: Vismay Golwala <vgolwala@redhat.com>
This commit is contained in:
Vismay Golwala
2019-04-09 17:12:42 -04:00
parent fbc7d1a9f2
commit f4dc4d5849
2 changed files with 35 additions and 2 deletions

View File

def get_next_link(self):
    """Return the URL of the next page, or ``None`` on the last page.

    The page-size query param in the URL is capped at ``max_page_size``
    so the emitted link stays valid even when the client requested an
    oversized page.
    """
    # NOTE(review): the `def` line and early-return guard were elided by
    # the diff hunk; reconstructed here to match get_previous_link.
    if not self.page.has_next():
        return None
    url = self.request and self.request.get_full_path() or ''
    # NOTE(review): encoding to bytes before URL manipulation looks like a
    # py2 leftover — confirm replace_query_param handles bytes as intended.
    url = url.encode('utf-8')
    page_number = self.page.next_page_number()
    # Cap page_size BEFORE substituting the page number; the stale uncapped
    # return that shadowed this line has been removed.
    return replace_query_param(self.cap_page_size(url), self.page_query_param, page_number)
def get_previous_link(self):
    """Return the URL of the previous page, or ``None`` on the first page.

    The page-size query param in the URL is capped at ``max_page_size``
    so the emitted link stays valid even when the client requested an
    oversized page.
    """
    if not self.page.has_previous():
        # Early-return guard (elided by the diff hunk header) restored.
        return None
    url = self.request and self.request.get_full_path() or ''
    # NOTE(review): encoding to bytes before URL manipulation looks like a
    # py2 leftover — confirm replace_query_param handles bytes as intended.
    url = url.encode('utf-8')
    page_number = self.page.previous_page_number()
    # Cap page_size BEFORE substituting the page number; the stale uncapped
    # return that shadowed this line has been removed.
    return replace_query_param(self.cap_page_size(url), self.page_query_param, page_number)
def cap_page_size(self, url):
    """Return *url* with its page-size query param capped at ``max_page_size``.

    The API already limits how many results it returns, but without this
    the next/previous links would echo back an oversized ``page_size``.

    :param url: the pagination URL to (possibly) rewrite.
    :returns: the URL, rewritten only when the requested page size exceeds
        ``max_page_size``; otherwise unchanged.
    """
    raw = self.request.query_params.get(self.page_size_query_param, 0)
    try:
        requested = int(raw)
    except (TypeError, ValueError):
        # Non-numeric page_size from the client: leave the URL untouched
        # instead of letting int() raise and produce a 500.
        return url
    # Guard against max_page_size being None (DRF's default), which would
    # make the comparison raise TypeError.
    if self.max_page_size is not None and requested > self.max_page_size:
        url = replace_query_param(url, self.page_size_query_param, self.max_page_size)
    return url
def get_html_context(self):
    """Build the HTML pagination context with page_size capped in every link URL."""
    context = super().get_html_context()
    capped_links = []
    for link in context['page_links']:
        # Rewrite each link's URL so it never advertises an oversized page_size.
        capped_links.append(link._replace(url=self.cap_page_size(link.url)))
    context['page_links'] = capped_links
    return context