# coding: utf-8
"""
Pagination classes determine the structure of the output that should
be used for paginated responses.
"""
from __future__ import unicode_literals

from base64 import b64encode, b64decode
from collections import namedtuple

from django.core.paginator import InvalidPage, Paginator as DjangoPaginator
from django.template import Context, loader
from django.utils import six
from django.utils.six.moves.urllib import parse as urlparse
from django.utils.translation import ugettext as _

from rest_framework.compat import OrderedDict
from rest_framework.exceptions import NotFound
from rest_framework.response import Response
from rest_framework.settings import api_settings
from rest_framework.utils.urls import (
    replace_query_param, remove_query_param
)
import warnings


def _positive_int(integer_string, strict=False, cutoff=None):
    """
    Cast a string to a strictly positive integer.
    """
    ret = int(integer_string)
    if ret < 0 or (ret == 0 and strict):
        raise ValueError()
    if cutoff:
        ret = min(ret, cutoff)
    return ret
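
# Illustrative behaviour (doctest-style sketch, not executed as part of the module):
#   _positive_int('3')               -> 3
#   _positive_int('0', strict=True)  -> raises ValueError
#   _positive_int('250', cutoff=100) -> 100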


def _divide_with_ceil(a, b):
    """
    Returns 'a' divided by 'b', with any remainder rounded up.
    """
    if a % b:
        return (a // b) + 1
    return a // b
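
# For example (sketch): _divide_with_ceil(10, 3) -> 4, while _divide_with_ceil(9, 3) -> 3.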


def _get_count(queryset):
    """
    Determine an object count, supporting either querysets or regular lists.
    """
    try:
        return queryset.count()
    except (AttributeError, TypeError):
        return len(queryset)


def _get_displayed_page_numbers(current, final):
    """
    This utility function determines a list of page numbers to display.
    This gives us a nice contextually relevant set of page numbers.

    For example:
    current=14, final=16 -> [1, None, 13, 14, 15, 16]

    This implementation gives one page to each side of the cursor,
    or two pages to the side when the cursor is at the edge, then
    ensures that any breaks between non-continuous page numbers never
    remove only a single page.

    For an alternative implementation which gives two pages to each side of
    the cursor, eg. as in GitHub issue list pagination, see:

    https://gist.github.com/tomchristie/321140cebb1c4a558b15
    """
    assert current >= 1
    assert final >= current

    if final <= 5:
        return list(range(1, final + 1))

    # We always include the first page, the final page, and
    # one page either side of the current page.
    included = set((
        1,
        current - 1, current, current + 1,
        final
    ))

    # If the break would only exclude a single page number then we
    # may as well include the page number instead of the break.
    if current <= 4:
        included.add(2)
        included.add(3)
    if current >= final - 3:
        included.add(final - 1)
        included.add(final - 2)

    # Now sort the page numbers and drop anything outside the limits.
    included = [
        idx for idx in sorted(list(included))
        if idx > 0 and idx <= final
    ]

    # Finally insert any `...` breaks
    if current > 4:
        included.insert(1, None)
    if current < final - 3:
        included.insert(len(included) - 1, None)
    return included
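
# Illustrative outputs (doctest-style sketch, not executed here):
#   _get_displayed_page_numbers(1, 10)  -> [1, 2, 3, None, 10]
#   _get_displayed_page_numbers(5, 10)  -> [1, None, 4, 5, 6, None, 10]
#   _get_displayed_page_numbers(14, 16) -> [1, None, 13, 14, 15, 16]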


def _get_page_links(page_numbers, current, url_func):
    """
    Given a list of page numbers and `None` page breaks,
    return a list of `PageLink` objects.
    """
    page_links = []
    for page_number in page_numbers:
        if page_number is None:
            page_link = PAGE_BREAK
        else:
            page_link = PageLink(
                url=url_func(page_number),
                number=page_number,
                is_active=(page_number == current),
                is_break=False
            )
        page_links.append(page_link)
    return page_links
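
# Sketch of the result shape: with page_numbers == [1, None, 3] and current == 3,
# the list contains an inactive `PageLink` for page 1, the shared `PAGE_BREAK`
# sentinel, and an active `PageLink` for page 3.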


def _decode_cursor(encoded):
    """
    Given a string representing an encoded cursor, return a `Cursor` instance.
    """

    # The offset in the cursor is used in situations where we have a
    # nearly-unique index. (Eg millisecond precision creation timestamps)
    # We guard against malicious users attempting to cause expensive database
    # queries, by having a hard cap on the maximum possible size of the offset.
    OFFSET_CUTOFF = 1000

    try:
        querystring = b64decode(encoded.encode('ascii')).decode('ascii')
        tokens = urlparse.parse_qs(querystring, keep_blank_values=True)

        offset = tokens.get('o', ['0'])[0]
        offset = _positive_int(offset, cutoff=OFFSET_CUTOFF)

        reverse = tokens.get('r', ['0'])[0]
        reverse = bool(int(reverse))

        position = tokens.get('p', [None])[0]
    except (TypeError, ValueError):
        return None

    return Cursor(offset=offset, reverse=reverse, position=position)


def _encode_cursor(cursor):
    """
    Given a Cursor instance, return an encoded string representation.
    """
    tokens = {}
    if cursor.offset != 0:
        tokens['o'] = str(cursor.offset)
    if cursor.reverse:
        tokens['r'] = '1'
    if cursor.position is not None:
        tokens['p'] = cursor.position

    querystring = urlparse.urlencode(tokens, doseq=True)
    return b64encode(querystring.encode('ascii')).decode('ascii')
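
# Round-trip sketch: encoding a cursor and decoding it again recovers the same
# field values (the exact base64 text depends on query-string token ordering):
#   c = Cursor(offset=2, reverse=False, position='2015-01-01')
#   _decode_cursor(_encode_cursor(c)) == c   -> True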


def _reverse_ordering(ordering_tuple):
    """
    Given an order_by tuple such as `('-created', 'uuid')` reverse the
    ordering and return a new tuple, eg. `('created', '-uuid')`.
    """
    def invert(x):
        return x[1:] if (x.startswith('-')) else '-' + x

    return tuple([invert(item) for item in ordering_tuple])


Cursor = namedtuple('Cursor', ['offset', 'reverse', 'position'])
PageLink = namedtuple('PageLink', ['url', 'number', 'is_active', 'is_break'])

PAGE_BREAK = PageLink(url=None, number=None, is_active=False, is_break=True)


class BasePagination(object):
    display_page_controls = False

    def paginate_queryset(self, queryset, request, view=None):  # pragma: no cover
        raise NotImplementedError('paginate_queryset() must be implemented.')

    def get_paginated_response(self, data):  # pragma: no cover
        raise NotImplementedError('get_paginated_response() must be implemented.')

    def to_html(self):  # pragma: no cover
        raise NotImplementedError('to_html() must be implemented to display page controls.')


class PageNumberPagination(BasePagination):
    """
    A simple page number based style that supports page numbers as
    query parameters. For example:

    http://api.example.org/accounts/?page=4
    http://api.example.org/accounts/?page=4&page_size=100
    """
    # The default page size.
    # Defaults to `None`, meaning pagination is disabled.
    page_size = api_settings.PAGE_SIZE

    # Client can control the page using this query parameter.
    page_query_param = 'page'

    # Client can control the page size using this query parameter.
    # Default is 'None'. Set to eg 'page_size' to enable usage.
    page_size_query_param = None

    # Set to an integer to limit the maximum page size the client may request.
    # Only relevant if 'page_size_query_param' has also been set.
    max_page_size = None

    last_page_strings = ('last',)

    template = 'rest_framework/pagination/numbers.html'

    invalid_page_message = _('Invalid page "{page_number}": {message}.')

    def _handle_backwards_compat(self, view):
        """
        Prior to version 3.1, pagination was handled in the view, and the
        attributes were set there. The attributes should now be set on
        the pagination class, but the old style is still pending deprecation.
        """
        assert not (
            getattr(view, 'pagination_serializer_class', None) or
            getattr(api_settings, 'DEFAULT_PAGINATION_SERIALIZER_CLASS', None)
        ), (
            "The pagination_serializer_class attribute and "
            "DEFAULT_PAGINATION_SERIALIZER_CLASS setting have been removed as "
            "part of the 3.1 pagination API improvement. See the pagination "
            "documentation for details on the new API."
        )

        for (settings_key, attr_name) in (
            ('PAGINATE_BY', 'page_size'),
            ('PAGINATE_BY_PARAM', 'page_size_query_param'),
            ('MAX_PAGINATE_BY', 'max_page_size')
        ):
            value = getattr(api_settings, settings_key, None)
            if value is not None:
                setattr(self, attr_name, value)
                warnings.warn(
                    "The `%s` settings key is pending deprecation. "
                    "Use the `%s` attribute on the pagination class instead." % (
                        settings_key, attr_name
                    ),
                    PendingDeprecationWarning,
                )

        for (view_attr, attr_name) in (
            ('paginate_by', 'page_size'),
            ('page_query_param', 'page_query_param'),
            ('paginate_by_param', 'page_size_query_param'),
            ('max_paginate_by', 'max_page_size')
        ):
            value = getattr(view, view_attr, None)
            if value is not None:
                setattr(self, attr_name, value)
                warnings.warn(
                    "The `%s` view attribute is pending deprecation. "
                    "Use the `%s` attribute on the pagination class instead." % (
                        view_attr, attr_name
                    ),
                    PendingDeprecationWarning,
                )

    def paginate_queryset(self, queryset, request, view=None):
        """
        Paginate a queryset if required, either returning a
        page object, or `None` if pagination is not configured for this view.
        """
        self._handle_backwards_compat(view)

        page_size = self.get_page_size(request)
        if not page_size:
            return None

        paginator = DjangoPaginator(queryset, page_size)
        page_number = request.query_params.get(self.page_query_param, 1)
        if page_number in self.last_page_strings:
            page_number = paginator.num_pages

        try:
            self.page = paginator.page(page_number)
        except InvalidPage as exc:
            msg = self.invalid_page_message.format(
                page_number=page_number, message=six.text_type(exc)
            )
            raise NotFound(msg)

        if paginator.count > 1 and self.template is not None:
            # The browsable API should display pagination controls.
            self.display_page_controls = True

        self.request = request
        return list(self.page)

    def get_paginated_response(self, data):
        return Response(OrderedDict([
            ('count', self.page.paginator.count),
            ('next', self.get_next_link()),
            ('previous', self.get_previous_link()),
            ('results', data)
        ]))

    def get_page_size(self, request):
        if self.page_size_query_param:
            try:
                return _positive_int(
                    request.query_params[self.page_size_query_param],
                    strict=True,
                    cutoff=self.max_page_size
                )
            except (KeyError, ValueError):
                pass

        return self.page_size

    def get_next_link(self):
        if not self.page.has_next():
            return None
        url = self.request.build_absolute_uri()
        page_number = self.page.next_page_number()
        return replace_query_param(url, self.page_query_param, page_number)

    def get_previous_link(self):
        if not self.page.has_previous():
            return None
        url = self.request.build_absolute_uri()
        page_number = self.page.previous_page_number()
        if page_number == 1:
            return remove_query_param(url, self.page_query_param)
        return replace_query_param(url, self.page_query_param, page_number)

    def get_html_context(self):
        base_url = self.request.build_absolute_uri()

        def page_number_to_url(page_number):
            if page_number == 1:
                return remove_query_param(base_url, self.page_query_param)
            else:
                return replace_query_param(base_url, self.page_query_param, page_number)

        current = self.page.number
        final = self.page.paginator.num_pages
        page_numbers = _get_displayed_page_numbers(current, final)
        page_links = _get_page_links(page_numbers, current, page_number_to_url)

        return {
            'previous_url': self.get_previous_link(),
            'next_url': self.get_next_link(),
            'page_links': page_links
        }

    def to_html(self):
        template = loader.get_template(self.template)
        context = Context(self.get_html_context())
        return template.render(context)


class LimitOffsetPagination(BasePagination):
    """
    A limit/offset based style. For example:

    http://api.example.org/accounts/?limit=100
    http://api.example.org/accounts/?offset=400&limit=100
    """
    default_limit = api_settings.PAGE_SIZE
    limit_query_param = 'limit'
    offset_query_param = 'offset'
    max_limit = None
    template = 'rest_framework/pagination/numbers.html'

    def paginate_queryset(self, queryset, request, view=None):
        self.limit = self.get_limit(request)
        self.offset = self.get_offset(request)
        self.count = _get_count(queryset)
        self.request = request
        if self.count > self.limit and self.template is not None:
            self.display_page_controls = True
        return list(queryset[self.offset:self.offset + self.limit])

    def get_paginated_response(self, data):
        return Response(OrderedDict([
            ('count', self.count),
            ('next', self.get_next_link()),
            ('previous', self.get_previous_link()),
            ('results', data)
        ]))

    def get_limit(self, request):
        if self.limit_query_param:
            try:
                return _positive_int(
                    request.query_params[self.limit_query_param],
                    cutoff=self.max_limit
                )
            except (KeyError, ValueError):
                pass

        return self.default_limit

    def get_offset(self, request):
        try:
            return _positive_int(
                request.query_params[self.offset_query_param],
            )
        except (KeyError, ValueError):
            return 0

    def get_next_link(self):
        if self.offset + self.limit >= self.count:
            return None

        url = self.request.build_absolute_uri()
        offset = self.offset + self.limit
        return replace_query_param(url, self.offset_query_param, offset)

    def get_previous_link(self):
        if self.offset <= 0:
            return None

        url = self.request.build_absolute_uri()

        if self.offset - self.limit <= 0:
            return remove_query_param(url, self.offset_query_param)

        offset = self.offset - self.limit
        return replace_query_param(url, self.offset_query_param, offset)

    def get_html_context(self):
        base_url = self.request.build_absolute_uri()
        current = _divide_with_ceil(self.offset, self.limit) + 1
        # The number of pages is a little bit fiddly.
        # We need to sum both the number of pages from current offset to end
        # plus the number of pages up to the current offset.
        # When offset is not strictly divisible by the limit then we may
        # end up introducing an extra page as an artifact.
        final = (
            _divide_with_ceil(self.count - self.offset, self.limit) +
            _divide_with_ceil(self.offset, self.limit)
        )
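
        # Worked example (sketch): with count=26, offset=5, limit=10 the items
        # before the offset contribute ceil(5/10) == 1 and the remainder
        # contributes ceil(21/10) == 3, so final == 4 and current == 2.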

        def page_number_to_url(page_number):
            if page_number == 1:
                return remove_query_param(base_url, self.offset_query_param)
            else:
                offset = self.offset + ((page_number - current) * self.limit)
                return replace_query_param(base_url, self.offset_query_param, offset)

        page_numbers = _get_displayed_page_numbers(current, final)
        page_links = _get_page_links(page_numbers, current, page_number_to_url)

        return {
            'previous_url': self.get_previous_link(),
            'next_url': self.get_next_link(),
            'page_links': page_links
        }

    def to_html(self):
        template = loader.get_template(self.template)
        context = Context(self.get_html_context())
        return template.render(context)


class CursorPagination(BasePagination):
    """
    The cursor pagination implementation is necessarily complex.
    For an overview of the position/offset style we use, see this post:
    http://cramer.io/2011/03/08/building-cursors-for-the-disqus-api/
    """
    cursor_query_param = 'cursor'
    page_size = api_settings.PAGE_SIZE
    invalid_cursor_message = _('Invalid cursor')
    ordering = '-created'
    template = 'rest_framework/pagination/previous_and_next.html'

    def paginate_queryset(self, queryset, request, view=None):
        self.base_url = request.build_absolute_uri()
        self.ordering = self.get_ordering(request, queryset, view)

        # Determine if we have a cursor, and if so then decode it.
        encoded = request.query_params.get(self.cursor_query_param)
        if encoded is None:
            self.cursor = None
            (offset, reverse, current_position) = (0, False, None)
        else:
            self.cursor = _decode_cursor(encoded)
            if self.cursor is None:
                raise NotFound(self.invalid_cursor_message)
            (offset, reverse, current_position) = self.cursor

        # Cursor pagination always enforces an ordering.
        if reverse:
            queryset = queryset.order_by(*_reverse_ordering(self.ordering))
        else:
            queryset = queryset.order_by(*self.ordering)

        # If we have a cursor with a fixed position then filter by that.
        if current_position is not None:
            order = self.ordering[0]
            is_reversed = order.startswith('-')
            order_attr = order.lstrip('-')

            # Test for: (cursor reversed) XOR (queryset reversed)
            if self.cursor.reverse != is_reversed:
                kwargs = {order_attr + '__lt': current_position}
            else:
                kwargs = {order_attr + '__gt': current_position}

            queryset = queryset.filter(**kwargs)
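
            # Worked example (sketch): with ordering == ('-created',), a forward
            # cursor at position P filters with `created__lt=P` (the XOR test
            # above is true), while a reversed cursor at P uses `created__gt=P`.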

        # If we have an offset cursor then offset the entire page by that amount.
        # We also always fetch an extra item in order to determine if there is a
        # page following on from this one.
        results = list(queryset[offset:offset + self.page_size + 1])
        self.page = list(results[:self.page_size])

        # Determine the position of the final item following the page.
        if len(results) > len(self.page):
            has_following_position = True
            following_position = self._get_position_from_instance(results[-1], self.ordering)
        else:
            has_following_position = False
            following_position = None

        # If we have a reverse queryset, then the query ordering was in reverse
        # so we need to reverse the items again before returning them to the user.
        if reverse:
            self.page = list(reversed(self.page))

        if reverse:
            # Determine next and previous positions for reverse cursors.
            self.has_next = (current_position is not None) or (offset > 0)
            self.has_previous = has_following_position
            if self.has_next:
                self.next_position = current_position
            if self.has_previous:
                self.previous_position = following_position
        else:
            # Determine next and previous positions for forward cursors.
            self.has_next = has_following_position
            self.has_previous = (current_position is not None) or (offset > 0)
            if self.has_next:
                self.next_position = following_position
            if self.has_previous:
                self.previous_position = current_position

        # Display page controls in the browsable API if there is more
        # than one page.
        if (self.has_previous or self.has_next) and self.template is not None:
            self.display_page_controls = True

        return self.page

    def get_next_link(self):
        if not self.has_next:
            return None

        if self.cursor and self.cursor.reverse and self.cursor.offset != 0:
            # If we're reversing direction and we have an offset cursor
            # then we cannot use the first position we find as a marker.
            compare = self._get_position_from_instance(self.page[-1], self.ordering)
        else:
            compare = self.next_position
        offset = 0

        for item in reversed(self.page):
            position = self._get_position_from_instance(item, self.ordering)
            if position != compare:
                # The item in this position and the item following it
                # have different positions. We can use this position as
                # our marker.
                break

            # The item in this position has the same position as the item
            # following it, we can't use it as a marker position, so increment
            # the offset and keep seeking to the previous item.
            compare = position
            offset += 1
        else:
            # There were no unique positions in the page.
            if not self.has_previous:
                # We are on the first page.
                # Our cursor will have an offset equal to the page size,
                # but no position to filter against yet.
                offset = self.page_size
                position = None
            elif self.cursor.reverse:
                # The change in direction will introduce a paging artifact,
                # where we end up skipping forward a few extra items.
                offset = 0
                position = self.previous_position
            else:
                # Use the position from the existing cursor and increment
                # its offset by the page size.
                offset = self.cursor.offset + self.page_size
                position = self.previous_position

        cursor = Cursor(offset=offset, reverse=False, position=position)
        encoded = _encode_cursor(cursor)
        return replace_query_param(self.base_url, self.cursor_query_param, encoded)

    def get_previous_link(self):
        if not self.has_previous:
            return None

        if self.cursor and not self.cursor.reverse and self.cursor.offset != 0:
            # If we're reversing direction and we have an offset cursor
            # then we cannot use the first position we find as a marker.
            compare = self._get_position_from_instance(self.page[0], self.ordering)
        else:
            compare = self.previous_position
        offset = 0

        for item in self.page:
            position = self._get_position_from_instance(item, self.ordering)
            if position != compare:
                # The item in this position and the item following it
                # have different positions. We can use this position as
                # our marker.
                break

            # The item in this position has the same position as the item
            # following it, we can't use it as a marker position, so increment
            # the offset and keep seeking to the previous item.
            compare = position
            offset += 1
        else:
            # There were no unique positions in the page.
            if not self.has_next:
                # We are on the final page.
                # Our cursor will have an offset equal to the page size,
                # but no position to filter against yet.
                offset = self.page_size
                position = None
            elif self.cursor.reverse:
                # Use the position from the existing cursor and increment
                # its offset by the page size.
                offset = self.cursor.offset + self.page_size
                position = self.next_position
            else:
                # The change in direction will introduce a paging artifact,
                # where we end up skipping back a few extra items.
                offset = 0
                position = self.next_position

        cursor = Cursor(offset=offset, reverse=True, position=position)
        encoded = _encode_cursor(cursor)
        return replace_query_param(self.base_url, self.cursor_query_param, encoded)

    def get_ordering(self, request, queryset, view):
        """
        Return a tuple of strings that may be used in an `order_by` method.
        """
        ordering_filters = [
            filter_cls for filter_cls in getattr(view, 'filter_backends', [])
            if hasattr(filter_cls, 'get_ordering')
        ]

        if ordering_filters:
            # If a filter exists on the view that implements `get_ordering`
            # then we defer to that filter to determine the ordering.
            filter_cls = ordering_filters[0]
            filter_instance = filter_cls()
            ordering = filter_instance.get_ordering(request, queryset, view)
            assert ordering is not None, (
                'Using cursor pagination, but filter class {filter_cls} '
                'returned a `None` ordering.'.format(
                    filter_cls=filter_cls.__name__
                )
            )
        else:
            # The default case is to check for an `ordering` attribute
            # on this pagination instance.
            ordering = self.ordering
            assert ordering is not None, (
                'Using cursor pagination, but no ordering attribute was declared '
                'on the pagination class.'
            )

        assert isinstance(ordering, (six.string_types, list, tuple)), (
            'Invalid ordering. Expected string or tuple, but got {type}'.format(
                type=type(ordering).__name__
            )
        )

        if isinstance(ordering, six.string_types):
            return (ordering,)
        return tuple(ordering)

    def _get_position_from_instance(self, instance, ordering):
        attr = getattr(instance, ordering[0].lstrip('-'))
        return six.text_type(attr)

    def get_paginated_response(self, data):
        return Response(OrderedDict([
            ('next', self.get_next_link()),
            ('previous', self.get_previous_link()),
            ('results', data)
        ]))

    def get_html_context(self):
        return {
            'previous_url': self.get_previous_link(),
            'next_url': self.get_next_link()
        }

    def to_html(self):
        template = loader.get_template(self.template)
        context = Context(self.get_html_context())
        return template.render(context)