"""
Parsers are used to parse the content of incoming HTTP requests.

They give us a generic way of being able to handle various media types
on the request, such as form content or json encoded data.
"""
|
2013-02-05 00:55:35 +04:00
|
|
|
from __future__ import unicode_literals
|
2014-10-01 16:09:14 +04:00
|
|
|
|
2017-05-25 03:56:49 +03:00
|
|
|
import codecs
|
2015-06-18 16:38:29 +03:00
|
|
|
|
2013-02-05 01:16:34 +04:00
|
|
|
from django.conf import settings
|
2015-06-18 16:38:29 +03:00
|
|
|
from django.core.files.uploadhandler import StopFutureHandlers
|
2015-06-25 23:55:51 +03:00
|
|
|
from django.http import QueryDict
|
|
|
|
from django.http.multipartparser import \
|
2015-06-18 16:38:29 +03:00
|
|
|
MultiPartParser as DjangoMultiPartParser
|
2015-06-25 23:55:51 +03:00
|
|
|
from django.http.multipartparser import (
|
|
|
|
ChunkIter, MultiPartParserError, parse_header
|
2015-06-18 16:38:29 +03:00
|
|
|
)
|
2015-06-25 23:55:51 +03:00
|
|
|
from django.utils import six
|
|
|
|
from django.utils.encoding import force_text
|
|
|
|
from django.utils.six.moves.urllib import parse as urlparse
|
2015-06-18 16:38:29 +03:00
|
|
|
|
2013-08-29 00:52:56 +04:00
|
|
|
from rest_framework import renderers
|
2015-06-18 16:38:29 +03:00
|
|
|
from rest_framework.exceptions import ParseError
|
2017-07-10 22:23:12 +03:00
|
|
|
from rest_framework.settings import api_settings
|
2017-07-07 19:47:08 +03:00
|
|
|
from rest_framework.utils import json
|
2012-09-20 16:06:27 +04:00
|
|
|
|
|
|
|
|
|
|
|
class DataAndFiles(object):
    """
    Simple container pairing parsed request data with any uploaded files.

    Returned by parsers that handle both form parameters and file uploads.
    """

    def __init__(self, data, files):
        # data: the parsed form parameters; files: the uploaded file objects.
        self.data = data
        self.files = files
class BaseParser(object):
    """
    Abstract base class for all parsers.

    Subclasses must set a `media_type` attribute and implement `.parse()`.
    """

    # The media type this parser handles; subclasses must override.
    media_type = None

    def parse(self, stream, media_type=None, parser_context=None):
        """
        Read the given stream and return its parsed representation.

        Implementations should return the parsed data directly, or a
        `DataAndFiles` object holding both parsed data and files.
        """
        raise NotImplementedError(".parse() must be overridden.")
|
2012-09-20 16:06:27 +04:00
|
|
|
|
|
|
|
|
|
|
|
class JSONParser(BaseParser):
    """
    Parses JSON-serialized data.
    """
    media_type = 'application/json'
    renderer_class = renderers.JSONRenderer
    # Whether to reject non-standard JSON constants (NaN, Infinity).
    strict = api_settings.STRICT_JSON

    def parse(self, stream, media_type=None, parser_context=None):
        """
        Parses the incoming bytestream as JSON and returns the resulting data.

        Raises `ParseError` if the stream is not valid JSON.
        """
        if not parser_context:
            parser_context = {}
        charset = parser_context.get('encoding', settings.DEFAULT_CHARSET)

        try:
            # Wrap the byte stream in an incremental decoder for the
            # request's charset before handing it to the JSON loader.
            reader_factory = codecs.getreader(charset)
            constant_hook = json.strict_constant if self.strict else None
            return json.load(reader_factory(stream), parse_constant=constant_hook)
        except ValueError as exc:
            raise ParseError('JSON parse error - %s' % six.text_type(exc))
|
2012-09-20 16:06:27 +04:00
|
|
|
|
|
|
|
|
|
|
|
class FormParser(BaseParser):
    """
    Parser for form data.
    """
    media_type = 'application/x-www-form-urlencoded'

    def parse(self, stream, media_type=None, parser_context=None):
        """
        Parses the incoming bytestream as a URL encoded form,
        and returns the resulting QueryDict.
        """
        context = parser_context or {}
        charset = context.get('encoding', settings.DEFAULT_CHARSET)
        # QueryDict handles the percent-decoding and multi-value keys.
        return QueryDict(stream.read(), encoding=charset)
|
|
|
|
|
|
|
|
|
|
|
|
class MultiPartParser(BaseParser):
    """
    Parser for multipart form data, which may include file data.
    """
    media_type = 'multipart/form-data'

    def parse(self, stream, media_type=None, parser_context=None):
        """
        Parses the incoming bytestream as a multipart encoded form,
        and returns a DataAndFiles object.

        `.data` will be a `QueryDict` containing all the form parameters.
        `.files` will be a `QueryDict` containing all the form files.
        """
        context = parser_context or {}
        request = context['request']
        charset = context.get('encoding', settings.DEFAULT_CHARSET)

        # Django's parser reads the content type from META, so override it
        # with the media type we were actually dispatched on.
        meta = request.META.copy()
        meta['CONTENT_TYPE'] = media_type
        handlers = request.upload_handlers

        try:
            django_parser = DjangoMultiPartParser(meta, stream, handlers, charset)
            data, files = django_parser.parse()
        except MultiPartParserError as exc:
            raise ParseError('Multipart form parse error - %s' % six.text_type(exc))
        return DataAndFiles(data, files)
|
2012-09-20 16:06:27 +04:00
|
|
|
|
|
|
|
|
2013-05-02 23:37:25 +04:00
|
|
|
class FileUploadParser(BaseParser):
    """
    Parser for file upload data.
    """
    # Matches any content type: the whole request body is treated as the file.
    media_type = '*/*'
    # Canonical error messages, keyed so subclasses/tests can reference them.
    errors = {
        'unhandled': 'FileUpload parse error - none of upload handlers can handle the stream',
        'no_filename': 'Missing filename. Request should include a Content-Disposition header with a filename parameter.',
    }

    def parse(self, stream, media_type=None, parser_context=None):
        """
        Treats the incoming bytestream as a raw file upload and returns
        a `DataAndFiles` object.

        `.data` will be None (we expect request body to be a file content).
        `.files` will be a `QueryDict` containing one 'file' element.

        Raises `ParseError` if no filename can be determined, or if none of
        the request's upload handlers produced a file object.
        """
        parser_context = parser_context or {}
        request = parser_context['request']
        encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET)
        meta = request.META
        upload_handlers = request.upload_handlers
        filename = self.get_filename(stream, media_type, parser_context)

        if not filename:
            raise ParseError(self.errors['no_filename'])

        # Note that this code is extracted from Django's handling of
        # file uploads in MultiPartParser.
        content_type = meta.get('HTTP_CONTENT_TYPE',
                                meta.get('CONTENT_TYPE', ''))
        try:
            content_length = int(meta.get('HTTP_CONTENT_LENGTH',
                                          meta.get('CONTENT_LENGTH', 0)))
        except (ValueError, TypeError):
            # Missing/garbage Content-Length: pass None through to handlers.
            content_length = None

        # See if the handler will want to take care of the parsing.
        # A non-None result short-circuits the chunked path entirely;
        # result[1] is the file object produced by the handler.
        for handler in upload_handlers:
            result = handler.handle_raw_input(stream,
                                              meta,
                                              content_length,
                                              None,
                                              encoding)
            if result is not None:
                return DataAndFiles({}, {'file': result[1]})

        # This is the standard case.
        # Read in chunks no larger than the smallest handler chunk_size,
        # capped just under 2**31 to stay within 32-bit-safe read sizes.
        possible_sizes = [x.chunk_size for x in upload_handlers if x.chunk_size]
        chunk_size = min([2 ** 31 - 4] + possible_sizes)
        chunks = ChunkIter(stream, chunk_size)
        # Per-handler byte counters: each handler tracks how much of the
        # stream it has been offered.
        counters = [0] * len(upload_handlers)

        for index, handler in enumerate(upload_handlers):
            try:
                handler.new_file(None, filename, content_type,
                                 content_length, encoding)
            except StopFutureHandlers:
                # Handler claimed the upload: drop every handler after it.
                upload_handlers = upload_handlers[:index + 1]
                break

        for chunk in chunks:
            for index, handler in enumerate(upload_handlers):
                # Length is captured before the handler call because a
                # handler may transform (or consume) the chunk it receives.
                chunk_length = len(chunk)
                chunk = handler.receive_data_chunk(chunk, counters[index])
                counters[index] += chunk_length
                if chunk is None:
                    # Returning None means the chunk was fully consumed;
                    # don't offer it to the remaining handlers.
                    break

        # The first handler to yield a completed file wins.
        for index, handler in enumerate(upload_handlers):
            file_obj = handler.file_complete(counters[index])
            if file_obj is not None:
                return DataAndFiles({}, {'file': file_obj})

        raise ParseError(self.errors['unhandled'])

    def get_filename(self, stream, media_type, parser_context):
        """
        Detects the uploaded file name. First searches a 'filename' url kwarg.
        Then tries to parse Content-Disposition header.

        Returns None if neither source yields a filename.
        """
        try:
            return parser_context['kwargs']['filename']
        except KeyError:
            pass

        try:
            meta = parser_context['request'].META
            # parse_header returns (value, params-dict); params may hold a
            # plain 'filename' or an RFC 2231 encoded 'filename*'.
            disposition = parse_header(meta['HTTP_CONTENT_DISPOSITION'].encode('utf-8'))
            filename_parm = disposition[1]
            if 'filename*' in filename_parm:
                return self.get_encoded_filename(filename_parm)
            return force_text(filename_parm['filename'])
        except (AttributeError, KeyError, ValueError):
            # Missing/malformed header: fall through and return None.
            pass

    def get_encoded_filename(self, filename_parm):
        """
        Handle encoded filenames per RFC6266. See also:
        https://tools.ietf.org/html/rfc2231#section-4
        """
        encoded_filename = force_text(filename_parm['filename*'])
        try:
            # RFC 2231 format: charset'lang'percent-encoded-name.
            # NOTE(review): charset and lang are parsed but not applied to
            # the unquote call below — verify whether non-UTF-8 charsets
            # need explicit decoding here.
            charset, lang, filename = encoded_filename.split('\'', 2)
            filename = urlparse.unquote(filename)
        except (ValueError, LookupError):
            # Malformed extended value: fall back to the plain filename.
            filename = force_text(filename_parm['filename'])
        return filename
|