2012-09-20 16:06:27 +04:00
|
|
|
"""
|
2012-10-18 01:39:07 +04:00
|
|
|
Parsers are used to parse the content of incoming HTTP requests.
|
2012-09-20 16:06:27 +04:00
|
|
|
|
2012-10-18 01:39:07 +04:00
|
|
|
They give us a generic way of being able to handle various media types
|
|
|
|
on the request, such as form content or json encoded data.
|
2012-09-20 16:06:27 +04:00
|
|
|
"""
|
2013-02-05 00:55:35 +04:00
|
|
|
from __future__ import unicode_literals
|
2013-02-05 01:16:34 +04:00
|
|
|
from django.conf import settings
|
2013-05-02 23:37:25 +04:00
|
|
|
from django.core.files.uploadhandler import StopFutureHandlers
|
2012-09-20 16:06:27 +04:00
|
|
|
from django.http import QueryDict
|
|
|
|
from django.http.multipartparser import MultiPartParser as DjangoMultiPartParser
|
2013-05-02 23:37:25 +04:00
|
|
|
from django.http.multipartparser import MultiPartParserError, parse_header, ChunkIter
|
2013-02-22 17:17:22 +04:00
|
|
|
from rest_framework.compat import yaml, etree
|
2012-09-20 16:06:27 +04:00
|
|
|
from rest_framework.exceptions import ParseError
|
2013-01-03 14:41:07 +04:00
|
|
|
from rest_framework.compat import six
|
2013-01-05 16:40:02 +04:00
|
|
|
import json
|
2012-09-20 16:06:27 +04:00
|
|
|
import datetime
|
|
|
|
import decimal
|
|
|
|
|
|
|
|
|
|
|
|
class DataAndFiles(object):
    """
    Simple container pairing the parsed request data with any parsed files.
    """
    def __init__(self, data, files):
        # The two attributes are independent; store them directly.
        self.files = files
        self.data = data
|
|
|
|
|
|
|
|
|
|
|
|
class BaseParser(object):
    """
    Abstract base class for all parsers.

    Subclasses must set a `media_type` attribute and implement `.parse()`.
    """

    # Concrete parsers override this with the media type they handle.
    media_type = None

    def parse(self, stream, media_type=None, parser_context=None):
        """
        Read the given stream and return its parsed representation.

        Implementations return either plain parsed data or a `DataAndFiles`
        instance wrapping both the data and any uploaded files.
        """
        raise NotImplementedError(".parse() must be overridden.")
|
2012-09-20 16:06:27 +04:00
|
|
|
|
|
|
|
|
|
|
|
class JSONParser(BaseParser):
    """
    Parses JSON-serialized data.
    """

    media_type = 'application/json'

    def parse(self, stream, media_type=None, parser_context=None):
        """
        Parse the request body as JSON.

        Returns the decoded Python object.  Raises `ParseError` with the
        underlying message if the body is not valid JSON.
        """
        context = parser_context if parser_context else {}
        charset = context.get('encoding', settings.DEFAULT_CHARSET)

        try:
            body = stream.read()
            return json.loads(body.decode(charset))
        except ValueError as exc:
            raise ParseError('JSON parse error - %s' % six.text_type(exc))
|
2012-09-20 16:06:27 +04:00
|
|
|
|
|
|
|
|
|
|
|
class YAMLParser(BaseParser):
    """
    Parses YAML-serialized data.
    """

    media_type = 'application/yaml'

    def parse(self, stream, media_type=None, parser_context=None):
        """
        Parse the request body as YAML.

        Returns the loaded Python object.  Raises `ParseError` if the body
        is not valid YAML.
        """
        assert yaml, 'YAMLParser requires pyyaml to be installed'

        parser_context = parser_context or {}
        encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET)

        try:
            data = stream.read().decode(encoding)
            # safe_load never constructs arbitrary Python objects from the
            # (untrusted) request body.
            return yaml.safe_load(data)
        except (ValueError, yaml.parser.ParserError) as exc:
            # six.u() is only valid on text literals and breaks on Python 2
            # when handed an exception object; six.text_type stringifies the
            # exception safely on both 2 and 3 (matches JSONParser).
            raise ParseError('YAML parse error - %s' % six.text_type(exc))
|
2012-09-20 16:06:27 +04:00
|
|
|
|
|
|
|
|
|
|
|
class FormParser(BaseParser):
    """
    Parser for form data.
    """

    media_type = 'application/x-www-form-urlencoded'

    def parse(self, stream, media_type=None, parser_context=None):
        """
        Parse a urlencoded form body.

        Returns a :class:`QueryDict` holding every form parameter; there are
        never any files for this media type.
        """
        context = parser_context if parser_context else {}
        charset = context.get('encoding', settings.DEFAULT_CHARSET)
        return QueryDict(stream.read(), encoding=charset)
|
|
|
|
|
|
|
|
|
|
|
|
class MultiPartParser(BaseParser):
    """
    Parser for multipart form data, which may include file data.
    """

    media_type = 'multipart/form-data'

    def parse(self, stream, media_type=None, parser_context=None):
        """
        Parse a multipart request body.

        Returns a `DataAndFiles` object whose `.data` is a `QueryDict` of
        form parameters and whose `.files` is a `QueryDict` of uploaded
        files.  Raises `ParseError` if the body is malformed.
        """
        parser_context = parser_context or {}
        # Django's parser needs the request for its META headers and the
        # configured upload handlers.
        request = parser_context['request']
        encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET)
        meta = request.META
        upload_handlers = request.upload_handlers

        try:
            parser = DjangoMultiPartParser(meta, stream, upload_handlers, encoding)
            data, files = parser.parse()
            return DataAndFiles(data, files)
        except MultiPartParserError as exc:
            # six.u() is only valid on text literals and breaks on Python 2
            # when handed an exception object; six.text_type stringifies the
            # exception safely on both 2 and 3 (matches JSONParser).
            raise ParseError('Multipart form parse error - %s' % six.text_type(exc))
|
2012-09-20 16:06:27 +04:00
|
|
|
|
|
|
|
|
|
|
|
class XMLParser(BaseParser):
    """
    XML parser.
    """

    media_type = 'application/xml'

    def parse(self, stream, media_type=None, parser_context=None):
        """
        Parse the request body as XML.

        Returns the converted Python representation of the document root.
        Raises `ParseError` if the body is not well-formed XML.
        """
        assert etree, 'XMLParser requires defusedxml to be installed'

        parser_context = parser_context or {}
        encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET)
        # Use the defused parser and forbid DTDs to guard against entity
        # expansion / external entity attacks on untrusted input.
        parser = etree.DefusedXMLParser(encoding=encoding)
        try:
            tree = etree.parse(stream, parser=parser, forbid_dtd=True)
        except (etree.ParseError, ValueError) as exc:
            # six.u() is only valid on text literals and breaks on Python 2
            # when handed an exception object; six.text_type stringifies the
            # exception safely on both 2 and 3 (matches JSONParser).
            raise ParseError('XML parse error - %s' % six.text_type(exc))
        data = self._xml_convert(tree.getroot())

        return data

    def _xml_convert(self, element):
        """
        Convert the XML `element` into the corresponding Python object.
        """
        children = list(element)

        if len(children) == 0:
            return self._type_convert(element.text)
        else:
            # If the first child tag is 'list-item' then all children are
            # treated as list items.
            if children[0].tag == "list-item":
                data = []
                for child in children:
                    data.append(self._xml_convert(child))
            else:
                data = {}
                for child in children:
                    data[child.tag] = self._xml_convert(child)

        return data

    def _type_convert(self, value):
        """
        Converts the value returned by the XML parse into the equivalent
        Python type: datetime, int or Decimal where possible, falling back
        to the original string.
        """
        if value is None:
            return value

        try:
            return datetime.datetime.strptime(value, '%Y-%m-%d %H:%M:%S')
        except ValueError:
            pass

        try:
            return int(value)
        except ValueError:
            pass

        try:
            return decimal.Decimal(value)
        except decimal.InvalidOperation:
            pass

        return value
|
2013-05-02 23:37:25 +04:00
|
|
|
|
|
|
|
|
|
|
|
class FileUploadParser(BaseParser):
    """
    Parser for file upload data.
    """
    # Matches any content type: the whole request body is the file content.
    media_type = '*/*'

    def parse(self, stream, media_type=None, parser_context=None):
        """
        Returns a DataAndFiles object.

        `.data` will be None (we expect request body to be a file content).
        `.files` will be a `QueryDict` containing one 'file' element - a parsed file.

        Returns None implicitly if no upload handler produces a file object.
        """
        parser_context = parser_context or {}
        request = parser_context['request']
        encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET)
        meta = request.META
        upload_handlers = request.upload_handlers
        # Filename comes from the URL kwargs or the Content-Disposition
        # header; may be None if neither is present.
        filename = self.get_filename(stream, media_type, parser_context)

        # Upload-related headers may arrive with or without the HTTP_ prefix
        # depending on the server; check both.
        content_type = meta.get('HTTP_CONTENT_TYPE', meta.get('CONTENT_TYPE', ''))
        try:
            content_length = int(meta.get('HTTP_CONTENT_LENGTH', meta.get('CONTENT_LENGTH', 0)))
        except (ValueError, TypeError):
            content_length = None

        # See if the handler will want to take care of the parsing.
        for handler in upload_handlers:
            result = handler.handle_raw_input(None,
                                              meta,
                                              content_length,
                                              None,
                                              encoding)
            if result is not None:
                # A handler took over completely; result is (data, files).
                return DataAndFiles(None, {'file': result[1]})

        # Read in chunks no larger than the smallest handler chunk_size,
        # capped just under 2**31 to stay within 32-bit signed limits.
        possible_sizes = [x.chunk_size for x in upload_handlers if x.chunk_size]
        chunk_size = min([2**31-4] + possible_sizes)
        chunks = ChunkIter(stream, chunk_size)
        # Per-handler byte counters, indexed in step with upload_handlers.
        counters = [0] * len(upload_handlers)

        # Notify handlers that a new file is starting; a handler may raise
        # StopFutureHandlers to claim exclusive handling of the upload.
        for handler in upload_handlers:
            try:
                handler.new_file(None, filename, content_type, content_length, encoding)
            except StopFutureHandlers:
                break

        for chunk in chunks:
            for i, handler in enumerate(upload_handlers):
                chunk_length = len(chunk)
                # A handler may transform the chunk before passing it on to
                # the next handler in the chain.
                chunk = handler.receive_data_chunk(chunk, counters[i])
                counters[i] += chunk_length
                if chunk is None:
                    # If the chunk received by the handler is None, then don't continue.
                    break

        # Ask each handler to finalize; the first one returning a file
        # object wins.
        for i, handler in enumerate(upload_handlers):
            file_obj = handler.file_complete(counters[i])
            if file_obj:
                return DataAndFiles(None, {'file': file_obj})

    def get_filename(self, stream, media_type, parser_context):
        """
        Detects the uploaded file name. First searches a 'filename' url kwarg.
        Then tries to parse Content-Disposition header.

        Returns None if neither source provides a filename.
        """
        try:
            return parser_context['kwargs']['filename']
        except KeyError:
            pass
        try:
            meta = parser_context['request'].META
            disposition = parse_header(meta['HTTP_CONTENT_DISPOSITION'])
            return disposition[1]['filename']
        except (AttributeError, KeyError):
            pass
|