Avoid exceeding maximum container size

This issue would likely be triggered when multiple requests
(such as SaveFilePartRequest) were automatically merged into
a single one whose combined size exceeded 1044456 bytes.

This commit avoids the issue by keeping track of the current
size and skipping the merge once it would exceed the limit.
This commit is contained in:
Lonami Exo 2018-07-07 11:58:48 +02:00
parent 33ce702ab9
commit 393e1966c7
3 changed files with 13 additions and 2 deletions

View File

@@ -749,14 +749,17 @@ class _ContainerQueue(asyncio.Queue):
isinstance(result.obj, MessageContainer):
return result
size = result.size()
result = [result]
while not self.empty():
item = self.get_nowait()
-            if item == _reconnect_sentinel or\
-                    isinstance(item.obj, MessageContainer):
+            if (item == _reconnect_sentinel or
+                    isinstance(item.obj, MessageContainer)
+                    or size + item.size() > MessageContainer.MAXIMUM_SIZE):
self.put_nowait(item)
break
else:
size += item.size()
result.append(item)
return result

View File

@@ -10,6 +10,11 @@ __log__ = logging.getLogger(__name__)
class MessageContainer(TLObject):
CONSTRUCTOR_ID = 0x73f1f8dc
# Maximum size in bytes for the inner payload of the container.
# Telegram will close the connection if the payload is bigger.
# The overhead of the container itself is subtracted.
MAXIMUM_SIZE = 1044456 - 8
def __init__(self, messages):
self.messages = messages

View File

@@ -82,3 +82,6 @@ class TLMessage(TLObject):
raise TypeError('Incoming messages should not be bytes()-ed')
return self._body
def size(self):
    """Return the size in bytes of this message's serialized body.

    Used by the container queue to keep the combined payload of
    merged messages under ``MessageContainer.MAXIMUM_SIZE``.
    ``self._body`` is presumably the already-serialized ``bytes``
    payload of the message (see the ``bytes()``-related guard in
    the surrounding class) — confirm against the full class.
    """
    return len(self._body)