import gzip
import logging
import struct

from .. import helpers as utils
from ..crypto import AES
from ..errors import (
    BadMessageError, InvalidChecksumError, BrokenAuthKeyError,
    rpc_message_to_error
)
from ..extensions import BinaryReader
from ..tl import TLMessage, MessageContainer, GzipPacked
from ..tl.all_tlobjects import tlobjects
from ..tl.types import MsgsAck
from ..tl.functions.auth import LogOutRequest

logging.getLogger(__name__).addHandler(logging.NullHandler())


class MtProtoSender:
    """MTProto Mobile Protocol sender
       (https://core.telegram.org/mtproto/description).

       Note that this class is not thread-safe, and calling send/receive
       from two or more threads at the same time is undefined behaviour.
       Rationale: a new connection should be spawned to send/receive requests
                  in parallel, so thread-safety (hence locking) isn't needed.
    """

    def __init__(self, session, connection):
        """Creates a new MtProtoSender configured to send messages through
           'connection' and using the parameters from 'session'.
        """
        self.session = session
        self.connection = connection
        self._logger = logging.getLogger(__name__)

        # Message IDs that need confirmation
        self._need_confirmation = []

        # Sent requests (msg_id: TLMessage) still awaiting their response
        self._pending_receive = {}

    def connect(self):
        """Connects to the server"""
        self.connection.connect()

    def is_connected(self):
        return self.connection.is_connected()

    def disconnect(self):
        """Disconnects from the server"""
        self.connection.close()
        self._need_confirmation.clear()
        self._clear_all_pending()

    def clone(self):
        """Creates a copy of this MtProtoSender as a new connection"""
        return MtProtoSender(self.session, self.connection.clone())

    # region Send and receive

    def send(self, *requests):
        """Sends the specified TLObject request(s), first sending any
           messages which needed confirmation."""

        # If any message needs confirmation, send a MsgsAck first
        self._send_acknowledges()

        # Finally send our packed request(s)
        messages = [TLMessage(self.session, r) for r in requests]
        self._pending_receive.update({m.msg_id: m for m in messages})
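
        # A single request is sent as-is; several requests are wrapped in a
        # msg_container so they all travel in the same packet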
        if len(messages) == 1:
            message = messages[0]
        else:
            message = TLMessage(self.session, MessageContainer(messages))

        self._send_message(message)

    def _send_acknowledges(self):
        """Sends a MsgsAck for every message ID in self._need_confirmation."""
        if self._need_confirmation:
            self._send_message(
                TLMessage(self.session, MsgsAck(self._need_confirmation))
            )
            del self._need_confirmation[:]

    def receive(self, update_state):
        """Receives a single message from the connected endpoint.

           This method returns nothing, and will only affect other parts
           of the MtProtoSender such as the updates callback being fired
           or a pending request being confirmed.

           Any unhandled object (likely updates) will be passed to
           update_state.process(TLObject).
        """
        try:
            body = self.connection.recv()
        except (BufferError, InvalidChecksumError):
            # TODO BufferError, we should spot the cause...
            # "No more bytes left"; something wrong happened, clear
            # everything to be on the safe side, or:
            #
            # "This packet should be skipped"; since this may have
            # been a result for a request, invalidate every request
            # and just re-invoke them to avoid problems
            self._clear_all_pending()
            return
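
        # Decrypt the packet and dispatch whatever it contains to a handler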
        message, remote_msg_id, remote_seq = self._decode_msg(body)
        with BinaryReader(message) as reader:
            self._process_msg(remote_msg_id, remote_seq, reader, update_state)

    # endregion

    # region Low level processing

    def _send_message(self, message):
        """Sends the given Message(TLObject) encrypted through the network"""
        plain_text = \
            struct.pack('<QQ', self.session.salt, self.session.id) \
            + message.to_bytes()

        msg_key = utils.calc_msg_key(plain_text)
        key_id = struct.pack('<Q', self.session.auth_key.key_id)
        key, iv = utils.calc_key(self.session.auth_key.key, msg_key, True)
        cipher_text = AES.encrypt_ige(plain_text, key, iv)

        result = key_id + msg_key + cipher_text
        self.connection.send(result)

    def _decode_msg(self, body):
        """Decodes the received encrypted message body bytes"""
        message = None
        remote_msg_id = None
        remote_sequence = None

        with BinaryReader(body) as reader:
            if len(body) < 8:
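                # On protocol-level errors Telegram sends a bare negative
                # int32 instead of an encrypted message; b'l\xfe\xff\xff'
                # is -404, meaning the server rejected our auth key.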
                if body == b'l\xfe\xff\xff':
                    raise BrokenAuthKeyError()
                else:
                    raise BufferError("Can't decode packet ({})".format(body))

            # TODO Check for both auth key ID and msg_key correctness
            reader.read_long()  # remote_auth_key_id
            msg_key = reader.read(16)

            key, iv = utils.calc_key(self.session.auth_key.key, msg_key, False)
            plain_text = AES.decrypt_ige(
                reader.read(len(body) - reader.tell_position()), key, iv)

            with BinaryReader(plain_text) as plain_text_reader:
                plain_text_reader.read_long()  # remote_salt
                plain_text_reader.read_long()  # remote_session_id
                remote_msg_id = plain_text_reader.read_long()
                remote_sequence = plain_text_reader.read_int()
                msg_len = plain_text_reader.read_int()
                message = plain_text_reader.read(msg_len)

        return message, remote_msg_id, remote_sequence

    def _process_msg(self, msg_id, sequence, reader, state):
        """Processes and handles a Telegram message.

           Returns True if the message was handled correctly and doesn't
           need to be skipped. Returns False otherwise.
        """

        # TODO Check salt, session_id and sequence_number
        self._need_confirmation.append(msg_id)
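
        # Peek at the constructor ID, then rewind so the handler (or
        # tgread_object below) can read the whole object again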
        code = reader.read_int(signed=False)
        reader.seek(-4)

        # The following codes are "parsed manually"
        if code == 0xf35c6d01:  # rpc_result (response to an RPC call)
            return self._handle_rpc_result(msg_id, sequence, reader)

        if code == 0x347773c5:  # pong
            return self._handle_pong(msg_id, sequence, reader)

        if code == 0x73f1f8dc:  # msg_container
            return self._handle_container(msg_id, sequence, reader, state)

        if code == 0x3072cfa1:  # gzip_packed
            return self._handle_gzip_packed(msg_id, sequence, reader, state)

        if code == 0xedab447b:  # bad_server_salt
            return self._handle_bad_server_salt(msg_id, sequence, reader)

        if code == 0xa7eff811:  # bad_msg_notification
            return self._handle_bad_msg_notification(msg_id, sequence, reader)

        # msgs_ack, it may handle the request we wanted
        if code == 0x62d6b459:
            ack = reader.tgread_object()
            # Ignore every ack request *unless* we are logging out, which is
            # the only case where it seems to make sense. We also need to set
            # a non-None result since Telegram doesn't send a response for it.
            for msg_id in ack.msg_ids:
                r = self._pop_request_of_type(msg_id, LogOutRequest)
                if r:
                    r.result = True  # Telegram won't send this value
                    r.confirm_received.set()
                    self._logger.debug('Message ack confirmed: %s', r)

            return True

        # If the code is not parsed manually then it should be a TLObject.
        if code in tlobjects:
            result = reader.tgread_object()
            self.session.process_entities(result)
            if state:
                state.process(result)

            return True

        self._logger.debug('Unknown message: {}'.format(hex(code)))
        return False

    # endregion

    # region Message handling

    def _pop_request(self, msg_id):
        """Pops a pending REQUEST from self._pending_receive, or
           returns None if it's not found.
        """
        message = self._pending_receive.pop(msg_id, None)
        if message:
            return message.request

    def _pop_request_of_type(self, msg_id, t):
        """Pops a pending REQUEST from self._pending_receive if it matches
           the given type, or returns None if it's not found/doesn't match.
        """
        message = self._pending_receive.get(msg_id, None)
        if message and isinstance(message.request, t):
            return self._pending_receive.pop(msg_id).request

    def _clear_all_pending(self):
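        # Unblock every thread still waiting on a response before
        # dropping the pending requests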
        for r in self._pending_receive.values():
            r.request.confirm_received.set()
        self._pending_receive.clear()

    def _handle_pong(self, msg_id, sequence, reader):
        self._logger.debug('Handling pong')
        reader.read_int(signed=False)  # code
        received_msg_id = reader.read_long()

        request = self._pop_request(received_msg_id)
        if request:
            self._logger.debug('Pong confirmed a request')
            request.confirm_received.set()

        return True

    def _handle_container(self, msg_id, sequence, reader, state):
        self._logger.debug('Handling container')
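        # iter_read yields the inner message's id, sequence and length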
        for inner_msg_id, _, inner_len in MessageContainer.iter_read(reader):
            begin_position = reader.tell_position()

            # Note that this code is IMPORTANT for skipping RPC results of
            # lost requests (i.e., ones from the previous connection session)
            try:
                if not self._process_msg(inner_msg_id, sequence, reader, state):
                    reader.set_position(begin_position + inner_len)
            except:
                # If any error is raised, something went wrong; skip the packet
                reader.set_position(begin_position + inner_len)
                raise

        return True

    def _handle_bad_server_salt(self, msg_id, sequence, reader):
        self._logger.debug('Handling bad server salt')
        reader.read_int(signed=False)  # code
        bad_msg_id = reader.read_long()
        reader.read_int()  # bad_msg_seq_no
        reader.read_int()  # error_code
        new_salt = reader.read_long(signed=False)
        self.session.salt = new_salt
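
        # Now that the salt is updated, simply resend the rejected request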
        request = self._pop_request(bad_msg_id)
        if request:
            self.send(request)

        return True

    def _handle_bad_msg_notification(self, msg_id, sequence, reader):
        self._logger.debug('Handling bad message notification')
        reader.read_int(signed=False)  # code
        reader.read_long()  # request_id
        reader.read_int()  # request_sequence

        error_code = reader.read_int()
        error = BadMessageError(error_code)
        if error_code in (16, 17):
            # sent msg_id too low or too high (respectively).
            # Use the current msg_id to determine the right time offset.
            self.session.update_time_offset(correct_msg_id=msg_id)
            self._logger.debug('Read Bad Message error: ' + str(error))
            self._logger.debug('Attempting to use the correct time offset.')
            return True
        elif error_code == 32:
            # msg_seqno too low, so just pump it up by some "large" amount
            # TODO A better fix would be to start with a new fresh session ID
            self.session._sequence += 64
            return True
        elif error_code == 33:
            # msg_seqno too high never seems to happen but just in case
            self.session._sequence -= 16
            return True
        else:
            raise error

    def _handle_rpc_result(self, msg_id, sequence, reader):
        self._logger.debug('Handling RPC result')
        reader.read_int(signed=False)  # code
        request_id = reader.read_long()
        inner_code = reader.read_int(signed=False)
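
        # The body is either an rpc_error, a gzip_packed blob or the plain
        # response; inner_code tells us which (the reader is rewound below
        # when the plain response must be read in full)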
        request = self._pop_request(request_id)

        if inner_code == 0x2144ca19:  # RPC Error
            if self.session.report_errors and request:
                error = rpc_message_to_error(
                    reader.read_int(), reader.tgread_string(),
                    report_method=type(request).CONSTRUCTOR_ID
                )
            else:
                error = rpc_message_to_error(
                    reader.read_int(), reader.tgread_string()
                )

            # Acknowledge that we received the error
            self._need_confirmation.append(request_id)
            self._send_acknowledges()

            if request:
                request.rpc_error = error
                request.confirm_received.set()
            # else TODO Where should this error be reported?
            # Read may be async. Can an error not belong to a request?
            self._logger.debug('Read RPC error: %s', str(error))
        else:
            if request:
                self._logger.debug('Reading request response')
                if inner_code == 0x3072cfa1:  # GZip packed
                    unpacked_data = gzip.decompress(reader.tgread_bytes())
                    with BinaryReader(unpacked_data) as compressed_reader:
                        request.on_response(compressed_reader)
                else:
                    reader.seek(-4)
                    request.on_response(reader)

                self.session.process_entities(request.result)
                request.confirm_received.set()
                return True
            else:
                # If it's really a result for an RPC from a previous
                # connection session, it will be skipped by _handle_container()
                self._logger.debug('Lost request will be skipped.')
                return False

    def _handle_gzip_packed(self, msg_id, sequence, reader, state):
        self._logger.debug('Handling gzip packed data')
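        # Decompress the payload and process the inner message as usual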
        with BinaryReader(GzipPacked.read(reader)) as compressed_reader:
            return self._process_msg(msg_id, sequence, compressed_reader, state)

    # endregion