Mirror of https://github.com/LonamiWebs/Telethon.git, synced 2024-11-25 10:53:44 +03:00

Port mtproto from grammers

This commit is contained in:
parent 9636ef35c1
commit 269ee4f05f
@@ -90,13 +90,15 @@ def decrypt_data_v2(ciphertext: bytes, auth_key: AuthKey) -> bytes:
     return plaintext
 
 
-def generate_key_data_from_nonce(server_nonce: bytes, new_nonce: bytes) -> CalcKey:
-    hash1 = sha1(new_nonce + server_nonce).digest()
-    hash2 = sha1(server_nonce + new_nonce).digest()
-    hash3 = sha1(new_nonce + new_nonce).digest()
+def generate_key_data_from_nonce(server_nonce: int, new_nonce: int) -> CalcKey:
+    server_bytes = server_nonce.to_bytes(16)
+    new_bytes = new_nonce.to_bytes(32)
+    hash1 = sha1(new_bytes + server_bytes).digest()
+    hash2 = sha1(server_bytes + new_bytes).digest()
+    hash3 = sha1(new_bytes + new_bytes).digest()
 
     key = hash1 + hash2[:12]
-    iv = hash2[12:20] + hash3 + new_nonce[:4]
+    iv = hash2[12:20] + hash3 + new_bytes[:4]
     return CalcKey(key, iv)
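Since the nonces are now plain ints, callers convert to fixed-width big-endian bytes inside the function; a minimal sketch of deriving the temporary AES-IGE key data (random values, assumes Python 3.11+ where `int.from_bytes` defaults to big-endian, matching the code above):

```python
# Sketch only: derive the key/IV later used to decrypt server_DH_params.
import os

from telethon._impl.crypto import generate_key_data_from_nonce

server_nonce = int.from_bytes(os.urandom(16))   # 128-bit nonce as an int
new_nonce = int.from_bytes(os.urandom(32))      # 256-bit nonce as an int
key, iv = generate_key_data_from_nonce(server_nonce, new_nonce)
assert len(key) == 32 and len(iv) == 32         # AES-256 IGE key and IV
```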
@@ -108,3 +110,6 @@ def encrypt_ige(plaintext: bytes, key: bytes, iv: bytes) -> bytes:
 
 
+def decrypt_ige(padded_ciphertext: bytes, key: bytes, iv: bytes) -> bytes:
+    return ige_decrypt(padded_ciphertext, key, iv)
+
 __all__ = ["AuthKey", "encrypt_data_v2", "decrypt_data_v2"]
@@ -19,5 +19,9 @@ class AuthKey:
     def __bytes__(self) -> bytes:
         return self.data
 
-    def calc_new_nonce_hash(self, new_nonce: bytes, number: int) -> bytes:
-        return sha1(new_nonce + bytes((number,)) + self.aux_hash).digest()[4:]
+    def calc_new_nonce_hash(self, new_nonce: int, number: int) -> int:
+        return int.from_bytes(
+            sha1(new_nonce.to_bytes(32) + number.to_bytes(1) + self.aux_hash).digest()[
+                4:
+            ]
+        )
@@ -1,3 +1,4 @@
 import os
 import struct
 from hashlib import sha1
@@ -8,15 +9,21 @@ from ..tl.core import serialize_bytes_to
 def compute_fingerprint(key: PublicKey) -> int:
     buffer = bytearray()
-    serialize_bytes_to(buffer, int.to_bytes(key.n, (key.n.bit_length() + 7) // 8))
-    serialize_bytes_to(buffer, int.to_bytes(key.e, (key.e.bit_length() + 7) // 8))
+    serialize_bytes_to(buffer, key.n.to_bytes((key.n.bit_length() + 7) // 8))
+    serialize_bytes_to(buffer, key.e.to_bytes((key.e.bit_length() + 7) // 8))
     fingerprint = struct.unpack("<q", sha1(buffer).digest()[-8:])[0]
     assert isinstance(fingerprint, int)
     return fingerprint
 
 
-def encrypt_hashed(data: bytes, key: PublicKey) -> bytes:
-    return encrypt(sha1(data).digest() + data, key)
+def encrypt_hashed(data: bytes, key: PublicKey, random_data: bytes) -> bytes:
+    # Cannot use `rsa.encrypt` because it's not deterministic and requires its own padding.
+    padding_length = 235 - len(data)
+    assert padding_length >= 0 and len(random_data) >= padding_length
+    to_encrypt = sha1(data).digest() + data + random_data[:padding_length]
+    payload = int.from_bytes(to_encrypt)
+    encrypted = pow(payload, key.e, key.n)
+    return encrypted.to_bytes(256)
 
 
 # From my.telegram.org.
0 client/src/telethon/_impl/mtproto/__init__.py (new file)
307 client/src/telethon/_impl/mtproto/authentication.py (new file)
@@ -0,0 +1,307 @@
import os
import struct
import time
from dataclasses import dataclass
from hashlib import sha1
from typing import Tuple

from telethon._impl.crypto import decrypt_ige, encrypt_ige, generate_key_data_from_nonce
from telethon._impl.crypto.auth_key import AuthKey
from telethon._impl.crypto.factorize import factorize
from telethon._impl.crypto.rsa import RSA_KEYS, encrypt_hashed
from telethon._impl.tl.core.reader import Reader

from ..tl.mtproto.abcs import ServerDhInnerData as AbcServerDhInnerData
from ..tl.mtproto.abcs import ServerDhParams, SetClientDhParamsAnswer
from ..tl.mtproto.functions import req_dh_params, req_pq_multi, set_client_dh_params
from ..tl.mtproto.types import (
    ClientDhInnerData,
    DhGenFail,
    DhGenOk,
    DhGenRetry,
    PQInnerData,
    ResPq,
    ServerDhInnerData,
    ServerDhParamsFail,
    ServerDhParamsOk,
)


@dataclass
class Step1:
    nonce: int


@dataclass
class Step2:
    nonce: int
    server_nonce: int
    new_nonce: int


@dataclass
class Step3:
    nonce: int
    server_nonce: int
    new_nonce: int
    gab: int
    time_offset: int


@dataclass
class CreatedKey:
    auth_key: AuthKey
    time_offset: int
    first_salt: int


@dataclass
class DhGenData:
    nonce: int
    server_nonce: int
    new_nonce_hash: int
    nonce_number: int


def _do_step1(random_bytes: bytes) -> Tuple[bytes, Step1]:
    assert len(random_bytes) == 16
    nonce = int.from_bytes(random_bytes)
    return req_pq_multi(nonce=nonce), Step1(nonce=nonce)


def step1() -> Tuple[bytes, Step1]:
    return _do_step1(os.urandom(16))


def _do_step2(data: Step1, response: bytes, random_bytes: bytes) -> Tuple[bytes, Step2]:
    assert len(random_bytes) == 288
    nonce = data.nonce
    res_pq = ResPq.from_bytes(response)

    if len(res_pq.pq) != 8:
        raise ValueError(f"invalid pq size: {len(res_pq.pq)}")

    pq = struct.unpack(">Q", res_pq.pq)[0]
    p, q = factorize(pq)

    new_nonce = int.from_bytes(random_bytes[:32])
    random_bytes = random_bytes[32:]

    # https://core.telegram.org/mtproto/auth_key#dh-exchange-initiation
    p_bytes = p.to_bytes((p.bit_length() + 7) // 8)
    q_bytes = q.to_bytes((q.bit_length() + 7) // 8)

    pq_inner_data = bytes(
        PQInnerData(
            pq=res_pq.pq,
            p=p_bytes,
            q=q_bytes,
            nonce=nonce,
            server_nonce=res_pq.server_nonce,
            new_nonce=new_nonce,
        )
    )

    try:
        fingerprint = next(
            fp for fp in res_pq.server_public_key_fingerprints if fp in RSA_KEYS
        )
    except StopIteration:
        raise ValueError(
            f"unknown fingerprints: {res_pq.server_public_key_fingerprints}"
        )

    key = RSA_KEYS[fingerprint]
    ciphertext = encrypt_hashed(pq_inner_data, key, random_bytes)

    return req_dh_params(
        nonce=nonce,
        server_nonce=res_pq.server_nonce,
        p=p_bytes,
        q=q_bytes,
        public_key_fingerprint=fingerprint,
        encrypted_data=ciphertext,
    ), Step2(nonce=nonce, server_nonce=res_pq.server_nonce, new_nonce=new_nonce)


def step2(data: Step1, response: bytes) -> Tuple[bytes, Step2]:
    return _do_step2(data, response, os.urandom(288))


def _do_step3(
    data: Step2, response: bytes, random_bytes: bytes, now: int
) -> Tuple[bytes, Step3]:
    assert len(random_bytes) == 272

    nonce = data.nonce
    server_nonce = data.server_nonce
    new_nonce = data.new_nonce

    server_dh_params = ServerDhParams.from_bytes(response)
    if isinstance(server_dh_params, ServerDhParamsFail):
        check_nonce(server_dh_params.nonce, nonce)
        check_server_nonce(server_dh_params.server_nonce, server_nonce)

        new_nonce_hash = int.from_bytes(sha1(new_nonce.to_bytes(16)).digest()[4:])
        check_new_nonce_hash(server_dh_params.new_nonce_hash, new_nonce_hash)

        raise ValueError("server failed to provide dh params")
    else:
        assert isinstance(server_dh_params, ServerDhParamsOk)

    check_nonce(server_dh_params.nonce, nonce)
    check_server_nonce(server_dh_params.server_nonce, server_nonce)

    if len(server_dh_params.encrypted_answer) % 16 != 0:
        raise ValueError(
            f"encrypted response not padded with size: {len(server_dh_params.encrypted_answer)}"
        )

    key, iv = generate_key_data_from_nonce(server_nonce, new_nonce)
    plain_text_answer = decrypt_ige(server_dh_params.encrypted_answer, key, iv)

    got_answer_hash = plain_text_answer[:20]
    plain_text_reader = Reader(plain_text_answer[20:])

    server_dh_inner = AbcServerDhInnerData._read_from(plain_text_reader)
    assert isinstance(server_dh_inner, ServerDhInnerData)

    expected_answer_hash = sha1(
        plain_text_answer[20 : 20 + plain_text_reader._pos]
    ).digest()

    if got_answer_hash != expected_answer_hash:
        raise ValueError("invalid answer hash")

    check_nonce(server_dh_inner.nonce, nonce)
    check_server_nonce(server_dh_inner.server_nonce, server_nonce)

    dh_prime = int.from_bytes(server_dh_inner.dh_prime)
    g = server_dh_inner.g
    g_a = int.from_bytes(server_dh_inner.g_a)

    time_offset = server_dh_inner.server_time - now

    b = int.from_bytes(random_bytes[:256])
    g_b = pow(g, b, dh_prime)
    gab = pow(g_a, b, dh_prime)

    random_bytes = random_bytes[256:]

    # https://core.telegram.org/mtproto/auth_key#dh-key-exchange-complete
    check_g_in_range(g, 1, dh_prime - 1)
    check_g_in_range(g_a, 1, dh_prime - 1)
    check_g_in_range(g_b, 1, dh_prime - 1)

    safety_range = 1 << (2048 - 64)
    check_g_in_range(g_a, safety_range, dh_prime - safety_range)
    check_g_in_range(g_b, safety_range, dh_prime - safety_range)

    client_dh_inner = bytes(
        ClientDhInnerData(
            nonce=nonce,
            server_nonce=server_nonce,
            retry_id=0,  # TODO use an actual retry_id
            g_b=g_b.to_bytes((g_b.bit_length() + 7) // 8),
        )
    )

    client_dh_inner_hashed = sha1(client_dh_inner).digest() + client_dh_inner
    client_dh_inner_hashed += random_bytes[
        : (16 - (len(client_dh_inner_hashed) % 16)) % 16
    ]

    client_dh_encrypted = encrypt_ige(client_dh_inner_hashed, key, iv)

    return set_client_dh_params(
        nonce=nonce, server_nonce=server_nonce, encrypted_data=client_dh_encrypted
    ), Step3(
        nonce=nonce,
        server_nonce=server_nonce,
        new_nonce=new_nonce,
        gab=gab,
        time_offset=time_offset,
    )


def step3(data: Step2, response: bytes) -> Tuple[bytes, Step3]:
    return _do_step3(data, response, os.urandom(272), int(time.time()))


def create_key(data: Step3, response: bytes) -> CreatedKey:
    nonce = data.nonce
    server_nonce = data.server_nonce
    new_nonce = data.new_nonce
    gab = data.gab
    time_offset = data.time_offset

    dh_gen_answer = SetClientDhParamsAnswer.from_bytes(response)

    if isinstance(dh_gen_answer, DhGenOk):
        dh_gen = DhGenData(
            nonce=dh_gen_answer.nonce,
            server_nonce=dh_gen_answer.server_nonce,
            new_nonce_hash=dh_gen_answer.new_nonce_hash1,
            nonce_number=1,
        )
    elif isinstance(dh_gen_answer, DhGenRetry):
        dh_gen = DhGenData(
            nonce=dh_gen_answer.nonce,
            server_nonce=dh_gen_answer.server_nonce,
            new_nonce_hash=dh_gen_answer.new_nonce_hash2,
            nonce_number=2,
        )
    elif isinstance(dh_gen_answer, DhGenFail):
        dh_gen = DhGenData(
            nonce=dh_gen_answer.nonce,
            server_nonce=dh_gen_answer.server_nonce,
            new_nonce_hash=dh_gen_answer.new_nonce_hash3,
            nonce_number=3,
        )
    else:
        raise ValueError(f"unknown dh gen answer type: {dh_gen_answer}")

    check_nonce(dh_gen.nonce, nonce)
    check_server_nonce(dh_gen.server_nonce, server_nonce)

    auth_key = AuthKey.from_bytes(gab.to_bytes(256))

    new_nonce_hash = auth_key.calc_new_nonce_hash(new_nonce, dh_gen.nonce_number)
    check_new_nonce_hash(dh_gen.new_nonce_hash, new_nonce_hash)

    first_salt = struct.unpack(
        "<q",
        bytes(
            a ^ b
            for a, b in zip(new_nonce.to_bytes(32)[:8], server_nonce.to_bytes(16)[:8])
        ),
    )[0]

    if dh_gen.nonce_number == 1:
        return CreatedKey(
            auth_key=auth_key,
            time_offset=time_offset,
            first_salt=first_salt,
        )
    else:
        raise ValueError("dh gen fail")


def check_nonce(got: int, expected: int) -> None:
    if got != expected:
        raise ValueError(f"invalid nonce, expected: {expected}, got: {got}")


def check_server_nonce(got: int, expected: int) -> None:
    if got != expected:
        raise ValueError(f"invalid server nonce, expected: {expected}, got: {got}")


def check_new_nonce_hash(got: int, expected: int) -> None:
    if got != expected:
        raise ValueError(f"invalid new nonce, expected: {expected}, got: {got}")


def check_g_in_range(value: int, low: int, high: int) -> None:
    if not (low < value < high):
        raise ValueError(f"g parameter {value} not in range({low+1}, {high})")
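For reference, a minimal sketch of how the three request/response steps plus `create_key` would be driven. This is not part of the diff; `send_plain` is an assumed helper that delivers an unencrypted MTProto payload (e.g. via `Plain` plus a transport) and returns the server's reply body.

```python
from typing import Callable

from telethon._impl.mtproto.authentication import (
    CreatedKey,
    create_key,
    step1,
    step2,
    step3,
)


def generate_auth_key(send_plain: Callable[[bytes], bytes]) -> CreatedKey:
    request, state1 = step1()                                # req_pq_multi
    request, state2 = step2(state1, send_plain(request))     # req_DH_params
    request, state3 = step3(state2, send_plain(request))     # set_client_DH_params
    return create_key(state3, send_plain(request))           # DhGenOk -> CreatedKey
```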
12 client/src/telethon/_impl/mtproto/mtp/__init__.py (new file)
@@ -0,0 +1,12 @@
from .encrypted import Encrypted
from .plain import Plain
from .types import Deserialization, MsgId, Mtp, RpcError

__all__ = [
    "Encrypted",
    "Plain",
    "Deserialization",
    "MsgId",
    "Mtp",
    "RpcError",
]
409 client/src/telethon/_impl/mtproto/mtp/encrypted.py (new file)
@@ -0,0 +1,409 @@
import os
import struct
import time
from typing import List, Optional, Tuple, Union

from ...crypto import AuthKey, decrypt_data_v2, encrypt_data_v2
from ...tl.mtproto.abcs import BadMsgNotification as AbcBadMsgNotification
from ...tl.mtproto.abcs import DestroySessionRes
from ...tl.mtproto.abcs import MsgDetailedInfo as AbcMsgDetailedInfo
from ...tl.mtproto.functions import get_future_salts
from ...tl.mtproto.types import (
    BadMsgNotification,
    BadServerSalt,
    DestroySessionNone,
    DestroySessionOk,
    FutureSalt,
    FutureSalts,
    GzipPacked,
    HttpWait,
    Message,
    MsgContainer,
    MsgDetailedInfo,
    MsgNewDetailedInfo,
    MsgResendReq,
    MsgsAck,
    MsgsAllInfo,
    MsgsStateInfo,
    MsgsStateReq,
    NewSessionCreated,
    Pong,
    RpcAnswerDropped,
    RpcAnswerDroppedRunning,
    RpcAnswerUnknown,
)
from ...tl.mtproto.types import RpcError as GeneratedRpcError
from ...tl.mtproto.types import RpcResult
from ...tl.types import (
    Updates,
    UpdatesCombined,
    UpdateShort,
    UpdateShortChatMessage,
    UpdateShortMessage,
    UpdateShortSentMessage,
    UpdatesTooLong,
)
from ..utils import (
    CONTAINER_MAX_LENGTH,
    CONTAINER_MAX_SIZE,
    DEFAULT_COMPRESSION_THRESHOLD,
    MESSAGE_SIZE_OVERHEAD,
    check_message_buffer,
    gzip_compress,
    gzip_decompress,
    message_requires_ack,
)
from .types import Deserialization, MsgId, Mtp, RpcError

NUM_FUTURE_SALTS = 64

SALT_USE_DELAY = 60

UPDATE_IDS = {
    Updates.constructor_id(),
    UpdatesCombined.constructor_id(),
    UpdateShort.constructor_id(),
    UpdateShortChatMessage.constructor_id(),
    UpdateShortMessage.constructor_id(),
    UpdateShortSentMessage.constructor_id(),
    UpdatesTooLong.constructor_id(),
}

HEADER_LEN = 8 + 8  # salt, client_id

CONTAINER_HEADER_LEN = (8 + 4 + 4) + (4 + 4)  # msg_id, seq_no, size, constructor, len


class Encrypted(Mtp):
    def __init__(
        self,
        auth_key: bytes,
        *,
        time_offset: Optional[int] = None,
        first_salt: Optional[int] = None,
        compression_threshold: Optional[int] = DEFAULT_COMPRESSION_THRESHOLD,
    ) -> None:
        self._auth_key: AuthKey = AuthKey.from_bytes(auth_key)
        self._time_offset: int = time_offset or 0
        self._salts: List[FutureSalt] = [
            FutureSalt(valid_since=0, valid_until=0x7FFFFFFF, salt=first_salt or 0)
        ]
        self._start_salt_time: Optional[Tuple[int, int]] = None
        self._client_id: int = struct.unpack("<q", os.urandom(8))[0]
        self._sequence: int = 0
        self._last_msg_id: int = 0
        self._pending_ack: List[int] = []
        self._compression_threshold = compression_threshold
        self._rpc_results: List[Tuple[MsgId, Union[bytes, ValueError]]] = []
        self._updates: List[bytes] = []
        self._buffer = bytearray()
        self._msg_count: int = 0

        self._handlers = {
            RpcResult.constructor_id(): self._handle_rpc_result,
            MsgsAck.constructor_id(): self._handle_ack,
            BadMsgNotification.constructor_id(): self._handle_bad_notification,
            BadServerSalt.constructor_id(): self._handle_bad_notification,
            MsgsStateReq.constructor_id(): self._handle_state_req,
            MsgsStateInfo.constructor_id(): self._handle_state_info,
            MsgsAllInfo.constructor_id(): self._handle_msg_all,
            MsgDetailedInfo.constructor_id(): self._handle_detailed_info,
            MsgNewDetailedInfo.constructor_id(): self._handle_detailed_info,
            MsgResendReq.constructor_id(): self._handle_msg_resend,
            FutureSalt.constructor_id(): self._handle_future_salt,
            FutureSalts.constructor_id(): self._handle_future_salts,
            Pong.constructor_id(): self._handle_pong,
            DestroySessionOk.constructor_id(): self._handle_destroy_session,
            DestroySessionNone.constructor_id(): self._handle_destroy_session,
            NewSessionCreated.constructor_id(): self._handle_new_session_created,
            GzipPacked.constructor_id(): self._handle_gzip_packed,
            HttpWait.constructor_id(): self._handle_http_wait,
        }

    @property
    def auth_key(self) -> bytes:
        return self._auth_key.data

    def _correct_time_offset(self, msg_id: int) -> None:
        now = time.time()
        correct = msg_id >> 32
        self._time_offset = correct - int(now)

    def _get_new_msg_id(self) -> int:
        now = time.time()

        new_msg_id = int((now + self._time_offset) * 0x100000000)
        if self._last_msg_id >= new_msg_id:
            new_msg_id = self._last_msg_id + 4

        self._last_msg_id = new_msg_id
        return new_msg_id

    def _get_seq_no(self, content_related: bool) -> int:
        if content_related:
            self._sequence += 2
            return self._sequence - 1
        else:
            return self._sequence

    def _serialize_msg(self, body: bytes, content_related: bool) -> MsgId:
        msg_id = self._get_new_msg_id()
        seq_no = self._get_seq_no(content_related)
        self._buffer += struct.pack("<qii", msg_id, seq_no, len(body))
        self._buffer += body
        self._msg_count += 1
        return MsgId(msg_id)

    def _finalize_plain(self) -> bytes:
        if not self._msg_count:
            return b""

        if self._msg_count == 1:
            del self._buffer[:CONTAINER_HEADER_LEN]

        self._buffer[:HEADER_LEN] = struct.pack(
            "<qq", self._salts[-1].salt if self._salts else 0, self._client_id
        )

        if self._msg_count != 1:
            self._buffer[HEADER_LEN : HEADER_LEN + CONTAINER_HEADER_LEN] = struct.pack(
                "<qiiIi",
                self._get_new_msg_id(),
                self._get_seq_no(False),
                len(self._buffer) - HEADER_LEN - CONTAINER_HEADER_LEN + 8,
                MsgContainer.constructor_id(),
                self._msg_count,
            )

        self._msg_count = 0
        result = bytes(self._buffer)
        self._buffer.clear()
        return result

    def _process_message(self, message: Message) -> None:
        if message_requires_ack(message):
            self._pending_ack.append(message.msg_id)

        # https://core.telegram.org/mtproto/service_messages
        # https://core.telegram.org/mtproto/service_messages_about_messages
        # TODO verify what needs ack and what doesn't
        constructor_id = struct.unpack_from("<I", message.body)[0]
        self._handlers.get(constructor_id, self._handle_update)(message)

    def _handle_rpc_result(self, message: Message) -> None:
        assert isinstance(message.body, RpcResult)
        req_msg_id = message.body.req_msg_id
        result = message.body.result

        msg_id = MsgId(req_msg_id)
        inner_constructor = struct.unpack_from("<I", result)[0]

        if inner_constructor == GeneratedRpcError.constructor_id():
            self._rpc_results.append(
                (
                    msg_id,
                    RpcError.from_mtproto_error(GeneratedRpcError.from_bytes(result)),
                )
            )
        elif inner_constructor == RpcAnswerUnknown.constructor_id():
            pass  # msg_id = rpc_drop_answer.msg_id
        elif inner_constructor == RpcAnswerDroppedRunning.constructor_id():
            pass  # msg_id = rpc_drop_answer.msg_id, original_request.msg_id
        elif inner_constructor == RpcAnswerDropped.constructor_id():
            pass  # dropped
        elif inner_constructor == GzipPacked.constructor_id():
            body = gzip_decompress(GzipPacked.from_bytes(result))
            self._store_own_updates(body)
            self._rpc_results.append((msg_id, body))
        else:
            self._store_own_updates(result)
            self._rpc_results.append((msg_id, result))

    def _store_own_updates(self, body: bytes) -> None:
        constructor_id = struct.unpack_from("I", body)[0]
        if constructor_id in UPDATE_IDS:
            self._updates.append(body)

    def _handle_ack(self, message: Message) -> None:
        # TODO notify about this somehow
        MsgsAck.from_bytes(message.body)

    def _handle_bad_notification(self, message: Message) -> None:
        # TODO notify about this somehow
        bad_msg = AbcBadMsgNotification.from_bytes(message.body)
        if isinstance(bad_msg, BadServerSalt):
            self._rpc_results.append(
                (
                    MsgId(bad_msg.bad_msg_id),
                    ValueError(f"bad msg: {bad_msg.error_code}"),
                )
            )

            self._salts.clear()
            self._salts.append(
                FutureSalt(
                    valid_since=0, valid_until=0x7FFFFFFF, salt=bad_msg.new_server_salt
                )
            )

            self.push(get_future_salts(num=NUM_FUTURE_SALTS))
            return

        assert isinstance(bad_msg, BadMsgNotification)
        self._rpc_results.append(
            (MsgId(bad_msg.bad_msg_id), ValueError(f"bad msg: {bad_msg.error_code}"))
        )

        if bad_msg.error_code in (16, 17):
            self._correct_time_offset(message.msg_id)
        elif bad_msg.error_code == 32:
            # TODO start with a fresh session rather than guessing
            self._sequence += 64
        elif bad_msg.error_code == 33:
            # TODO start with a fresh session rather than guessing
            self._sequence -= 16

    def _handle_state_req(self, message: Message) -> None:
        # TODO implement
        MsgsStateReq.from_bytes(message.body)

    def _handle_state_info(self, message: Message) -> None:
        # TODO implement
        MsgsStateInfo.from_bytes(message.body)

    def _handle_msg_all(self, message: Message) -> None:
        # TODO implement
        MsgsAllInfo.from_bytes(message.body)

    def _handle_detailed_info(self, message: Message) -> None:
        # TODO properly implement
        msg_detailed = AbcMsgDetailedInfo.from_bytes(message.body)
        if isinstance(msg_detailed, MsgDetailedInfo):
            self._pending_ack.append(msg_detailed.answer_msg_id)
        elif isinstance(msg_detailed, MsgNewDetailedInfo):
            self._pending_ack.append(msg_detailed.answer_msg_id)
        else:
            assert False

    def _handle_msg_resend(self, message: Message) -> None:
        # TODO implement
        MsgResendReq.from_bytes(message.body)

    def _handle_future_salts(self, message: Message) -> None:
        # TODO implement
        salts = FutureSalts.from_bytes(message.body)
        self._rpc_results.append((MsgId(salts.req_msg_id), message.body))

        self._start_salt_time = (salts.now, int(time.time()))
        self._salts = salts.salts
        self._salts.sort(key=lambda salt: -salt.valid_since)

    def _handle_future_salt(self, message: Message) -> None:
        FutureSalt.from_bytes(message.body)
        assert False  # no request should cause this

    def _handle_pong(self, message: Message) -> None:
        pong = Pong.from_bytes(message.body)
        self._rpc_results.append((MsgId(pong.msg_id), message.body))

    def _handle_destroy_session(self, message: Message) -> None:
        # TODO implement
        DestroySessionRes.from_bytes(message.body)

    def _handle_new_session_created(self, message: Message) -> None:
        # TODO implement
        new_session = NewSessionCreated.from_bytes(message.body)
        self._salts.clear()
        self._salts.append(
            FutureSalt(
                valid_since=0, valid_until=0x7FFFFFFF, salt=new_session.server_salt
            )
        )

    def _handle_container(self, message: Message) -> None:
        container = MsgContainer.from_bytes(message.body)
        for inner_message in container.messages:
            self._process_message(inner_message)

    def _handle_gzip_packed(self, message: Message) -> None:
        container = GzipPacked.from_bytes(message.body)
        inner_body = gzip_decompress(container)
        self._process_message(
            Message(
                msg_id=message.msg_id,
                seqno=message.seqno,
                bytes=len(inner_body),
                body=inner_body,
            )
        )

    def _handle_http_wait(self, message: Message) -> None:
        # TODO implement
        HttpWait.from_bytes(message.body)

    def _handle_update(self, message: Message) -> None:
        # TODO if this `Updates` cannot be deserialized, `getDifference` should be used
        self._updates.append(message.body)

    def push(self, request: bytes) -> Optional[MsgId]:
        if not self._buffer:
            # Reserve space for `finalize`
            self._buffer += bytes(HEADER_LEN + CONTAINER_HEADER_LEN)

        if self._pending_ack:
            self._serialize_msg(bytes(MsgsAck(msg_ids=self._pending_ack)), False)
            self._pending_ack = []

        if self._start_salt_time:
            start_secs, start_instant = self._start_salt_time
            if len(self._salts) >= 2:
                salt = self._salts[-2]
                now = start_secs + (start_instant - int(time.time()))
                if now >= salt.valid_since + SALT_USE_DELAY:
                    self._salts.pop()
                    if len(self._salts) == 1:
                        self._serialize_msg(
                            bytes(get_future_salts(num=NUM_FUTURE_SALTS)), True
                        )

        if self._msg_count == CONTAINER_MAX_LENGTH:
            return None

        assert len(request) + MESSAGE_SIZE_OVERHEAD <= CONTAINER_MAX_SIZE
        assert len(request) % 4 == 0

        body = request
        if self._compression_threshold is not None:
            if len(request) >= self._compression_threshold:
                compressed = bytes(GzipPacked(packed_data=gzip_compress(request)))
                if len(compressed) < len(request):
                    body = compressed

        new_size = len(self._buffer) + len(body) + MESSAGE_SIZE_OVERHEAD
        if new_size >= CONTAINER_MAX_SIZE:
            return None

        return self._serialize_msg(body, True)

    def finalize(self) -> bytes:
        buffer = self._finalize_plain()
        if not buffer:
            return buffer
        else:
            return encrypt_data_v2(buffer, self._auth_key)

    def deserialize(self, payload: bytes) -> Deserialization:
        check_message_buffer(payload)

        plaintext = decrypt_data_v2(payload, self._auth_key)

        _, client_id = struct.unpack_from("<qq", plaintext)  # salt, client_id
        if client_id != self._client_id:
            raise RuntimeError("wrong session id")

        self._process_message(Message.from_bytes(memoryview(plaintext)[16:]))

        result = Deserialization(rpc_results=self._rpc_results, updates=self._updates)
        self._rpc_results = []
        self._updates = []
        return result
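A rough sketch of the intended push/finalize/deserialize call pattern (not part of this diff; `send_payload` is an assumed helper that writes the encrypted payload over a transport and returns the server's next encrypted payload):

```python
from typing import Callable

from telethon._impl.mtproto.mtp import Encrypted


def invoke(mtp: Encrypted, request: bytes, send_payload: Callable[[bytes], bytes]) -> bytes:
    msg_id = mtp.push(request)                 # queue; may be batched into a msg_container
    assert msg_id is not None
    response = send_payload(mtp.finalize())    # salt + client_id header, then encrypt_data_v2
    results = mtp.deserialize(response)        # decrypt and dispatch service messages
    for req_id, body in results.rpc_results:
        if req_id == msg_id:
            if isinstance(body, ValueError):
                raise body
            return body
    raise RuntimeError("response did not contain a result for this request")
```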
52 client/src/telethon/_impl/mtproto/mtp/plain.py (new file)
@@ -0,0 +1,52 @@
import struct
from typing import Optional

from ..utils import check_message_buffer
from .types import Deserialization, MsgId, Mtp


class Plain(Mtp):
    def __init__(self) -> None:
        self._buffer = bytearray()

    # https://core.telegram.org/mtproto/description#unencrypted-message
    def push(self, request: bytes) -> Optional[MsgId]:
        if self._buffer:
            return None

        # https://core.telegram.org/mtproto/samples-auth_key seems to
        # imply a need to generate a valid `message_id`, but 0 works too.
        msg_id = MsgId(0)

        # auth_key_id = 0, message_id, message_data_length.
        self._buffer += struct.pack("<qqi", 0, msg_id, len(request))
        self._buffer += request  # message_data
        return msg_id

    def finalize(self) -> bytes:
        result = bytes(self._buffer)
        self._buffer.clear()
        return result

    def deserialize(self, payload: bytes) -> Deserialization:
        check_message_buffer(payload)

        auth_key_id, msg_id, length = struct.unpack_from("<qqi", payload)
        if auth_key_id != 0:
            raise ValueError(f"bad auth key, expected: 0, got: {auth_key_id}")

        # https://core.telegram.org/mtproto/description#message-identifier-msg-id
        if msg_id <= 0 or (msg_id % 4) != 1:
            raise ValueError(f"bad msg id, got: {msg_id}")

        if length < 0:
            raise ValueError(f"bad length: expected >= 0, got: {length}")

        if 20 + length > len(payload):
            raise ValueError(
                f"message too short, expected: {20 + length}, got {len(payload)}"
            )

        return Deserialization(
            rpc_results=[(MsgId(0), payload[20 : 20 + length])], updates=[]
        )
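A minimal sketch of the unencrypted envelope `Plain.push()` builds (auth_key_id of 0, a msg_id of 0, and the body length, all little-endian, followed by the body), consistent with the 24-byte payloads checked by the tests added later in this commit:

```python
import struct

request = bytes(8)   # some 4-byte-aligned serialized TL request
envelope = struct.pack("<qqi", 0, 0, len(request)) + request
assert len(envelope) == 8 + 8 + 4 + len(request)
```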
76 client/src/telethon/_impl/mtproto/mtp/types.py (new file)
@@ -0,0 +1,76 @@
import re
from abc import ABC, abstractmethod
from dataclasses import dataclass
from typing import List, NewType, Optional, Self, Tuple, Union

from ...tl.mtproto.types import RpcError as GeneratedRpcError

MsgId = NewType("MsgId", int)


@dataclass
class Deserialization:
    rpc_results: List[Tuple[MsgId, Union[bytes, ValueError]]]
    updates: List[bytes]


class RpcError(ValueError):
    def __init__(
        self,
        *,
        code: int = 0,
        name: str = "",
        value: Optional[int] = None,
        caused_by: Optional[int] = None,
    ) -> None:
        append_value = f" ({value})" if value else None
        super().__init__(f"rpc error {code}: {name}{append_value}")

        self.code = code
        self.name = name
        self.value = value
        self.caused_by = caused_by

    @classmethod
    def from_mtproto_error(cls, error: GeneratedRpcError) -> Self:
        if m := re.search(r"-?\d+", error.error_message):
            name = re.sub(
                r"_{2,}",
                "_",
                error.error_message[: m.start()] + error.error_message[m.end() :],
            ).strip("_")
            value = int(m[0])
        else:
            name = error.error_message
            value = None

        return cls(
            code=error.error_code,
            name=name,
            value=value,
            caused_by=None,
        )

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, self.__class__):
            return NotImplemented
        return (
            self.code == other.code
            and self.name == other.name
            and self.value == other.value
        )


# https://core.telegram.org/mtproto/description
class Mtp(ABC):
    @abstractmethod
    def push(self, request: bytes) -> Optional[MsgId]:
        pass

    @abstractmethod
    def finalize(self) -> bytes:
        pass

    @abstractmethod
    def deserialize(self, payload: bytes) -> Deserialization:
        pass
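As exercised by the tests added later in this commit, `from_mtproto_error` splits the numeric part of the error message into `value` and keeps the remaining text as `name`:

```python
from telethon._impl.mtproto.mtp import RpcError
from telethon._impl.tl.mtproto.types import RpcError as GeneratedRpcError

err = RpcError.from_mtproto_error(
    GeneratedRpcError(error_code=420, error_message="FLOOD_WAIT_31")
)
assert (err.code, err.name, err.value) == (420, "FLOOD_WAIT", 31)
```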
3 client/src/telethon/_impl/mtproto/transport/__init__.py (new file)
@@ -0,0 +1,3 @@
from .abcs import Transport

__all__ = ["Transport"]
11 client/src/telethon/_impl/mtproto/transport/abcs.py (new file)
@@ -0,0 +1,11 @@
from abc import ABC, abstractmethod


class Transport(ABC):
    @abstractmethod
    def pack(self, input: bytes, output: bytearray) -> None:
        pass

    @abstractmethod
    def unpack(self, input: bytes, output: bytearray) -> None:
        pass
58 client/src/telethon/_impl/mtproto/transport/abridged.py (new file)
@@ -0,0 +1,58 @@
import struct

from .abcs import Transport


class Abridged(Transport):
    __slots__ = ("_init",)

    """
    Implementation of the [abridged transport]:

    ```text
    +----+----...----+
    | len| payload   |
    +----+----...----+
     ^^^^ 1 or 4 bytes
    ```

    [abridged transport]: https://core.telegram.org/mtproto/mtproto-transports#abridged
    """

    def __init__(self) -> None:
        self._init = False

    def pack(self, input: bytes, output: bytearray) -> None:
        assert len(input) % 4 == 0

        if not self._init:
            output += b"\xef"
            self._init = True

        length = len(input) // 4
        if length < 127:
            output += struct.pack("<b", length)
        else:
            output += struct.pack("<i", 0x7F | (length << 8))
        output += input

    def unpack(self, input: bytes, output: bytearray) -> None:
        if not input:
            raise ValueError("missing bytes, expected: 1, got: 0")

        length = input[0]
        if length < 127:
            header_len = 1
        elif len(input) < 4:
            raise ValueError(f"missing bytes, expected: 4, got: {len(input)}")
        else:
            header_len = 4
            length = struct.unpack_from("<i", input)[0] >> 8

        length *= 4
        if len(input) < header_len + length:
            raise ValueError(
                f"missing bytes, expected: {header_len + length}, got: {len(input)}"
            )

        output += memoryview(input)[header_len : header_len + length]
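A small round-trip sketch of the abridged framing (assuming, as in practice, that the one-time 0xef init byte is only sent by the client and never echoed back by the server):

```python
from telethon._impl.mtproto.transport.abridged import Abridged

t = Abridged()
packed = bytearray()
t.pack(b"\x01\x02\x03\x04", packed)          # 4 bytes -> a length byte of 1 (in words)
assert bytes(packed) == b"\xef\x01\x01\x02\x03\x04"

unpacked = bytearray()
t.unpack(bytes(packed[1:]), unpacked)        # skip the one-time init byte
assert bytes(unpacked) == b"\x01\x02\x03\x04"
```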
57 client/src/telethon/_impl/mtproto/transport/full.py (new file)
@@ -0,0 +1,57 @@
import struct
from zlib import crc32

from .abcs import Transport


class Full(Transport):
    __slots__ = ("_send_seq", "_recv_seq")

    """
    Implementation of the [full transport]:

    ```text
    +----+----+----...----+----+
    | len| seq| payload   | crc|
    +----+----+----...----+----+
     ^^^^ 4 bytes
    ```

    [full transport]: https://core.telegram.org/mtproto/mtproto-transports#full
    """

    def __init__(self) -> None:
        self._send_seq = 0
        self._recv_seq = 0

    def pack(self, input: bytes, output: bytearray) -> None:
        assert len(input) % 4 == 0

        length = len(input) + 12
        output += struct.pack("<ii", length, self._send_seq)
        output += input
        output += struct.pack("<i", crc32(memoryview(output)[-(length - 4) :]))
        self._send_seq += 1

    def unpack(self, input: bytes, output: bytearray) -> None:
        if len(input) < 4:
            raise ValueError(f"missing bytes, expected: 4, got: {len(input)}")

        length = struct.unpack_from("<i", input)[0]
        if length < 12:
            raise ValueError(f"bad length, expected > 12, got: {length}")

        if len(input) < length:
            raise ValueError(f"missing bytes, expected: {length}, got: {len(input)}")

        seq = struct.unpack_from("<i", input, 4)[0]
        if seq != self._recv_seq:
            raise ValueError(f"bad seq, expected: {self._recv_seq}, got: {seq}")

        crc = struct.unpack_from("<I", input, length - 4)[0]
        valid_crc = crc32(memoryview(input)[:-4])
        if crc != valid_crc:
            raise ValueError(f"bad crc, expected: {valid_crc}, got: {crc}")

        self._recv_seq += 1
        output += memoryview(input)[8:-4]
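A sketch of the full framing from the receiving side: hand-build one frame (length, sequence number, payload, CRC32 of everything before the checksum) and let `Full.unpack()` validate and strip it:

```python
import struct
from zlib import crc32

from telethon._impl.mtproto.transport.full import Full

payload = b"\x01\x02\x03\x04"
frame = struct.pack("<ii", len(payload) + 12, 0) + payload   # len, seq 0, body
frame += struct.pack("<I", crc32(frame))                     # crc over len + seq + body

output = bytearray()
Full().unpack(frame, output)    # checks length, expected seq 0, and the crc
assert bytes(output) == payload
```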
43 client/src/telethon/_impl/mtproto/transport/intermediate.py (new file)
@@ -0,0 +1,43 @@
import struct

from .abcs import Transport


class Intermediate(Transport):
    __slots__ = ("_init",)

    """
    Implementation of the [intermediate transport]:

    ```text
    +----+----...----+
    | len| payload   |
    +----+----...----+
     ^^^^ 4 bytes
    ```

    [intermediate transport]: https://core.telegram.org/mtproto/mtproto-transports#intermediate
    """

    def __init__(self) -> None:
        self._init = False

    def pack(self, input: bytes, output: bytearray) -> None:
        assert len(input) % 4 == 0

        if not self._init:
            output += b"\xee\xee\xee\xee"
            self._init = True

        output += struct.pack("<i", len(input))
        output += input

    def unpack(self, input: bytes, output: bytearray) -> None:
        if len(input) < 4:
            raise ValueError(f"missing bytes, expected: {4}, got: {len(input)}")

        length = struct.unpack_from("<i", input)[0]
        if len(input) < length:
            raise ValueError(f"missing bytes, expected: {length}, got: {len(input)}")

        output += memoryview(input)[4 : 4 + length]
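And the corresponding round-trip sketch for the intermediate framing (again assuming the one-time 0xeeeeeeee init marker is never echoed back):

```python
import struct

from telethon._impl.mtproto.transport.intermediate import Intermediate

t = Intermediate()
packed = bytearray()
t.pack(b"\x01\x02\x03\x04", packed)
assert bytes(packed) == b"\xee\xee\xee\xee" + struct.pack("<i", 4) + b"\x01\x02\x03\x04"

unpacked = bytearray()
t.unpack(bytes(packed[4:]), unpacked)        # skip the one-time init marker
assert bytes(unpacked) == b"\x01\x02\x03\x04"
```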
34 client/src/telethon/_impl/mtproto/utils.py (new file)
@@ -0,0 +1,34 @@
import gzip
import struct
from typing import Optional

from ..tl.mtproto.types import GzipPacked, Message

DEFAULT_COMPRESSION_THRESHOLD: Optional[int] = 512
CONTAINER_SIZE_OVERHEAD = 4 + 4  # constructor_id, inner vec length
CONTAINER_MAX_SIZE = 1_044_456 - CONTAINER_SIZE_OVERHEAD
CONTAINER_MAX_LENGTH = 100
MESSAGE_SIZE_OVERHEAD = 8 + 4 + 4  # msg_id, seq_no, bytes


def check_message_buffer(message: bytes) -> None:
    if len(message) == 4:
        neg_http_code = struct.unpack("<i", message)[0]
        raise ValueError(f"transport error: {neg_http_code}")
    elif len(message) < 20:
        raise ValueError(
            f"server payload is too small to be a valid message: {message.hex()}"
        )


# https://core.telegram.org/mtproto/description#content-related-message
def message_requires_ack(message: Message) -> bool:
    return message.seqno % 2 == 1


def gzip_decompress(gzip_packed: GzipPacked) -> bytes:
    return gzip.decompress(gzip_packed.packed_data)


def gzip_compress(unpacked_data: bytes) -> bytes:
    return gzip.compress(unpacked_data)
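A quick sketch of the transport-error path: a 4-byte payload is how the server signals a transport-level error (a negative "HTTP code"), which `check_message_buffer` surfaces as a `ValueError`:

```python
import struct

from telethon._impl.mtproto.utils import check_message_buffer

try:
    check_message_buffer(struct.pack("<i", -404))
except ValueError as e:
    assert "transport error: -404" in str(e)
```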
@@ -29,35 +29,41 @@ def _bootstrap_get_ty(constructor_id: int) -> Optional[Type["Serializable"]]:
 
 
 class Reader:
-    __slots__ = ("_buffer", "_pos", "_view")
+    __slots__ = ("_view", "_pos", "_len")
 
     def __init__(self, buffer: bytes) -> None:
-        self._buffer = buffer
+        self._view = (
+            memoryview(buffer) if not isinstance(buffer, memoryview) else buffer
+        )
         self._pos = 0
-        self._view = memoryview(self._buffer)
+        self._len = len(self._view)
 
     def read_remaining(self) -> bytes:
         return self.read(self._len - self._pos)
 
     def read(self, n: int) -> bytes:
         self._pos += n
-        return self._view[self._pos - n : n]
+        assert self._pos <= self._len
+        return self._view[self._pos - n : self._pos]
 
     def read_fmt(self, fmt: str, size: int) -> tuple[Any, ...]:
         assert struct.calcsize(fmt) == size
         self._pos += size
+        assert self._pos <= self._len
         return struct.unpack(fmt, self._view[self._pos - size : self._pos])
 
     def read_bytes(self) -> bytes:
-        if self._buffer[self._pos] == 254:
+        if self._view[self._pos] == 254:
             self._pos += 4
-            (length,) = struct.unpack(
-                "<i", self._buffer[self._pos - 3 : self._pos] + b"\0"
-            )
+            length = struct.unpack("<i", self._view[self._pos - 4 : self._pos])[0] >> 8
             padding = length % 4
         else:
-            length = self._buffer[self._pos]
+            length = self._view[self._pos]
             padding = (length + 1) % 4
             self._pos += 1
 
         self._pos += length
+        assert self._pos <= self._len
         data = self._view[self._pos - length : self._pos]
         if padding > 0:
            self._pos += 4 - padding

@@ -72,6 +78,7 @@ class Reader:
         # Unfortunately `typing.cast` would add a tiny amount of runtime overhead
         # which cannot be removed with optimization enabled.
         self._pos += 4
+        assert self._pos <= self._len
         cid = struct.unpack("<I", self._view[self._pos - 4 : self._pos])[0]
         ty = self._get_ty(cid)
         if ty is None:
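For orientation, a sketch of the TL "bytes" encoding that `read_bytes()` consumes: payloads shorter than 254 bytes use a single length byte followed by the data, padded to a multiple of 4 (a 3-byte string needs no padding here):

```python
from telethon._impl.tl.core import Reader

assert Reader(bytes([3]) + b"abc").read_bytes() == b"abc"
```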
@@ -1,16 +1,13 @@
 import struct
 
 
-class Request:
-    __slots__ = "_body"
-
-    def __init__(self, body: bytes):
-        self._body = body
+class Request(bytes):
+    __slots__ = ()
 
     @property
     def constructor_id(self) -> int:
         try:
-            cid = struct.unpack("<i", self._body[:4])[0]
+            cid = struct.unpack("<i", self[:4])[0]
             assert isinstance(cid, int)
             return cid
         except struct.error:
@@ -35,7 +35,12 @@ class Serializable(abc.ABC):
         return bytes(buffer)
 
     def __repr__(self) -> str:
-        attrs = ", ".join(repr(getattr(self, attr)) for attr in self.__slots__)
+        fields = ((attr, getattr(self, attr)) for attr in self.__slots__)
+        fields = (
+            (name, bytes(field) if isinstance(field, memoryview) else field)
+            for name, field in fields
+        )
+        attrs = ", ".join(f"{name}={field!r}" for name, field in fields)
         return f"{self.__class__.__name__}({attrs})"
 
     def __eq__(self, other: object) -> bool:
@@ -5,8 +5,8 @@ def get_auth_key() -> AuthKey:
     return AuthKey.from_bytes(bytes(range(256)))
 
 
-def get_new_nonce() -> bytes:
-    return bytes(range(32))
+def get_new_nonce() -> int:
+    return int.from_bytes(bytes(range(32)))
 
 
 def test_auth_key_aux_hash() -> None:

@@ -28,7 +28,7 @@ def test_calc_new_nonce_hash1() -> None:
     new_nonce = get_new_nonce()
     assert (
         auth_key.calc_new_nonce_hash(new_nonce, 1)
-        == b"\xc2\xce\xd2\xb3>Y:U\xd2\x7fJ]\xab\xee|g"
+        == 258944117842285651226187582903746985063
     )

@@ -37,7 +37,7 @@ def test_calc_new_nonce_hash2() -> None:
     new_nonce = get_new_nonce()
     assert (
         auth_key.calc_new_nonce_hash(new_nonce, 2)
-        == b"\xf41\x8e\x85\xbd/\xf3\xbe\x84\xd9\xfe\xfc\xe3\xdc\xe3\x9f"
+        == 324588944215647649895949797213421233055
     )

@@ -46,5 +46,5 @@ def test_calc_new_nonce_hash3() -> None:
     new_nonce = get_new_nonce()
     assert (
         auth_key.calc_new_nonce_hash(new_nonce, 3)
-        == b"K\xf9\xd7\xb3}\xb4\x13\xeeC\x1d(Qv1\xcb="
+        == 100989356540453064705070297823778556733
     )
64 client/tests/authentication_test.py (new file)
@@ -0,0 +1,64 @@
from contextlib import contextmanager
from typing import Iterator

from rsa import PublicKey
from telethon._impl.crypto.auth_key import AuthKey
from telethon._impl.mtproto.authentication import (
    CreatedKey,
    _do_step1,
    _do_step2,
    _do_step3,
    create_key,
)


@contextmanager
def old_rsa_keys() -> Iterator[None]:
    import telethon._impl.mtproto.authentication

    # Temporarily replace the imported global.
    orig_rsa = telethon._impl.mtproto.authentication.RSA_KEYS  # type: ignore [attr-defined]
    try:
        telethon._impl.mtproto.authentication.RSA_KEYS = {  # type: ignore [attr-defined]
            -4344800451088585951: PublicKey(
n=24403446649145068056824081744112065346446136066297307473868293895086332508101251964919587745984311372853053253457835208829824428441874946556659953519213382748319518214765985662663680818277989736779506318868003755216402538945900388706898101286548187286716959100102939636333452457308619454821845196109544157601096359148241435922125602449263164512290854366930013825808102403072317738266383237191313714482187326643144603633877219028262697593882410403273959074350849923041765639673335775605842311578109726403165298875058941765362622936097839775380070572921007586266115476975819175319995527916042178582540628652481530373407,
e=65537,
            )
        }
        yield
    finally:
        telethon._impl.mtproto.authentication.RSA_KEYS = orig_rsa  # type: ignore [attr-defined]


def test_successful_auth_key_gen_flow() -> None:
step1_random = b"\x86\xd4%\xe6F\r\xe2\xa0H&3\x11_\x8fw\xf1"
step1_request = b"\xf1\x8e~\xbe\x86\xd4%\xe6F\r\xe2\xa0H&3\x11_\x8fw\xf1"
step1_response = b"c$\x16\x05\x86\xd4%\xe6F\r\xe2\xa0H&3\x11_\x8fw\xf1\xe4\xb1\xfeR+vIQh\x91t#W\xc9j\x1a\x08 \xcd<\xb0X{\xddq\x00\x00\x00\x15\xc4\xb5\x1c\x02\x00\x00\x00\x02\x9fK\xa1m\x10\x92\x96!k\xe8l\x02+\xb4\xc3"
step2_random = b"\xc3\x94\x9bxu\xf2\xf6\x13\xadn\x01\x12%\x85\x1f\x84u\xa2\xe2{\x06\xd4~\xec\x94v(\xba\x15\xaazX{\xfe.\xd2\x92\x0b\xa6\xcd\xe7\x92\x84\x87\xcc\xf57\xee;\x9f\xc9c\xde\xbdE\xdc+\x85q\xa3\xbe\x83_@\x04W)\x16\xc5\xe7\xac`\xc4y\xd3lj\xba\xda\xe7\xb2\xd6os:\xd9F,\r4K\x86\x96\xfc\x95\xaag\x80)\xbf\x1a\x11\xad\xc8:^\xfcx)\xa1\xa3r\xcd\xda>)-\xf2\x87\x06\xee\x10U\x17\xd2\xd7\x9c\r\x9fB\x9ef\xd0\x8e3\xc0\x10\x06~\xca\x9b\x87\x83\x998#\xb4\x19mF\xf6\xd2\x10\x1a\x07\xf6\x83\x03\x07R\xef\x83\xab\xde\x98\xe8\xbd\xa3\xb8x\x84\xf6{+f\xeb\x031\xa5\x16A\xec\x1a\x90Xe(<\x8c\xb6\xbe\xcaNm\xe0\x8b\xf3\xf4\x83\x85\xfc=\xfdv8%\x9f50=\xb5\xca\xd9\x7f\xb3\x93\x8f\x00\xd9N\xaa\x1f\x88@<G\x1e\x13V(1;5\x9bg@\x94[\x82\xab\x9d\xa6\x1f\xfaz^\xec\x94jd\x869G\x91\xa0\x9c\n\xb9T\x0e\x01V\xb4\x1c\xfa\xbft\xf1G\x7f.\x92n6\x06\x928~\xba\x7f\xe9\x1f\x18\xf4a\xb63\x87"
step2_request = b'\xbe\xe4\x12\xd7\x86\xd4%\xe6F\r\xe2\xa0H&3\x11_\x8fw\xf1\xe4\xb1\xfeR+vIQh\x91t#W\xc9j\x1a\x04Z\x89\x9d\x9b\x00\x00\x00\x04\\\xbf\xa7\xe3\x00\x00\x00!k\xe8l\x02+\xb4\xc3\xfe\x00\x01\x00l\x0f\xb7\xdfb\xfd4\xfe!3\x07\xffx.b\x19\xc8\x98\x99\x7f\x81\xc05\xfa7cZ[XX\x91\x8a\xc9\x94\xad/wF\x18\x1f|9yBL\x1d\xdc\xe1|\x17N\x9d4\x01.I\'a\xe3B|\xce\x9b\x8e\xd0\xbd\xbcF*\xebS\xc6\x9d\x90/\x06\xa2\x1d\xac[\x99\xc1\xe3\xad\xaf\xc5\xa4\xc8\x1a?\x03\xc4fu\xe6\x1e\xd0\xbc$>6\x83\xccZ%\x0b\xa6\xff\xe2Yl\x9f\xbbkB\r>\xf0\x92\x00\x15\x1d\x83\xb4X\x98+\xe7\x80\xea\\F-6\x9b@\xfdi\xcf\x87\xb9\xf2\xfeW\xec\x03ck?\xf8\x05\x90\r\xb4\xd9<\x1f@R\xe7g\xd9\xba\xb0\x8b\x9b\x93\x0b\xa87UV\x08\xe5C>\xe33\xa8!\xc0\xd6\xe4\xa1\x11\xb8&x\xc1\x1fWt\xe0\x9f\xd2z@\t\xf3w\x9e\x04\x1ao\xff,\x85L\xd9\x8e\xe7\x1d\xf3=\x91h\xf3U~\xce\x17\x18!\xe5-\xb5qxU\x86\xb3\xd2k\x08\r\xa2\xf8E3E\xda\xc7\x0ch3F@\xfed"\x0e\x8ay\xe4\xf0"'
step2_response = b'\\\x07\xe8\xd0\x86\xd4%\xe6F\r\xe2\xa0H&3\x11_\x8fw\xf1\xe4\xb1\xfeR+vIQh\x91t#W\xc9j\x1a\xfeP\x02\x00:MC$\x99\x1b\xa8]\xf7\'[\x91\xe6\x01\xec\x04$\xa7`&Y1\xe5\x13PG\x1f3r\x05\xa0\x02\xf8+\xc7;\xd9\x1d\x84\x1ewo(\x1f\x16\xb4\x93I\xabpZ\xebVB\xce\xaf\xa9\x1c?\x8f\x18\xadq\x06\xa8=[\xe2\x9d\x187\xd8s\x0c\xf1\xa3\xfd\xdeSfw\x88$X\xbf\x14\x17\x14&\x04\xc0\xe4\x85\x00\x94\x186w2\xc6\xd0\xc7c\xe2S9<l\xb9C7}\xc1\xd8\xb5\x8eEy",\x90\xbfWd35\xdeV\xe1\xa4;\x89\x93\xd4\\\xdc\x17e\xdfsa|\xd9\xbb[*\xff\xdd;\xa7\x0b%\xe2F\x0e\xdf\xef\xaf\x80\xa1@\x05\x0c\xc4#\xb9\x10I\x0f)\xa6\x01f\xd9\xd8\xe8\xaa\xf2]\xfa\x9b\x85\xb3)Y\x1a\n\xa9\xd3\xe1\xdf\xc4s\xe1\xd3~t\xe3\xfe\xf9W\xbb\x0b5\xd2\xc3\xa3T\r\xb9\x92\xa6I\xc6\t\x91\xc5]a\x89\xd8P!\xf2\xa2\x0f\x16\tJ\x9e\x0b,\x88\xd0\xa5>\xc4\x05\xc7Ws\xf4/\xd8\x10\xfemK\x01dB\xd5q\x93T\xd6\x04\xcaW\x00\x19\x07\x92\xca8h\x13J\xf2\xae\xf9\x1d\xba\xc8\xcc\x8f\xdc\x82\xe6"^\x83\xd0\xa4\xd2\x9a\x99\x92<\x15\x1e#J \x88\x82Y\x11\xd6\x14>j\xcc\x10e\x9f1\xf3{~\xa8\xfb\xdd\x82\xd1\x81`,0LAN\x87\x8c\xcc\tZ2\x07\x18\x81\x9f\\h\x8c$\xc9\x1e^\n\rJ\x94,\xb6L\x06L\x9d\xd6\xed4}W\xb5\xcd\xbcu\xd2MI\x98\x03\xac\xa8\xebZ\xed\xc3#\x1c\x8e\xc2\x16\x89\xe3\xfc=\xd1x\x9cR\xf6\x1e\xe7\xb3\xbfS\xc0\xef\xf4!\xdeH:7SSi\x17Z\xc3-2B&\xa9\xd4,4%~@\x12\'j}p\xfay\x8bU\xab\xa1;\xb7\xfc\xf9\x9fM\xf6:\xc0\xfe\xa4T\xe2\xf2\xf1E}\x0e-\xc66\x00\x91\xd4\xf7\xd6\x0b6L\xc8\xd7\x07\xefQ\x18\xce=\xf1\x88\xdf\xd64\r\xfb\xce\xb4\xb0\xf3w~\xda\x80\xc0\xc5\xca\xa3\x02\xb2A\xd21\xbc\xefH\xf217\x00\xc1Z\x85p\xa1\x87V/\x88\xe0\x10y\x07bO"\xe1\xe0b@\x80\xf8\x9f\xe1cc5\xc2\xa8\xd0\xb0&\x97\xdb\x14<0.\xfa$B\x9a\xca\x0c\xef3\x01@+\xeeQ\x9a\xe1\x1d\x8b6\x02%\xe8\x1c\xa5\xb9N9\xc1\xce\x93\xcc\xe4\xde\xc8%\x9c\x14\xce\xf6\xaa\x02\x1a\xa1\xc3\xe3\xb5v\xd2n%=c\x0e\xa8\xbd\xfa(\xefE\xe8i\xcf\xeb'
step3_random = b'G\x92z\n\x1b\x15b\x91\x14,\xf8\xf7;\x03)\xa1\xccl\xde\x9f\xb6\xed\x89\x135\xaf\xfa\xc3>\x08:\x7f\xee\xda_o\xe9\xd5\xe1W\xe1\x9d\xc9\x13b<6\xcc\x1e\xd3\x87ac\x08\xe8\xf93\xf2\x07\x98\x98$|\xe9]\xf0+\xc4alt\x84)\x17\x18\xbc\xdd\x03\x94\x1bfR\xaf\xe2\x94_B\x1d\xd7C\xd5\x1fr\xe9\xae\xea\xcc\x0cu\xd9\x1fK2 \xcc\xc9\xda\xf5\xec=q\x8f^*\xf5_\x18"\xbd\x06\x1c\xee\xc1\xc0^H\xb0\t\xfe\x14\xba\x16\xb4\x84O\xa4!+\xca\xf1U]\x1fV[\xfa\x86-w\xfamL\xf4U\xf2]\xacS}\x05\xbbF\x83C-z\xec\xa8Mq?\xda"\xbe\xe3\x0b\xd6d\xbe\x8b9\x1e\x96\xf0\x9c,q\x92\xb6\x1d\xd1\xb2\xeb0\xc0\x06\x19S\xc2\x8cE\x0eob\x1ed\x93:\x13\xf9Ai\xe8\xa9N\n\xea\xda}\xb4\x0c\xb1\x01\xcfw\xa2`\xaeij\t\xa5n@\x9diq\x84m@\xe6\'\xec\x82\x18\x97\x89,\\\xef<\x14\xe3\x12+\x83\xb6 \x82\x16\x00Q\xff\xce\x93\xdb\xea]\xaf\x87\xd6\x1e'
step3_request = b"\x1f_\x04\xf5\x86\xd4%\xe6F\r\xe2\xa0H&3\x11_\x8fw\xf1\xe4\xb1\xfeR+vIQh\x91t#W\xc9j\x1a\xfeP\x01\x00%t5m\x89\x96p\x89\xb4\x92\xd8W\x97\xeb\x07J\xc0\x9c\xa9\xbcF\xe1\tbV\x98\x0fU\xa2\x1b\xf2\xe7\xe4\xbb$V\x97X7$\xc3\x0bx\xa2-N(\x1d&\xf7\xce\xfd\xd2\x0f\x13\xb4%\xd4\xca\xf9h>\x01\x0f\xf4y\xc2\xee\x87\x86\x8c\xaf\xc1\xf4u\\\xedee\x9ag\x8f\x0f'h\r`\xe9\x1f\x9a\x0f\x80?%d\xa5\xa7A\x9d\xd4{\xf1!\x82\xc6\xd4\x8c\xf9\xfaT\x9f\x89\x0f\x8ac\xaf\xfcpK\x07q\x05/H\x18\xd3\xe5\xd29\xe8\xbb\xf8`\x8e\\\xee\x9b\xef\xb2\xbcM\xd9\x0f\x17\xc0\x7f`\x8c\xae\xdfZN\\\xfe\x94\xffMvRN0\xcf\n\x02\x8a\x17\xe3q\xfb\xf5\xf7v\xe2\xf2\xf5\xf4\x07`#\xc3\r5\xd3\xb3\x7f\xc3\xb9\x03z@\xb6t\xdeg\xbc\xce\xb6J\x02^\xa1\x95\xa1\xf8\xad\\\xe1Y\xe9n}\x99\xac\x9cu\xd6\xfb\xebMn\x16\xe6\xb8\xe9\xda3\x0c%+\x0bd\xa4N\x19p\x83L\xd3cq\x16\xceS3\xa4\x05\x9b<S\xf8dGh\xbd\x8dr\xf8<\x06J\x97\x8d]\x88\xb3k\xb6N\x86>w\x05\x13e\x9b\xb8\x8d)D\xe9I\xffd\x87u\xe6\xee\xe7\x8d\xa2\xe1ve\xcc\xb6$\x9bwFx\xd3u\xad\xd9\xf0\x04\xc8\xaf\x97A\x03\xff4\x01\xd2\x12\x82\xc2\xbf\x1b\xf7\xc2\x8f\x98\xc3\xef\xac\xf1yVc\x14Y\xef\xe4\xc1"
step3_response = b"4\xf7\xcb;\x86\xd4%\xe6F\r\xe2\xa0H&3\x11_\x8fw\xf1\xe4\xb1\xfeR+vIQh\x91t#W\xc9j\x1a\x16G\x8884\xca{\xfd\xe2s\xa2N8\xc8\xc8\xb3"
expected_auth_key = b"\x0b\x1aJ\xd1\xb0\xa7\x91\x8bv?\xb7'<#\xca@\x08\xdc\x10z\x8c\x88\x8a}\xe7V\xeb\x93\x85\x03\x88\xe5\xc0R\xa0\xed6\x81fv\x84\xcc\x97|:\xf8\xf5\xc1\xbe+\xa2yT\xa0\x93\n:\xe3F330S\x82\xb8\x05\xc0\x87\x8a\xa7)\xcb+\xe4\xb6\x8br\t]\x96\xdc-5\xfa`R\xab\x98\xa5\xe7\x03\x04\xd8\x8dj\x96\x06\xe6$\xc5\xe6\xde\x84\x94\xad\x8b\x0eW\xc8\xb7\xc6b\x90\xd0\x1a\x9f\xfdm\x11o\xb7X^o\xf2\x05X\xfd\x9a@\x1b/\x92R\xf1\x9e\xf5\xe8J\xa3\x84\x8d\x1a\x9d\x10\x14P\x13\xa6v\x8c\xf8Q\xdf\xda\xe9\x00\xc7\xf51\x8e&#\xa8\xa9\xab\xcdo;\xe5\n\xa7\x8b\x9f\xd9@\xa4\x9d.\xfa\xc4=\xdd\x84\x9c\xd0&\xf6\x18V\xd0\x12\x1e\x13\x15\xd7\xc1\x91\xd2\xb3\nc\xdb\xed\x16\x0b_GNj\x8cpO\xf4L\xb9\x10\xd8\x97\x02\xd5\xcb\xd1\xe8\x82\x16\xedC\x15\n\xef\x0b\xaa\x89$\xb7\x1c6}\xacad\xd7\x9f0\x18\xf3\xdd\x06\x8e4\xbd\xb3\x12?\xe0"

    request, step1 = _do_step1(step1_random)
    assert request == step1_request
    response = step1_response

    with old_rsa_keys():
        request, step2 = _do_step2(step1, response, step2_random)
    assert request == step2_request
    response = step2_response

    step3_now = 1580236449
    request, step3 = _do_step3(step2, response, step3_random, step3_now)
    assert request == step3_request
    response = step3_response

    finished = create_key(step3, response)
    assert finished == CreatedKey(
        auth_key=AuthKey.from_bytes(expected_auth_key),
        time_offset=0,
        first_salt=4809708467028043047,
    )
@@ -62,8 +62,8 @@ def test_decrypt_server_data_v2() -> None:
 
 
 def test_key_from_nonce() -> None:
-    server_nonce = bytes(range(16))
-    new_nonce = bytes(range(32))
+    server_nonce = int.from_bytes(bytes(range(16)))
+    new_nonce = int.from_bytes(bytes(range(32)))
 
     (key, iv) = generate_key_data_from_nonce(server_nonce, new_nonce)
     assert (
193 client/tests/mtproto_test.py (new file)
|
@ -0,0 +1,193 @@
|
|||
import struct
|
||||
|
||||
from pytest import raises
|
||||
from telethon._impl.mtproto.mtp import Encrypted, Plain, RpcError
|
||||
from telethon._impl.tl.mtproto.types import RpcError as GeneratedRpcError
|
||||
|
||||
|
||||
def test_rpc_error_parsing() -> None:
|
||||
assert RpcError.from_mtproto_error(
|
||||
GeneratedRpcError(
|
||||
error_code=400,
|
||||
error_message="CHAT_INVALID",
|
||||
)
|
||||
) == RpcError(
|
||||
code=400,
|
||||
name="CHAT_INVALID",
|
||||
value=None,
|
||||
caused_by=None,
|
||||
)
|
||||
|
||||
assert RpcError.from_mtproto_error(
|
||||
GeneratedRpcError(
|
||||
error_code=420,
|
||||
error_message="FLOOD_WAIT_31",
|
||||
)
|
||||
) == RpcError(
|
||||
code=420,
|
||||
name="FLOOD_WAIT",
|
||||
value=31,
|
||||
caused_by=None,
|
||||
)
|
||||
|
||||
assert RpcError.from_mtproto_error(
|
||||
GeneratedRpcError(
|
||||
error_code=500,
|
||||
error_message="INTERDC_2_CALL_ERROR",
|
||||
)
|
||||
) == RpcError(
|
||||
code=500,
|
||||
name="INTERDC_CALL_ERROR",
|
||||
value=2,
|
||||
caused_by=None,
|
||||
)
|
||||
|
||||
|
||||
PLAIN_REQUEST = b"Hey!"
|
||||
|
||||
|
||||
def test_plain_finalize_clears_buffer() -> None:
|
||||
mtp = Plain()
|
||||
|
||||
mtp.push(PLAIN_REQUEST)
|
||||
assert len(mtp.finalize()) == 24
|
||||
|
||||
mtp.push(PLAIN_REQUEST)
|
||||
assert len(mtp.finalize()) == 24
|
||||
|
||||
|
||||
def test_plain_only_one_push_allowed() -> None:
|
||||
mtp = Plain()
|
||||
|
||||
assert mtp.push(PLAIN_REQUEST) is not None
|
||||
assert mtp.push(PLAIN_REQUEST) is None
|
||||
|
||||
|
||||
MESSAGE_PREFIX_LEN = 8 + 8 # salt + client_id
|
||||
GZIP_PACKED_HEADER = b"\xa1\xcf\x72\x30"
|
||||
MSG_CONTAINER_HEADER = b"\xdc\xf8\xf1\x73"
|
||||
REQUEST = b"Hey!"
|
||||
REQUEST_B = b"Bye!"
|
||||
|
||||
|
||||
def auth_key() -> bytes:
|
||||
return bytes(256)
|
||||
|
||||
|
||||
def ensure_buffer_is_message(buffer: bytes, body: bytes, seq_no: int) -> None:
|
||||
# msg_id, based on time
|
||||
assert buffer[0:8] != bytes(8)
|
||||
# seq_no, sequential odd number
|
||||
assert buffer[8:12] == struct.pack("<i", seq_no)
|
||||
# bytes, body length
|
||||
assert buffer[12:16] == struct.pack("<i", len(body))
|
||||
# body
|
||||
assert buffer[16:] == body
|
||||
|
||||
|
||||
def test_serialization_has_salt_client_id() -> None:
|
||||
mtp = Encrypted(auth_key())
|
||||
|
||||
mtp.push(REQUEST)
|
||||
buffer = mtp._finalize_plain()
|
||||
|
||||
# salt
|
||||
assert buffer[0:8] == bytes(8)
|
||||
# client_id
|
||||
assert buffer[8:16] != bytes(8)
|
||||
# message
|
||||
ensure_buffer_is_message(buffer[MESSAGE_PREFIX_LEN:], REQUEST, 1)
|
||||
|
||||
|
||||
def test_correct_single_serialization() -> None:
|
||||
mtp = Encrypted(auth_key())
|
||||
|
||||
assert mtp.push(REQUEST) is not None
|
||||
buffer = mtp._finalize_plain()
|
||||
|
||||
ensure_buffer_is_message(buffer[MESSAGE_PREFIX_LEN:], REQUEST, 1)
|
||||
|
||||
|
||||
def test_correct_multi_serialization() -> None:
|
||||
mtp = Encrypted(auth_key(), compression_threshold=None)
|
||||
|
||||
assert mtp.push(REQUEST) is not None
|
||||
assert mtp.push(REQUEST_B) is not None
|
||||
buffer = mtp._finalize_plain()
|
||||
buffer = buffer[MESSAGE_PREFIX_LEN:]
|
||||
|
||||
# container msg_id
|
||||
assert buffer[0:8] != bytes(8)
|
||||
# seq_no (after content-related messages 1 and 3, the non-content container gets 4)
|
||||
assert buffer[8:12] == b"\x04\0\0\0"
|
||||
# body length
|
||||
assert buffer[12:16] == b"\x30\0\0\0"
|
||||
|
||||
# container constructor_id
|
||||
assert buffer[16:20] == MSG_CONTAINER_HEADER
|
||||
# message count
|
||||
assert buffer[20:24] == b"\x02\0\0\0"
|
||||
|
||||
ensure_buffer_is_message(buffer[24:44], REQUEST, 1)
|
||||
ensure_buffer_is_message(buffer[44:], REQUEST_B, 3)
|
||||
|
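# Where the 0x30 length above comes from: the msg_container body is a 4-byte
# constructor plus a 4-byte message count, and each inner message adds a
# 16-byte header (msg_id, seq_no, length) before its body. Quick check, not
# part of this commit:
container_body_len = 4 + 4 + sum(16 + len(body) for body in (b"Hey!", b"Bye!"))
assert container_body_len == 0x30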
||||
|
||||
def test_correct_single_large_serialization() -> None:
|
||||
mtp = Encrypted(auth_key(), compression_threshold=None)
|
||||
data = bytes(0x7F for _ in range(768 * 1024))
|
||||
|
||||
assert mtp.push(data) is not None
|
||||
buffer = mtp._finalize_plain()
|
||||
|
||||
buffer = buffer[MESSAGE_PREFIX_LEN:]
|
||||
assert len(buffer) == 16 + len(data)
|
||||
|
||||
|
||||
def test_correct_multi_large_serialization() -> None:
|
||||
mtp = Encrypted(auth_key(), compression_threshold=None)
|
||||
data = bytes(0x7F for _ in range(768 * 1024))
|
||||
|
||||
assert mtp.push(data) is not None
|
||||
assert mtp.push(data) is None
|
||||
|
||||
buffer = mtp._finalize_plain()
|
||||
buffer = buffer[MESSAGE_PREFIX_LEN:]
|
||||
assert len(buffer) == 16 + len(data)
|
||||
|
||||
|
||||
def test_large_payload_panics() -> None:
|
||||
mtp = Encrypted(auth_key())
|
||||
|
||||
with raises(AssertionError):
|
||||
mtp.push(bytes(2 * 1024 * 1024))
|
||||
|
||||
|
||||
def test_non_padded_payload_panics() -> None:
|
||||
mtp = Encrypted(auth_key())
|
||||
|
||||
with raises(AssertionError):
|
||||
mtp.push(b"\x01\x02\x03")
|
||||
|
||||
|
||||
def test_no_compression_is_honored() -> None:
|
||||
mtp = Encrypted(auth_key(), compression_threshold=None)
|
||||
mtp.push(bytes(512 * 1024))
|
||||
buffer = mtp._finalize_plain()
|
||||
assert GZIP_PACKED_HEADER not in buffer
|
||||
|
||||
|
||||
def test_some_compression() -> None:
|
||||
mtp = Encrypted(auth_key(), compression_threshold=768 * 1024)
|
||||
mtp.push(bytes(512 * 1024))
|
||||
buffer = mtp._finalize_plain()
|
||||
assert GZIP_PACKED_HEADER not in buffer
|
||||
|
||||
mtp = Encrypted(auth_key(), compression_threshold=256 * 1024)
|
||||
mtp.push(bytes(512 * 1024))
|
||||
buffer = mtp._finalize_plain()
|
||||
assert GZIP_PACKED_HEADER in buffer
|
||||
|
||||
mtp = Encrypted(auth_key())
|
||||
mtp.push(bytes(512 * 1024))
|
||||
buffer = mtp._finalize_plain()
|
||||
assert GZIP_PACKED_HEADER in buffer
|
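# A minimal sketch (not from this diff) of the decision the three cases above
# exercise: the payload is only gzipped once it reaches the configured
# threshold; the helper name and the exact comparison are assumptions.
import gzip
from typing import Optional

def maybe_compress(body: bytes, threshold: Optional[int]) -> bytes:
    if threshold is not None and len(body) >= threshold:
        return gzip.compress(body)
    return body

assert maybe_compress(bytes(512 * 1024), None) == bytes(512 * 1024)
assert maybe_compress(bytes(512 * 1024), 768 * 1024) == bytes(512 * 1024)
assert maybe_compress(bytes(512 * 1024), 256 * 1024) != bytes(512 * 1024)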
11
client/tests/mtproto_types_test.py
Normal file
|
@@ -0,0 +1,11 @@
|
|||
from telethon._impl.mtproto.utils import gzip_decompress
|
||||
from telethon._impl.tl.core import Reader
|
||||
from telethon._impl.tl.mtproto.types import GzipPacked, RpcResult
|
||||
|
||||
|
||||
def test_gzip_decompress() -> None:
|
||||
rpc_result = Reader(
|
||||
b'\x01m\\\xf3\x84)\x96E6K1^\xa1\xcfr0\xfe\x8c\x01\x00\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\x03\x95\x93=K\xc3P\x14\x86O>\x14T\xaa\x01\x11\x1cD\x1c\x84n\xb7\xf7&\xb9\xf9\x9a:\n\x82tt\xaa6m\x12\x0b\xad\xa9mZp\xd2\xd1Qpp\x16\x7f\x838\xe8\xe8\xe0\x8fpp\xee\x9f\xa87\x1f\xeaM[R\x1a\xb89$O\xdes\xde\xfb&\t\xaa\x1b\xda\xd1>\x80qL\xea\xb5SR7\x1f\xaf\xdfe\x00\xd8\xf9x\xd9\xdbf\xb5\xf4\xf4\xba\xcb\n\x08lm\x12\xddF\x84\xea\x88\x98\x14Q\r\xde\x84\x94+\x0b\xb8\x90\xf1\xb2\x8a1q\xb0\xabZ\x8e\xafj-\xc7O.\xd9\x91?5~u\xf1\\1\xd7\xd70\x11\xc5\xb9\xb9\xb3\x9c\xfcq1\xe3[<\'\x94\xf0\xbe\xc4\x7f_\x86\xd9t\xb0\xeeY\xb1/\xb5\xc8\x97\xb4\x9c\xce\xe5\xf7#\xe5\xfc\xb0\x9c\x08\xc6\xfc~\x8a\xb8\x90\xf199j\x8br\x94\xa7s\xb2I.\xc7".d|\xce~\xf5\xa2\xb9b\xa6\xe3\xf27\x10Q1\x9f\xe3\x12}]\xde\xcf\xcaL\x0e~\xac\xa3E~\x94LW\xb2\t\x8b\xd5B\x94\xb9\xb10\x00\x9f\xd34\xa7v\xc6\x1b\xbd\x91\x86\x06\x91\xd7A\xcd\xb0\x0b\xf0\xc9\x9e\xab\x96$h\xb1Z\xd3%\xb8c?\x0b\x1e\x02|?\xc8I\xfdb\x8d\xce\xc7\xe9w\x12?\x0b\xcf"L&\x93\x9bx\xc1\xf80\xb9\x17\xcf\xbau\xd6\x92\x1a\xf798\x01\xb8?\x13\x12\xbdRf3/\xa2\xa87p*\x95\x08u\xbd\n{\xf7A\xdb_\xf7\xc3a\x7fp5l\xf4=\x90\xdd\xf6e\xc0t\xabA\x18\x06\x1d/\rSI\xcb\x0f:\xba\r\x8d\xd8\x03\x00\x00'
|
||||
).read_serializable(RpcResult)
|
||||
gzip_packed = Reader(rpc_result.result).read_serializable(GzipPacked)
|
||||
assert len(gzip_decompress(gzip_packed)) == 984
|
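# For context (not part of this commit): the capture above starts with the
# little-endian rpc_result constructor id, and the wrapped result begins with
# the gzipPacked constructor right before the raw gzip stream.
import struct
head = b"\x01m\\\xf3\x84)\x96E6K1^\xa1\xcfr0"  # first 16 bytes of the capture
assert struct.unpack("<I", head[:4])[0] == 0xF35C6D01  # rpc_result
assert head[12:16] == b"\xa1\xcfr0"  # gzip_packed constructor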
|
@@ -47,3 +47,13 @@ def test_generated_object(obj: Serializable) -> None:
|
|||
assert bytes(obj)[:4] == struct.pack("<I", obj.constructor_id())
|
||||
assert type(obj)._read_from(Reader(bytes(obj)[4:])) == obj
|
||||
assert Reader(bytes(obj)).read_serializable(type(obj)) == obj
|
||||
|
||||
|
||||
def test_repeated_read() -> None:
|
||||
reader = Reader(bytes(range(8)))
|
||||
assert reader.read(4) == bytes(range(4))
|
||||
assert reader.read(4) == bytes(range(4, 8))
|
||||
|
||||
reader = Reader(bytes(range(8)))
|
||||
assert reader.read_fmt("4b", 4) == tuple(range(4))
|
||||
assert reader.read_fmt("4b", 4) == tuple(range(4, 8))
|
||||
|
|
|
@@ -1,7 +1,9 @@
|
|||
from rsa import PublicKey
|
||||
from telethon._impl.crypto.rsa import (
|
||||
PRODUCTION_RSA_KEY,
|
||||
TESTMODE_RSA_KEY,
|
||||
compute_fingerprint,
|
||||
encrypt_hashed,
|
||||
)
|
||||
|
||||
|
||||
|
@@ -13,3 +15,15 @@ def test_fingerprint_1() -> None:
|
|||
def test_fingerprint_2() -> None:
|
||||
fp = compute_fingerprint(TESTMODE_RSA_KEY)
|
||||
assert fp == -5595554452916591101
|
||||
|
||||
|
||||
def test_rsa_encryption() -> None:
|
||||
key = PublicKey(
|
||||
n=22081946531037833540524260580660774032207476521197121128740358761486364763467087828766873972338019078976854986531076484772771735399701424566177039926855356719497736439289455286277202113900509554266057302466528985253648318314129246825219640197356165626774276930672688973278712614800066037531599375044750753580126415613086372604312320014358994394131667022861767539879232149461579922316489532682165746762569651763794500923643656753278887871955676253526661694459370047843286685859688756429293184148202379356802488805862746046071921830921840273062124571073336369210703400985851431491295910187179045081526826572515473914151,
|
||||
e=65537,
|
||||
)
|
||||
result = encrypt_hashed(b"Hello!", key, bytes(256))
|
||||
assert (
|
||||
result
|
||||
== b"up-L\x88\xd2\x9bj\xb945Q$\xdd(\xd9\xb6*GU\x88A\xc8\x03\x14P\xf7I\x9b\x1c\x9ck\xd3\x9d'\xc1X\x1cQ4NQ\xc1y#pd\xa7#\xae\x93\x9dZ\xc3P\x14\xfd\x8bO\xe2Ou\xe3\x11\\2\xa1ci\xee+7:a\xec\x94F\xb9+.=\xf0v\x18\xdb\n\x8a\xfd\xa9\x99\xb6p+2\xb5\x81\x9b\xd6\xeaIp\xfb4|\xa8J`\xd0\xc3\x8a\xb7\x0cf\xe5\xed\x01@D\x88\x89\xa3\xb8\x82\xee\xa53\xba\xd0^\xfa E\xed\xa7\x17\x12<AJ\xbf\xde\xd4>\x1e\xb4\x83\xa0Ixn\xf5\x03\x1b\x12\xd5\x1a?\xf7\xec\xb7\xd8\x04\xd4A5\x94_\x98\xf7ZJl\xf1\xa1\xdf7U\x9e0\xbb\xe9*Kyf\xc3O\x078\xe6\xd10Y\x85wm&\xdf\xab|\x0f\xdf\xd7\xec ,\xc7\x8cT\xcf\x82\xac#\x86\xc7\x9d\x0e\x19u\x80\xa4\xfa\x940\n#\x82\xf9\xe1\x16\xfe\x82\xdf\x9b\xd8r\xe5\xb9\xda{Bb#\xbf\x1a\xd8X\x890\xb5\x1e\x16]l\xdd\x02"
|
||||
)
|
||||
|
|
60
client/tests/transport/abridged_test.py
Normal file
|
@@ -0,0 +1,60 @@
|
|||
from typing import Tuple
|
||||
|
||||
from pytest import raises
|
||||
from telethon._impl.mtproto.transport.abridged import Abridged
|
||||
|
||||
|
||||
def setup_pack(n: int) -> Tuple[Abridged, bytes, bytearray]:
|
||||
input = bytes(x & 0xFF for x in range(n))
|
||||
return Abridged(), input, bytearray()
|
||||
|
||||
|
||||
def test_pack_empty() -> None:
|
||||
transport, input, output = setup_pack(0)
|
||||
transport.pack(input, output)
|
||||
assert output == b"\xef\0"
|
||||
|
||||
|
||||
def test_pack_non_padded() -> None:
|
||||
transport, input, output = setup_pack(7)
|
||||
with raises(AssertionError):
|
||||
transport.pack(input, output)
|
||||
|
||||
|
||||
def test_pack_normal() -> None:
|
||||
transport, input, output = setup_pack(128)
|
||||
transport.pack(input, output)
|
||||
assert output[:2] == b"\xef\x20"
|
||||
assert output[2:] == input
|
||||
|
||||
|
||||
def test_pack_large() -> None:
|
||||
transport, input, output = setup_pack(1024)
|
||||
transport.pack(input, output)
|
||||
assert output[:5] == b"\xef\x7f\0\x01\0"
|
||||
assert output[5:] == input
|
||||
|
||||
|
||||
def test_unpack_small() -> None:
|
||||
transport = Abridged()
|
||||
input = b"\x01"
|
||||
output = bytearray()
|
||||
with raises(ValueError) as e:
|
||||
transport.unpack(input, output)
|
||||
e.match("missing bytes")
|
||||
|
||||
|
||||
def test_unpack_normal() -> None:
|
||||
transport, input, packed = setup_pack(128)
|
||||
unpacked = bytearray()
|
||||
transport.pack(input, packed)
|
||||
transport.unpack(packed[1:], unpacked)
|
||||
assert input == unpacked
|
||||
|
||||
|
||||
def test_unpack_large() -> None:
|
||||
transport, input, packed = setup_pack(1024)
|
||||
unpacked = bytearray()
|
||||
transport.pack(input, packed)
|
||||
transport.unpack(packed[1:], unpacked)
|
||||
assert input == unpacked
|
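# A rough sketch (not part of this diff) of the abridged length prefix the
# tests above check: the first packet leads with 0xef, then the length divided
# by four either fits in one byte or is written as 0x7f plus three bytes.
def abridged_header(length: int, first: bool = True) -> bytes:
    words = length // 4
    head = b"\xef" if first else b""
    if words < 127:
        return head + bytes((words,))
    return head + b"\x7f" + words.to_bytes(3, "little")

assert abridged_header(128) == b"\xef\x20"
assert abridged_header(1024) == b"\xef\x7f\x00\x01\x00"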
103
client/tests/transport/full_test.py
Normal file
|
@@ -0,0 +1,103 @@
|
|||
from typing import Tuple
|
||||
|
||||
from pytest import raises
|
||||
from telethon._impl.mtproto.transport.full import Full
|
||||
|
||||
|
||||
def setup_pack(n: int) -> Tuple[Full, bytes, bytearray]:
|
||||
input = bytes(x & 0xFF for x in range(n))
|
||||
return Full(), input, bytearray()
|
||||
|
||||
|
||||
def setup_unpack(n: int) -> Tuple[bytes, Full, bytes, bytearray]:
|
||||
transport, expected_output, input = setup_pack(n)
|
||||
transport.pack(expected_output, input)
|
||||
|
||||
return expected_output, Full(), input, bytearray()
|
||||
|
||||
|
||||
def test_pack_empty() -> None:
|
||||
transport, input, output = setup_pack(0)
|
||||
transport.pack(input, output)
|
||||
|
||||
assert output == b"\x0c\x00\x00\x00\x00\x00\x00\x00&\xca\x8d2"
|
||||
|
||||
|
||||
def test_pack_non_padded() -> None:
|
||||
transport, input, output = setup_pack(7)
|
||||
with raises(AssertionError):
|
||||
transport.pack(input, output)
|
||||
|
||||
|
||||
def test_pack_normal() -> None:
|
||||
transport, input, output = setup_pack(128)
|
||||
transport.pack(input, output)
|
||||
|
||||
assert output[:4] == b"\x8c\0\0\0"
|
||||
assert output[4:8] == b"\0\0\0\0"
|
||||
assert output[8 : 8 + len(input)] == input
|
||||
assert output[8 + len(input) :] == b"\x86s\x957"
|
||||
|
||||
|
||||
def test_pack_twice() -> None:
|
||||
transport, input, output = setup_pack(128)
|
||||
transport.pack(input, output)
|
||||
output.clear()
|
||||
transport.pack(input, output)
|
||||
|
||||
assert output[:4] == b"\x8c\0\0\0"
|
||||
assert output[4:8] == b"\x01\0\0\0"
|
||||
assert output[8 : 8 + len(input)] == input
|
||||
assert output[8 + len(input) :] == b"\x96\t\xf0J"
|
||||
|
||||
|
||||
def test_unpack_small() -> None:
|
||||
transport = Full()
|
||||
input = b"\0\x01\x02"
|
||||
output = bytearray()
|
||||
with raises(ValueError) as e:
|
||||
transport.unpack(input, output)
|
||||
e.match("missing bytes")
|
||||
|
||||
|
||||
def test_unpack_normal() -> None:
|
||||
expected_output, transport, input, output = setup_unpack(128)
|
||||
transport.unpack(input, output)
|
||||
assert output == expected_output
|
||||
|
||||
|
||||
def test_unpack_twice() -> None:
|
||||
transport, input, packed = setup_pack(128)
|
||||
unpacked = bytearray()
|
||||
transport.pack(input, packed)
|
||||
transport.unpack(packed, unpacked)
|
||||
assert input == unpacked
|
||||
|
||||
packed.clear()
|
||||
unpacked.clear()
|
||||
transport.pack(input, packed)
|
||||
transport.unpack(packed, unpacked)
|
||||
assert input == unpacked
|
||||
|
||||
|
||||
def test_unpack_bad_crc() -> None:
|
||||
_, transport, input, output = setup_unpack(128)
|
||||
input = input[:-1] + bytes((input[-1] ^ 0xFF,))
|
||||
with raises(ValueError) as e:
|
||||
transport.unpack(input, output)
|
||||
e.match("bad crc")
|
||||
e.match("expected: 932541318")
|
||||
e.match("got: 3365237638")
|
||||
|
||||
|
||||
def test_unpack_bad_seq() -> None:
|
||||
transport, input, packed = setup_pack(128)
|
||||
unpacked = bytearray()
|
||||
transport.pack(input, packed)
|
||||
packed.clear()
|
||||
transport.pack(input, packed)
|
||||
with raises(ValueError) as e:
|
||||
transport.unpack(packed, unpacked)
|
||||
e.match("bad seq")
|
||||
e.match("expected: 0")
|
||||
e.match("got: 1")
|
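# A self-contained sketch (not part of this commit) of the framing these full
# transport tests rely on: a 4-byte total length, a 4-byte sequence number,
# the body, then a CRC32 over everything before the checksum.
import struct
import zlib

def full_pack(body: bytes, seq: int) -> bytes:
    header = struct.pack("<ii", len(body) + 12, seq)
    return header + body + struct.pack("<I", zlib.crc32(header + body))

assert full_pack(b"", 0) == b"\x0c\x00\x00\x00\x00\x00\x00\x00&\xca\x8d2"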
45
client/tests/transport/intermediate_test.py
Normal file
|
@@ -0,0 +1,45 @@
|
|||
from typing import Tuple
|
||||
|
||||
from pytest import raises
|
||||
from telethon._impl.mtproto.transport.intermediate import Intermediate
|
||||
|
||||
|
||||
def setup_pack(n: int) -> Tuple[Intermediate, bytes, bytearray]:
|
||||
input = bytes(x & 0xFF for x in range(n))
|
||||
return Intermediate(), input, bytearray()
|
||||
|
||||
|
||||
def test_pack_empty() -> None:
|
||||
transport, input, output = setup_pack(0)
|
||||
transport.pack(input, output)
|
||||
assert output == b"\xee\xee\xee\xee\0\0\0\0"
|
||||
|
||||
|
||||
def test_pack_non_padded() -> None:
|
||||
transport, input, output = setup_pack(7)
|
||||
with raises(AssertionError):
|
||||
transport.pack(input, output)
|
||||
|
||||
|
||||
def test_pack_normal() -> None:
|
||||
transport, input, output = setup_pack(128)
|
||||
transport.pack(input, output)
|
||||
assert output[:8] == b"\xee\xee\xee\xee\x80\0\0\0"
|
||||
assert output[8:] == input
|
||||
|
||||
|
||||
def test_unpack_small() -> None:
|
||||
transport = Intermediate()
|
||||
input = b"\x01"
|
||||
output = bytearray()
|
||||
with raises(ValueError) as e:
|
||||
transport.unpack(input, output)
|
||||
e.match("missing bytes")
|
||||
|
||||
|
||||
def test_unpack_normal() -> None:
|
||||
transport, input, packed = setup_pack(128)
|
||||
unpacked = bytearray()
|
||||
transport.pack(input, packed)
|
||||
transport.unpack(packed[4:], unpacked)
|
||||
assert input == unpacked
|
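# A quick sketch (not from this diff) of the intermediate framing asserted
# above: a one-time 0xeeeeeeee init tag, then each packet is a 4-byte
# little-endian length followed by the payload.
import struct

def intermediate_pack(body: bytes, first: bool = True) -> bytes:
    init = b"\xee\xee\xee\xee" if first else b""
    return init + struct.pack("<i", len(body)) + body

assert intermediate_pack(b"") == b"\xee\xee\xee\xee\x00\x00\x00\x00"
assert intermediate_pack(bytes(128))[:8] == b"\xee\xee\xee\xee\x80\x00\x00\x00"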
|
@@ -37,6 +37,7 @@ def generate(fs: FakeFs, tl: ParsedTl) -> None:
|
|||
generated_types = {
|
||||
"True",
|
||||
"Bool",
|
||||
"Object",
|
||||
} # initial set is considered to be "compiler built-ins"
|
||||
|
||||
ignored_types = {"true", "boolTrue", "boolFalse"} # also "compiler built-ins"
|
||||
|
@@ -91,7 +92,7 @@ def generate(fs: FakeFs, tl: ParsedTl) -> None:
|
|||
writer.write(f"import struct")
|
||||
writer.write(f"from typing import List, Optional, Self")
|
||||
writer.write(f"from .. import abcs")
|
||||
writer.write(f"from ..core import Reader, serialize_bytes_to")
|
||||
writer.write(f"from ..core import Reader, Serializable, serialize_bytes_to")
|
||||
|
||||
ns = f"{typedef.namespace[0]}." if typedef.namespace else ""
|
||||
generated_type_names.add(f"{ns}{to_class_name(typedef.name)}")
|
||||
|
@@ -160,8 +161,11 @@ def generate(fs: FakeFs, tl: ParsedTl) -> None:
|
|||
writer.write(f"from ..core import Request, serialize_bytes_to")
|
||||
|
||||
# def name(params, ...)
|
||||
params = ", ".join(f"{p.name}: {param_type_fmt(p.ty)}" for p in required_params)
|
||||
writer.write(f"def {to_method_name(functiondef.name)}({params}) -> Request:")
|
||||
params = "".join(f", {p.name}: {param_type_fmt(p.ty)}" for p in required_params)
|
||||
star = "*" if params else ""
|
||||
writer.write(
|
||||
f"def {to_method_name(functiondef.name)}({star}{params}) -> Request:"
|
||||
)
|
||||
writer.indent(2)
|
||||
generate_function(writer, functiondef)
|
||||
writer.dedent(2)
|
||||
|
|
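# What the added "*" buys (illustrative only, not generated output): request
# builders become keyword-only, so call sites must name every argument.
def example_request(*, msg_id: int, body: bytes) -> None:
    ...

example_request(msg_id=1, body=b"")  # accepted
# example_request(1, b"")           # would raise TypeError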
|
@@ -1,18 +1,28 @@
|
|||
import re
|
||||
from typing import Iterator
|
||||
from typing import Iterator, List
|
||||
|
||||
from ....tl_parser import BaseParameter, FlagsParameter, NormalParameter, Type
|
||||
|
||||
|
||||
def split_words(name: str) -> List[str]:
|
||||
return re.findall(
|
||||
r"""
|
||||
^$
|
||||
|[a-z\d]+
|
||||
|[A-Z][A-Z\d]+(?=[A-Z]|_|$)
|
||||
|[A-Z][a-z\d]+
|
||||
""",
|
||||
name,
|
||||
re.VERBOSE,
|
||||
)
|
||||
|
||||
|
||||
def to_class_name(name: str) -> str:
|
||||
return re.sub(r"(?:^|_)([a-z])", lambda m: m[1].upper(), name)
|
||||
return "".join(word.title() for word in split_words(name))
|
||||
|
||||
|
||||
def to_method_name(name: str) -> str:
|
||||
snake_case = re.sub(
|
||||
r"_+[A-Za-z]+|[A-Z]*[a-z]+", lambda m: "_" + m[0].replace("_", "").lower(), name
|
||||
)
|
||||
return snake_case.strip("_")
|
||||
return "_".join(word.lower() for word in split_words(name))
|
||||
|
||||
|
||||
def gen_tmp_names() -> Iterator[str]:
|
||||
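# Example of the word-based renaming above (mirrored by the generator tests
# added later in this commit):
from telethon_generator._impl.codegen.serde.common import (
    split_words,
    to_class_name,
    to_method_name,
)

assert split_words("client_DH_inner_data") == ["client", "DH", "inner", "data"]
assert to_class_name("client_DH_inner_data") == "ClientDhInnerData"
assert to_method_name("JSONObjectValue") == "json_object_value"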
|
@@ -69,6 +79,8 @@ def inner_type_fmt(ty: Type) -> str:
|
|||
return to_class_name(ty.name)
|
||||
elif ty.generic_ref:
|
||||
return "bytes"
|
||||
elif ty.name == "Object":
|
||||
return "Serializable"
|
||||
else:
|
||||
ns = (".".join(ty.namespace) + ".") if ty.namespace else ""
|
||||
return f"abcs.{ns}{to_class_name(ty.name)}"
|
||||
|
@@ -91,7 +103,7 @@ def param_type_fmt(ty: BaseParameter) -> str:
|
|||
else:
|
||||
inner_ty = ty.ty
|
||||
|
||||
res = inner_type_fmt(inner_ty)
|
||||
res = "bytes" if inner_ty.name == "Object" else inner_type_fmt(inner_ty)
|
||||
|
||||
if ty.ty.generic_arg:
|
||||
res = f"List[{res}]"
|
||||
|
|
|
@@ -6,8 +6,15 @@ from ....tl_parser import Definition, NormalParameter, Parameter, Type
|
|||
from ..fakefs import SourceWriter
|
||||
from .common import inner_type_fmt, is_trivial, to_class_name, trivial_struct_fmt
|
||||
|
||||
# Some implementations choose to create these types by hand.
|
||||
# For consistency, we instead special-case the generator.
|
||||
SPECIAL_CASED_OBJECT_READS = {
|
||||
0xF35C6D01: "reader.read_remaining()", # rpc_result
|
||||
0x5BB8E511: "reader.read(_bytes)", # message
|
||||
}
|
||||
|
||||
def reader_read_fmt(ty: Type) -> Tuple[str, Optional[str]]:
|
||||
|
||||
def reader_read_fmt(ty: Type, constructor_id: int) -> Tuple[str, Optional[str]]:
|
||||
if is_trivial(NormalParameter(ty=ty, flag=None)):
|
||||
fmt = trivial_struct_fmt(NormalParameter(ty=ty, flag=None))
|
||||
size = struct.calcsize(f"<{fmt}")
|
||||
|
@@ -17,17 +24,22 @@ def reader_read_fmt(ty: Type) -> Tuple[str, Optional[str]]:
|
|||
elif ty.name == "bytes":
|
||||
return f"reader.read_bytes()", None
|
||||
elif ty.name == "int128":
|
||||
return f"int.from_bytes(reader.read(16), 'little', signed=True)", None
|
||||
return f"int.from_bytes(reader.read(16))", None
|
||||
elif ty.name == "int256":
|
||||
return f"int.from_bytes(reader.read(32), 'little', signed=True)", None
|
||||
return f"int.from_bytes(reader.read(32))", None
|
||||
elif ty.bare:
|
||||
return f"{to_class_name(ty.name)}._read_from(reader)", None
|
||||
elif ty.name == "Object":
|
||||
try:
|
||||
return SPECIAL_CASED_OBJECT_READS[constructor_id], None
|
||||
except KeyError:
|
||||
raise NotImplementedError("missing special case for object read")
|
||||
else:
|
||||
return f"reader.read_serializable({inner_type_fmt(ty)})", "type-abstract"
|
||||
|
||||
|
||||
def generate_normal_param_read(
|
||||
writer: SourceWriter, name: str, param: NormalParameter
|
||||
writer: SourceWriter, name: str, param: NormalParameter, constructor_id: int
|
||||
) -> None:
|
||||
flag_check = f"_{param.flag.name} & {1 << param.flag.index}" if param.flag else None
|
||||
if param.ty.name == "true":
|
||||
|
@@ -59,7 +71,7 @@ def generate_normal_param_read(
|
|||
fmt = trivial_struct_fmt(generic)
|
||||
size = struct.calcsize(f"<{fmt}")
|
||||
writer.write(
|
||||
f"_{name} = reader.read_fmt(f'<{{__len}}{fmt}', __len * {size})[0]"
|
||||
f"_{name} = [*reader.read_fmt(f'<{{__len}}{fmt}', __len * {size})]"
|
||||
)
|
||||
if param.ty.generic_arg.name == "Bool":
|
||||
writer.write(
|
||||
|
@@ -67,11 +79,13 @@ def generate_normal_param_read(
|
|||
)
|
||||
writer.write(f"_{name} = [_{name} == 0x997275b5]")
|
||||
else:
|
||||
fmt_read, type_ignore = reader_read_fmt(param.ty.generic_arg)
|
||||
fmt_read, type_ignore = reader_read_fmt(
|
||||
param.ty.generic_arg, constructor_id
|
||||
)
|
||||
comment = f" # type: ignore [{type_ignore}]" if type_ignore else ""
|
||||
writer.write(f"_{name} = [{fmt_read} for _ in range(__len)]{comment}")
|
||||
else:
|
||||
fmt_read, type_ignore = reader_read_fmt(param.ty)
|
||||
fmt_read, type_ignore = reader_read_fmt(param.ty, constructor_id)
|
||||
comment = f" # type: ignore [{type_ignore}]" if type_ignore else ""
|
||||
writer.write(f"_{name} = {fmt_read}{comment}")
|
||||
|
||||
|
@@ -97,4 +111,4 @@ def generate_read(writer: SourceWriter, defn: Definition) -> None:
|
|||
for param in iter:
|
||||
if not isinstance(param.ty, NormalParameter):
|
||||
raise RuntimeError("FlagsParameter should be considered trivial")
|
||||
generate_normal_param_read(writer, param.name, param.ty)
|
||||
generate_normal_param_read(writer, param.name, param.ty, defn.id)
|
||||
|
|
|
@@ -26,16 +26,16 @@ def generate_buffer_append(
|
|||
)
|
||||
else:
|
||||
writer.write(f"{buffer} += struct.pack(f'<{fmt}', {name})")
|
||||
elif ty.generic_ref:
|
||||
elif ty.generic_ref or ty.name == "Object":
|
||||
writer.write(f"{buffer} += {name}") # assume previously-serialized
|
||||
elif ty.name == "string":
|
||||
writer.write(f"serialize_bytes_to({buffer}, {name}.encode('utf-8'))")
|
||||
elif ty.name == "bytes":
|
||||
writer.write(f"serialize_bytes_to({buffer}, {name})")
|
||||
elif ty.name == "int128":
|
||||
writer.write(f"{buffer} += {name}.to_bytes(16, 'little', signed=True)")
|
||||
writer.write(f"{buffer} += {name}.to_bytes(16)")
|
||||
elif ty.name == "int256":
|
||||
writer.write(f"{buffer} += {name}.to_bytes(32, 'little', signed=True)")
|
||||
writer.write(f"{buffer} += {name}.to_bytes(32)")
|
||||
elif ty.bare:
|
||||
writer.write(f"{name}._write_to({buffer})")
|
||||
else:
|
||||
|
|
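# Note on the int128/int256 change above: bare int.to_bytes(n) and
# int.from_bytes(b) default to big-endian (and need Python 3.11+ for the
# optional arguments); the matching reads switched to the same defaults, so
# serialization and deserialization stay consistent with each other.
assert (0x0102).to_bytes(16)[-2:] == b"\x01\x02"
assert int.from_bytes(b"\x01\x02") == 0x0102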
|
@@ -37,7 +37,7 @@ def iterate(contents: str) -> Iterator[TypeDef | FunctionDef | Exception]:
|
|||
definition = definition[len(FUNCTIONS_SEP) :].strip()
|
||||
elif definition.startswith(TYPES_SEP):
|
||||
cls = TypeDef
|
||||
definition = definition[len(FUNCTIONS_SEP) :].strip()
|
||||
definition = definition[len(TYPES_SEP) :].strip()
|
||||
else:
|
||||
raise ValueError("bad separator")
|
||||
|
||||
|
|
53
generator/tests/common_test.py
Normal file
|
@@ -0,0 +1,53 @@
|
|||
from typing import List
|
||||
|
||||
from pytest import mark
|
||||
from telethon_generator._impl.codegen.serde.common import (
|
||||
split_words,
|
||||
to_class_name,
|
||||
to_method_name,
|
||||
)
|
||||
|
||||
|
||||
@mark.parametrize(
|
||||
("name", "expected"),
|
||||
[
|
||||
("resPQ", ["res", "PQ"]),
|
||||
("p_q_inner_data", ["p", "q", "inner", "data"]),
|
||||
("client_DH_inner_data", ["client", "DH", "inner", "data"]),
|
||||
("ipPort", ["ip", "Port"]),
|
||||
("JSONObjectValue", ["JSON", "Object", "Value"]),
|
||||
("fileMp4", ["file", "Mp4"]),
|
||||
],
|
||||
)
|
||||
def test_split_name_words(name: str, expected: List[str]) -> None:
|
||||
assert split_words(name) == expected
|
||||
|
||||
|
||||
@mark.parametrize(
|
||||
("name", "expected"),
|
||||
[
|
||||
("resPQ", "ResPq"),
|
||||
("p_q_inner_data", "PQInnerData"),
|
||||
("client_DH_inner_data", "ClientDhInnerData"),
|
||||
("ipPort", "IpPort"),
|
||||
("JSONObjectValue", "JsonObjectValue"),
|
||||
("fileMp4", "FileMp4"),
|
||||
],
|
||||
)
|
||||
def test_to_class_name(name: str, expected: str) -> None:
|
||||
assert to_class_name(name) == expected
|
||||
|
||||
|
||||
@mark.parametrize(
|
||||
("name", "expected"),
|
||||
[
|
||||
("resPQ", "res_pq"),
|
||||
("p_q_inner_data", "p_q_inner_data"),
|
||||
("client_DH_inner_data", "client_dh_inner_data"),
|
||||
("ipPort", "ip_port"),
|
||||
("JSONObjectValue", "json_object_value"),
|
||||
("fileMp4", "file_mp4"),
|
||||
],
|
||||
)
|
||||
def test_to_method_name(name: str, expected: str) -> None:
|
||||
assert to_method_name(name) == expected
|
|
@@ -79,4 +79,24 @@ def test_recursive_vec() -> None:
|
|||
"""
|
||||
)
|
||||
result = gen_py_code(typedefs=definitions)
|
||||
assert "value: List[abcs.JSONObjectValue]" in result
|
||||
assert "value: List[abcs.JsonObjectValue]" in result
|
||||
|
||||
|
||||
def test_object_blob_special_case() -> None:
|
||||
definitions = get_definitions(
|
||||
"""
|
||||
rpc_result#f35c6d01 req_msg_id:long result:Object = RpcResult;
|
||||
"""
|
||||
)
|
||||
result = gen_py_code(typedefs=definitions)
|
||||
assert "reader.read_remaining()" in result
|
||||
|
||||
|
||||
def test_object_blob_with_prefix_special_case() -> None:
|
||||
definitions = get_definitions(
|
||||
"""
|
||||
message msg_id:long seqno:int bytes:int body:Object = Message;
|
||||
"""
|
||||
)
|
||||
result = gen_py_code(typedefs=definitions)
|
||||
assert "reader.read(_bytes)" in result
|
||||
|
|