2016-09-04 13:42:11 +03:00
|
|
|
import platform
|
2017-05-20 16:58:44 +03:00
|
|
|
from datetime import timedelta
|
2016-09-11 14:10:27 +03:00
|
|
|
from hashlib import md5
|
2016-10-09 13:57:38 +03:00
|
|
|
from mimetypes import guess_type
|
2016-11-30 00:29:42 +03:00
|
|
|
from os import listdir, path
|
2017-05-29 22:24:47 +03:00
|
|
|
from threading import Event, RLock, Thread
|
|
|
|
from time import time, sleep
|
|
|
|
import logging
|
2016-10-09 13:57:38 +03:00
|
|
|
|
|
|
|
# Import some externalized utilities to work with the Telegram types and more
|
2017-05-21 14:02:54 +03:00
|
|
|
from . import helpers as utils
|
2017-05-29 22:24:47 +03:00
|
|
|
from .errors import (RPCError, InvalidDCError, FloodWaitError,
|
|
|
|
InvalidParameterError, ReadCancelledError)
|
|
|
|
|
2017-05-21 14:02:54 +03:00
|
|
|
from .network import authenticator, MtProtoSender, TcpTransport
|
|
|
|
from .parser.markdown_parser import parse_message_entities
|
2017-06-04 18:24:08 +03:00
|
|
|
|
2016-11-30 00:29:42 +03:00
|
|
|
# For sending and receiving requests
|
2017-05-21 14:02:54 +03:00
|
|
|
from .tl import MTProtoRequest, Session
|
|
|
|
from .tl.all_tlobjects import layer
|
2017-05-29 22:24:47 +03:00
|
|
|
from .tl.functions import (InitConnectionRequest, InvokeWithLayerRequest,
|
|
|
|
PingRequest)
|
|
|
|
|
2017-06-04 18:24:08 +03:00
|
|
|
# Required to get the password salt
|
2017-05-21 14:02:54 +03:00
|
|
|
from .tl.functions.account import GetPasswordRequest
|
2017-06-04 18:24:08 +03:00
|
|
|
|
|
|
|
# Logging in and out
|
2017-05-21 14:02:54 +03:00
|
|
|
from .tl.functions.auth import (CheckPasswordRequest, LogOutRequest,
|
|
|
|
SendCodeRequest, SignInRequest,
|
2017-05-21 14:59:16 +03:00
|
|
|
SignUpRequest, ImportBotAuthorizationRequest)
|
2017-06-04 18:24:08 +03:00
|
|
|
|
|
|
|
# Initial request
|
2017-05-21 14:02:54 +03:00
|
|
|
from .tl.functions.help import GetConfigRequest
|
2017-06-04 18:24:08 +03:00
|
|
|
|
|
|
|
# Easier access to common methods
|
2017-05-21 14:02:54 +03:00
|
|
|
from .tl.functions.messages import (
|
2016-11-30 00:29:42 +03:00
|
|
|
GetDialogsRequest, GetHistoryRequest, ReadHistoryRequest, SendMediaRequest,
|
|
|
|
SendMessageRequest)
|
2017-06-04 18:24:08 +03:00
|
|
|
|
|
|
|
# For .get_me() and ensuring we're authorized
|
|
|
|
from telethon.tl.functions.users import GetUsersRequest
|
|
|
|
|
|
|
|
# Easier access for working with media, too
|
2017-05-21 14:02:54 +03:00
|
|
|
from .tl.functions.upload import (
|
2016-11-30 00:29:42 +03:00
|
|
|
GetFileRequest, SaveBigFilePartRequest, SaveFilePartRequest)
|
2017-06-04 18:24:08 +03:00
|
|
|
|
2016-11-30 00:29:42 +03:00
|
|
|
# All the types we need to work with
|
2017-05-21 14:02:54 +03:00
|
|
|
from .tl.types import (
|
2016-11-30 00:29:42 +03:00
|
|
|
ChatPhotoEmpty, DocumentAttributeAudio, DocumentAttributeFilename,
|
2017-03-25 19:14:11 +03:00
|
|
|
InputDocumentFileLocation, InputFile, InputFileBig, InputFileLocation,
|
2016-11-30 00:29:42 +03:00
|
|
|
InputMediaUploadedDocument, InputMediaUploadedPhoto, InputPeerEmpty,
|
|
|
|
MessageMediaContact, MessageMediaDocument, MessageMediaPhoto,
|
2017-06-04 18:24:08 +03:00
|
|
|
UserProfilePhotoEmpty, InputUserSelf)
|
|
|
|
|
2017-05-21 14:02:54 +03:00
|
|
|
from .utils import (find_user_or_chat, get_input_peer,
|
2017-05-21 14:59:16 +03:00
|
|
|
get_appropriated_part_size, get_extension)
|
2016-09-04 12:07:18 +03:00
|
|
|
|
|
|
|
|
2016-09-04 13:42:11 +03:00
|
|
|
class TelegramClient:
|
2016-09-07 12:36:34 +03:00
|
|
|
|
2016-09-18 12:59:12 +03:00
|
|
|
# Current TelegramClient version
|
2017-06-03 14:36:41 +03:00
|
|
|
__version__ = '0.10'
|
2016-09-18 12:59:12 +03:00
|
|
|
|
2017-05-08 17:01:53 +03:00
|
|
|
# region Initialization
|
2016-09-07 12:36:34 +03:00
|
|
|
|
2017-03-20 19:16:34 +03:00
|
|
|
    def __init__(self, session, api_id, api_hash, proxy=None):
        """Initializes the Telegram client with the specified API ID and Hash.

           Session can either be a `str` object (the filename for the loaded/saved .session)
           or it can be a `Session` instance (in which case list_sessions() would probably not work).
           If you don't want any file to be saved, pass `None`

           In the later case, you are free to override the `Session` class to provide different
           .save() and .load() implementations to suit your needs."""

        if api_id is None or api_hash is None:
            raise PermissionError(
                'Your API ID or Hash are invalid. Please read "Requirements" on README.rst')

        self.api_id = api_id
        self.api_hash = api_hash

        # Determine what session object we have
        if isinstance(session, str) or session is None:
            self.session = Session.try_load_or_create_new(session)
        elif isinstance(session, Session):
            self.session = session
        else:
            raise ValueError(
                'The given session must either be a string or a Session instance.')

        # Transport is created lazily in .connect()
        self.transport = None
        self.proxy = proxy  # Will be used when a TcpTransport is created

        # Safety across multiple threads (for the updates thread)
        self._lock = RLock()
        self._logger = logging.getLogger(__name__)

        # Methods to be called when an update is received
        self.update_handlers = []
        self.ping_interval = 60
        self._ping_time_last = time()
        self._updates_thread_running = Event()
        self._updates_thread_receiving = Event()

        # Cache "exported" senders 'dc_id: MtProtoSender' and
        # their corresponding sessions not to recreate them all
        # the time since it's a (somewhat expensive) process.
        self._cached_senders = {}
        self._cached_sessions = {}

        # These will be set later (dc_options by .connect(),
        # sender once the connection is established)
        self._updates_thread = None
        self.dc_options = None
        self.sender = None
        self.phone_code_hashes = {}
|
|
|
|
|
|
|
|
# endregion
|
|
|
|
|
|
|
|
# region Connecting
|
2016-09-04 12:07:18 +03:00
|
|
|
|
2017-06-03 14:24:27 +03:00
|
|
|
    def connect(self, reconnect=False,
                device_model=None, system_version=None,
                app_version=None, lang_code=None):
        """Connects to the Telegram servers, executing authentication if
           required. Note that authenticating to the Telegram servers is
           not the same as authenticating the desired user itself, which
           may require a call (or several) to 'sign_in' for the first time.

           Default values for the optional parameters if left as None are:
           device_model = platform.node()
           system_version = platform.system()
           app_version = TelegramClient.__version__
           lang_code = 'en'

           Returns True on success, False if the initial connection could
           not be stabilised (RPCError/ConnectionError are caught and logged).
        """
        if self.transport is None:
            self.transport = TcpTransport(self.session.server_address,
                                          self.session.port, proxy=self.proxy)

        try:
            # Generate a fresh auth key if we have none yet, or when an
            # explicit reconnection against an existing sender is requested
            if not self.session.auth_key or (reconnect and self.sender is not None):
                self.session.auth_key, self.session.time_offset = \
                    authenticator.do_authentication(self.transport)

                self.session.save()

            self.sender = MtProtoSender(self.transport, self.session)
            self.sender.connect()

            # Set the default parameters if left unspecified
            if not device_model:
                device_model = platform.node()
            if not system_version:
                system_version = platform.system()
            if not app_version:
                app_version = self.__version__
            if not lang_code:
                lang_code = 'en'

            # Now it's time to send an InitConnectionRequest
            # This must always be invoked with the layer we'll be using
            query = InitConnectionRequest(
                api_id=self.api_id,
                device_model=device_model,
                system_version=system_version,
                app_version=app_version,
                lang_code=lang_code,
                query=GetConfigRequest())

            result = self.invoke(
                InvokeWithLayerRequest(
                    layer=layer, query=query))

            # We're only interested in the DC options,
            # although many other options are available!
            self.dc_options = result.dc_options

            # Once we know we're authorized, we can setup the ping thread
            if self.is_user_authorized():
                self._setup_ping_thread()

            return True
        except (RPCError, ConnectionError) as error:
            # Probably errors from the previous session, ignore them
            self._logger.warning('Could not stabilise initial connection: {}'
                                 .format(error))
            return False
|
2016-09-04 12:07:18 +03:00
|
|
|
|
2016-09-09 12:47:37 +03:00
|
|
|
    def disconnect(self):
        """Disconnects from the Telegram server and stops all the spawned threads"""
        # Stop the updates thread first so nothing keeps reading
        # from the connection while we tear it down
        self._set_updates_thread(running=False)
        if self.sender:
            self.sender.disconnect()
        self.sender = None
        if self.transport:
            self.transport.close()
        self.transport = None

        # Also disconnect all the cached senders
        for sender in self._cached_senders.values():
            sender.disconnect()

        self._cached_senders.clear()
        self._cached_sessions.clear()
|
|
|
|
|
2017-05-29 22:24:47 +03:00
|
|
|
def reconnect(self):
|
|
|
|
"""Disconnects and connects again (effectively reconnecting)"""
|
|
|
|
self.disconnect()
|
|
|
|
self.connect()
|
|
|
|
|
2016-09-07 12:36:34 +03:00
|
|
|
# endregion
|
2016-09-04 12:07:18 +03:00
|
|
|
|
2017-05-30 13:14:29 +03:00
|
|
|
# region Working with different Data Centers
|
|
|
|
|
|
|
|
    def _reconnect_to_dc(self, dc_id):
        """Reconnects to the specified DC ID. This is automatically
           called after an InvalidDCError is raised"""
        dc = self._get_dc(dc_id)

        # Drop the current transport, repoint the session at the new DC
        # and persist it; .connect() will create a transport for the
        # updated address
        self.transport.close()
        self.transport = None
        self.session.server_address = dc.ip_address
        self.session.port = dc.port
        self.session.save()

        self.connect(reconnect=True)
|
|
|
|
|
|
|
|
def _get_dc(self, dc_id):
|
|
|
|
"""Gets the Data Center (DC) associated to 'dc_id'"""
|
|
|
|
if not self.dc_options:
|
|
|
|
raise ConnectionError(
|
|
|
|
'Cannot determine the required data center IP address. '
|
2017-05-30 14:27:23 +03:00
|
|
|
'Stabilise a successful initial connection first.')
|
2017-05-30 13:14:29 +03:00
|
|
|
|
|
|
|
return next(dc for dc in self.dc_options if dc.id == dc_id)
|
|
|
|
|
2017-05-30 14:03:14 +03:00
|
|
|
    def _get_exported_sender(self, dc_id, init_connection=False):
        """Gets a cached exported MtProtoSender for the desired DC.

           If it's the first time retrieving the MtProtoSender, the
           current authorization is exported to the new DC so that
           it can be used there, and the connection is initialized.

           If after using the sender a ConnectionResetError is raised,
           this method should be called again with init_connection=True
           in order to perform the reconnection."""
        # Thanks badoualy/kotlogram on /telegram/api/DefaultTelegramClient.kt
        # for clearly showing how to export the authorization! ^^

        sender = self._cached_senders.get(dc_id)
        session = self._cached_sessions.get(dc_id)

        if sender and session:
            # Cache hit: optionally cycle the connection before reuse
            if init_connection:
                sender.disconnect()
                sender.connect()

            return sender
        else:
            # Local import to avoid the cost/coupling unless actually needed
            from telethon.tl.functions.auth import \
                ExportAuthorizationRequest, ImportAuthorizationRequest

            dc = self._get_dc(dc_id)

            # Step 1. Export the current authorization to the new DC.
            export_auth = self.invoke(ExportAuthorizationRequest(dc_id))

            # Step 2. Create a transport connected to the new DC.
            # We also create a temporary session because
            # it's what will contain the required AuthKey
            # for MtProtoSender to work.
            transport = TcpTransport(dc.ip_address, dc.port, proxy=self.proxy)
            session = Session(None)
            session.auth_key, session.time_offset = \
                authenticator.do_authentication(transport)

            # Step 3. After authenticating on the new DC,
            # we can create the proper MtProtoSender.
            sender = MtProtoSender(transport, session)
            sender.connect()

            # InvokeWithLayer(InitConnection(ImportAuthorization(...)))
            init_connection = InitConnectionRequest(
                api_id=self.api_id,
                device_model=platform.node(),
                system_version=platform.system(),
                app_version=self.__version__,
                lang_code='en',
                query=ImportAuthorizationRequest(
                    export_auth.id, export_auth.bytes)
            )
            query = InvokeWithLayerRequest(layer=layer, query=init_connection)

            sender.send(query)
            sender.receive(query)

            # Step 4. We're connected and using the desired layer!
            # Don't go through this expensive process every time.
            self._cached_senders[dc_id] = sender
            self._cached_sessions[dc_id] = session

            return sender
|
|
|
|
|
2017-05-30 13:14:29 +03:00
|
|
|
# endregion
|
|
|
|
|
2016-09-07 12:36:34 +03:00
|
|
|
# region Telegram requests functions
|
2016-09-04 12:07:18 +03:00
|
|
|
|
2017-04-11 10:46:26 +03:00
|
|
|
def invoke(self, request, timeout=timedelta(seconds=5), throw_invalid_dc=False):
|
2016-10-03 10:53:41 +03:00
|
|
|
"""Invokes a MTProtoRequest (sends and receives it) and returns its result.
|
|
|
|
An optional timeout can be given to cancel the operation after the time delta.
|
2017-03-20 14:23:53 +03:00
|
|
|
Timeout can be set to None for no timeout.
|
|
|
|
|
|
|
|
If throw_invalid_dc is True, these errors won't be caught (useful to
|
|
|
|
avoid infinite recursion). This should not be set to True manually."""
|
2016-09-11 17:24:03 +03:00
|
|
|
if not issubclass(type(request), MTProtoRequest):
|
|
|
|
raise ValueError('You can only invoke MtProtoRequests')
|
|
|
|
|
2017-04-11 11:25:09 +03:00
|
|
|
if not self.sender:
|
|
|
|
raise ValueError('You must be connected to invoke requests!')
|
|
|
|
|
2017-05-29 22:24:47 +03:00
|
|
|
if self._updates_thread_receiving.is_set():
|
|
|
|
self.sender.cancel_receive()
|
|
|
|
|
2017-03-20 14:23:53 +03:00
|
|
|
try:
|
2017-05-29 22:24:47 +03:00
|
|
|
self._lock.acquire()
|
2017-05-29 21:41:03 +03:00
|
|
|
updates = []
|
2017-03-20 14:23:53 +03:00
|
|
|
self.sender.send(request)
|
2017-05-29 21:41:03 +03:00
|
|
|
self.sender.receive(request, timeout, updates=updates)
|
|
|
|
for update in updates:
|
2017-05-29 22:24:47 +03:00
|
|
|
for handler in self.update_handlers:
|
2017-05-29 21:41:03 +03:00
|
|
|
handler(update)
|
2017-03-20 14:23:53 +03:00
|
|
|
|
2017-04-11 10:46:26 +03:00
|
|
|
return request.result
|
2016-09-11 17:24:03 +03:00
|
|
|
|
2017-03-20 14:23:53 +03:00
|
|
|
except InvalidDCError as error:
|
|
|
|
if throw_invalid_dc:
|
2017-05-19 08:48:01 +03:00
|
|
|
raise
|
2017-03-20 14:23:53 +03:00
|
|
|
|
2017-05-30 14:03:14 +03:00
|
|
|
if error.message.startswith('FILE_MIGRATE_'):
|
|
|
|
return self.invoke_on_dc(request, error.new_dc,
|
|
|
|
timeout=timeout)
|
|
|
|
else:
|
|
|
|
self._reconnect_to_dc(error.new_dc)
|
|
|
|
return self.invoke(request,
|
|
|
|
timeout=timeout, throw_invalid_dc=True)
|
2016-09-11 17:24:03 +03:00
|
|
|
|
2017-05-30 11:11:18 +03:00
|
|
|
except ConnectionResetError:
|
|
|
|
self._logger.info('Server disconnected us. Reconnecting and '
|
|
|
|
'resending request...')
|
|
|
|
self.reconnect()
|
|
|
|
self.invoke(request, timeout=timeout,
|
|
|
|
throw_invalid_dc=throw_invalid_dc)
|
|
|
|
|
2017-05-29 22:24:47 +03:00
|
|
|
except FloodWaitError:
|
|
|
|
self.disconnect()
|
|
|
|
raise
|
|
|
|
|
|
|
|
finally:
|
|
|
|
self._lock.release()
|
|
|
|
|
2017-05-30 14:03:14 +03:00
|
|
|
    def invoke_on_dc(self, request, dc_id,
                     timeout=timedelta(seconds=5), reconnect=False):
        """Invokes the given request on a different DC
           by making use of the exported MtProtoSenders.

           If 'reconnect=True', then the a reconnection will be performed and
           ConnectionResetError will be raised if it occurs a second time.
        """
        try:
            sender = self._get_exported_sender(
                dc_id, init_connection=reconnect)

            # NOTE(review): 'timeout' is accepted but not forwarded to
            # sender.receive here — confirm whether that is intended
            sender.send(request)
            sender.receive(request)
            return request.result

        except ConnectionResetError:
            if reconnect:
                # Second failure in a row: give up and propagate
                raise
            else:
                # Retry once, re-initializing the cached connection
                return self.invoke_on_dc(request, dc_id,
                                         timeout=timeout, reconnect=True)
|
|
|
|
|
2016-09-11 17:24:03 +03:00
|
|
|
# region Authorization requests
|
|
|
|
|
2016-09-07 12:36:34 +03:00
|
|
|
def is_user_authorized(self):
|
2017-06-04 18:24:08 +03:00
|
|
|
"""Has the user been authorized yet
|
|
|
|
(code request sent and confirmed)?"""
|
|
|
|
return self.session and self.get_me() is not None
|
2016-09-04 12:07:18 +03:00
|
|
|
|
2016-09-04 13:42:11 +03:00
|
|
|
def send_code_request(self, phone_number):
|
2016-09-07 12:36:34 +03:00
|
|
|
"""Sends a code request to the specified phone number"""
|
2017-03-20 14:23:53 +03:00
|
|
|
result = self.invoke(SendCodeRequest(phone_number, self.api_id, self.api_hash))
|
|
|
|
self.phone_code_hashes[phone_number] = result.phone_code_hash
|
2016-09-05 19:35:12 +03:00
|
|
|
|
2017-03-20 14:31:13 +03:00
|
|
|
    def sign_in(self, phone_number=None, code=None, password=None, bot_token=None):
        """Completes the authorization of a phone number by providing the received code.

           If no phone or code is provided, then the sole password will be used. The password
           should be used after a normal authorization attempt has happened and an RPCError
           with `.password_required = True` was raised.

           To login as a bot, only `bot_token` should be provided. This should equal to the
           bot access hash provided by https://t.me/BotFather during your bot creation.

           Returns True on success, False if the given phone code was rejected."""
        if phone_number and code:
            if phone_number not in self.phone_code_hashes:
                raise ValueError(
                    'Please make sure you have called send_code_request first.')

            try:
                result = self.invoke(
                    SignInRequest(phone_number, self.phone_code_hashes[
                        phone_number], code))

            except RPCError as error:
                # A wrong/expired code is a recoverable failure, not a crash
                if error.message.startswith('PHONE_CODE_'):
                    return False
                else:
                    raise
        elif password:
            # Two-step verification: hash the password with the server salt
            salt = self.invoke(GetPasswordRequest()).current_salt
            result = self.invoke(
                CheckPasswordRequest(utils.get_password_hash(password, salt)))
        elif bot_token:
            result = self.invoke(
                ImportBotAuthorizationRequest(flags=0,
                                              api_id=self.api_id,
                                              api_hash=self.api_hash,
                                              bot_auth_token=bot_token))
        else:
            raise ValueError(
                'You must provide a phone_number and a code for the first time, '
                'and a password only if an RPCError was raised before.')

        # Ignore 'result.user', we don't need it
        #
        # If we want the connection to stay alive for a long time, we need
        # to start the pings thread once we're already authorized and not
        # before to avoid the updates thread trying to read anything while
        # we haven't yet connected.
        self._setup_ping_thread()

        return True
|
2016-09-04 12:07:18 +03:00
|
|
|
|
2016-09-16 14:35:14 +03:00
|
|
|
def sign_up(self, phone_number, code, first_name, last_name=''):
|
|
|
|
"""Signs up to Telegram. Make sure you sent a code request first!"""
|
2016-11-30 00:29:42 +03:00
|
|
|
result = self.invoke(
|
|
|
|
SignUpRequest(
|
|
|
|
phone_number=phone_number,
|
|
|
|
phone_code_hash=self.phone_code_hashes[phone_number],
|
|
|
|
phone_code=code,
|
|
|
|
first_name=first_name,
|
|
|
|
last_name=last_name))
|
2016-09-16 14:35:14 +03:00
|
|
|
|
|
|
|
self.session.user = result.user
|
|
|
|
self.session.save()
|
|
|
|
|
|
|
|
    def log_out(self):
        """Logs out and deletes the current session. Returns True if everything went OK"""
        # Special flag when logging out (so the ack request confirms it)
        self.sender.logging_out = True
        try:
            self.invoke(LogOutRequest())
            self.disconnect()
            if not self.session.delete():
                return False

            self.session = None
            return True
        except (RPCError, ConnectionError):
            # Something happened when logging out, restore the state back
            self.sender.logging_out = False
            return False
|
|
|
|
|
2017-06-04 18:24:08 +03:00
|
|
|
def get_me(self):
|
|
|
|
"""Gets "me" (the self user) which is currently authenticated,
|
|
|
|
or None if the request fails (hence, not authenticated)."""
|
|
|
|
try:
|
|
|
|
return self.invoke(GetUsersRequest([InputUserSelf()]))[0]
|
|
|
|
except RPCError as e:
|
|
|
|
if e.code == 401: # 401 UNAUTHORIZED
|
|
|
|
return None
|
|
|
|
else:
|
|
|
|
raise
|
|
|
|
|
2016-09-26 14:13:11 +03:00
|
|
|
@staticmethod
|
|
|
|
def list_sessions():
|
|
|
|
"""Lists all the sessions of the users who have ever connected
|
|
|
|
using this client and never logged out"""
|
2017-05-21 14:59:16 +03:00
|
|
|
return [path.splitext(path.basename(f))[0]
|
2016-09-26 14:13:11 +03:00
|
|
|
for f in listdir('.') if f.endswith('.session')]
|
|
|
|
|
2016-09-11 17:24:03 +03:00
|
|
|
# endregion
|
|
|
|
|
|
|
|
# region Dialogs ("chats") requests
|
|
|
|
|
2016-11-30 00:29:42 +03:00
|
|
|
    def get_dialogs(self,
                    limit=10,
                    offset_date=None,
                    offset_id=0,
                    offset_peer=InputPeerEmpty()):
        """Returns a tuple of lists ([dialogs], [entities]) with at least 'limit' items each.
           If `limit` is 0, all dialogs will be retrieved.
           The `entity` represents the user, chat or channel corresponding to that dialog"""

        # NOTE(review): 'offset_peer=InputPeerEmpty()' is a mutable default
        # argument shared across calls — assumed to be treated as read-only
        r = self.invoke(
            GetDialogsRequest(
                offset_date=offset_date,
                offset_id=offset_id,
                offset_peer=offset_peer,
                limit=limit))
        return (
            r.dialogs,
            [find_user_or_chat(d.peer, r.users, r.chats) for d in r.dialogs])
|
2016-09-06 19:54:49 +03:00
|
|
|
|
2016-09-11 17:24:03 +03:00
|
|
|
# endregion
|
|
|
|
|
|
|
|
# region Message requests
|
|
|
|
|
2016-11-30 00:29:42 +03:00
|
|
|
def send_message(self,
|
2017-01-17 22:22:47 +03:00
|
|
|
entity,
|
2016-11-30 00:29:42 +03:00
|
|
|
message,
|
|
|
|
markdown=False,
|
|
|
|
no_web_page=False):
|
2017-01-17 22:22:47 +03:00
|
|
|
"""Sends a message to the given entity (or input peer) and returns the sent message ID"""
|
2016-09-07 20:01:00 +03:00
|
|
|
if markdown:
|
|
|
|
msg, entities = parse_message_entities(message)
|
|
|
|
else:
|
|
|
|
msg, entities = message, []
|
|
|
|
|
2016-10-03 20:44:01 +03:00
|
|
|
msg_id = utils.generate_random_long()
|
2016-11-30 00:29:42 +03:00
|
|
|
self.invoke(
|
|
|
|
SendMessageRequest(
|
2017-01-17 22:22:47 +03:00
|
|
|
peer=get_input_peer(entity),
|
2016-11-30 00:29:42 +03:00
|
|
|
message=msg,
|
|
|
|
random_id=msg_id,
|
|
|
|
entities=entities,
|
|
|
|
no_webpage=no_web_page))
|
2016-10-03 20:44:01 +03:00
|
|
|
return msg_id
|
2016-09-06 19:54:49 +03:00
|
|
|
|
2016-11-30 00:29:42 +03:00
|
|
|
def get_message_history(self,
|
2017-01-17 22:22:47 +03:00
|
|
|
entity,
|
2016-11-30 00:29:42 +03:00
|
|
|
limit=20,
|
|
|
|
offset_date=None,
|
|
|
|
offset_id=0,
|
|
|
|
max_id=0,
|
|
|
|
min_id=0,
|
|
|
|
add_offset=0):
|
2016-09-08 13:13:31 +03:00
|
|
|
"""
|
2017-01-17 22:22:47 +03:00
|
|
|
Gets the message history for the specified entity
|
2016-09-08 13:13:31 +03:00
|
|
|
|
2017-01-17 22:22:47 +03:00
|
|
|
:param entity: The entity (or input peer) from whom to retrieve the message history
|
2016-09-08 13:13:31 +03:00
|
|
|
:param limit: Number of messages to be retrieved
|
|
|
|
:param offset_date: Offset date (messages *previous* to this date will be retrieved)
|
|
|
|
:param offset_id: Offset message ID (only messages *previous* to the given ID will be retrieved)
|
|
|
|
:param max_id: All the messages with a higher (newer) ID or equal to this will be excluded
|
|
|
|
:param min_id: All the messages with a lower (older) ID or equal to this will be excluded
|
|
|
|
:param add_offset: Additional message offset (all of the specified offsets + this offset = older messages)
|
|
|
|
|
|
|
|
:return: A tuple containing total message count and two more lists ([messages], [senders]).
|
|
|
|
Note that the sender can be null if it was not found!
|
|
|
|
"""
|
2016-11-30 00:29:42 +03:00
|
|
|
result = self.invoke(
|
|
|
|
GetHistoryRequest(
|
2017-01-17 22:22:47 +03:00
|
|
|
get_input_peer(entity),
|
2016-11-30 00:29:42 +03:00
|
|
|
limit=limit,
|
|
|
|
offset_date=offset_date,
|
|
|
|
offset_id=offset_id,
|
|
|
|
max_id=max_id,
|
|
|
|
min_id=min_id,
|
|
|
|
add_offset=add_offset))
|
2016-09-08 13:13:31 +03:00
|
|
|
|
|
|
|
# The result may be a messages slice (not all messages were retrieved) or
|
|
|
|
# simply a messages TLObject. In the later case, no "count" attribute is specified:
|
|
|
|
# the total messages count is retrieved by counting all the retrieved messages
|
|
|
|
total_messages = getattr(result, 'count', len(result.messages))
|
2016-09-11 11:35:02 +03:00
|
|
|
|
2016-09-12 15:07:45 +03:00
|
|
|
# Iterate over all the messages and find the sender User
|
|
|
|
users = []
|
|
|
|
for msg in result.messages:
|
|
|
|
for usr in result.users:
|
|
|
|
if msg.from_id == usr.id:
|
|
|
|
users.append(usr)
|
|
|
|
break
|
|
|
|
|
|
|
|
return total_messages, result.messages, users
|
|
|
|
|
2017-01-17 22:22:47 +03:00
|
|
|
def send_read_acknowledge(self, entity, messages=None, max_id=None):
|
2016-10-02 14:57:03 +03:00
|
|
|
"""Sends a "read acknowledge" (i.e., notifying the given peer that we've
|
|
|
|
read their messages, also known as the "double check ✅✅").
|
|
|
|
|
|
|
|
Either a list of messages (or a single message) can be given,
|
|
|
|
or the maximum message ID (until which message we want to send the read acknowledge).
|
|
|
|
|
|
|
|
Returns an AffectedMessages TLObject"""
|
|
|
|
if max_id is None:
|
|
|
|
if not messages:
|
2016-11-30 00:29:42 +03:00
|
|
|
raise InvalidParameterError(
|
|
|
|
'Either a message list or a max_id must be provided.')
|
2016-10-02 14:57:03 +03:00
|
|
|
|
|
|
|
if isinstance(messages, list):
|
|
|
|
max_id = max(msg.id for msg in messages)
|
|
|
|
else:
|
|
|
|
max_id = messages.id
|
|
|
|
|
2017-01-17 22:22:47 +03:00
|
|
|
return self.invoke(ReadHistoryRequest(peer=get_input_peer(entity), max_id=max_id))
|
2016-10-02 14:57:03 +03:00
|
|
|
|
2016-09-11 17:24:03 +03:00
|
|
|
# endregion
|
|
|
|
|
|
|
|
# TODO Handle media downloading/uploading in a different session?
|
2016-09-11 14:10:27 +03:00
|
|
|
# "It is recommended that large queries (upload.getFile, upload.saveFilePart)
|
|
|
|
# be handled through a separate session and a separate connection"
|
2016-09-12 20:32:16 +03:00
|
|
|
# region Uploading media requests
|
|
|
|
|
2016-11-30 00:29:42 +03:00
|
|
|
    def upload_file(self,
                    file_path,
                    part_size_kb=None,
                    file_name=None,
                    progress_callback=None):
        """Uploads the specified file_path and returns a handle which can be later used

        :param file_path: The file path of the file that will be uploaded
        :param part_size_kb: The part size when uploading the file. None = Automatic
        :param file_name: The name of the uploaded file. None = Automatic
        :param progress_callback: A callback function which takes two parameters,
                                  uploaded size (in bytes) and total file size (in bytes)
                                  This is called every time a part is uploaded

        :return: An InputFileBig for files over 10MB, otherwise an InputFile
                 (carrying the MD5 checksum of the uploaded content)
        """
        file_size = path.getsize(file_path)
        if not part_size_kb:
            part_size_kb = get_appropriated_part_size(file_size)

        if part_size_kb > 512:
            raise ValueError('The part size must be less or equal to 512KB')

        part_size = int(part_size_kb * 1024)
        if part_size % 1024 != 0:
            raise ValueError('The part size must be evenly divisible by 1024')

        # Determine whether the file is too big (over 10MB) or not
        # Telegram does make a distinction between smaller or larger files
        is_large = file_size > 10 * 1024 * 1024
        # Ceiling division: the last part may be smaller than part_size
        part_count = (file_size + part_size - 1) // part_size

        # Multiply the datetime timestamp by 10^6 to get the ticks
        # This is high likely going to be unique
        file_id = utils.generate_random_long()
        hash_md5 = md5()

        with open(file_path, 'rb') as file:
            for part_index in range(part_count):
                # Read the file by in chunks of size part_size
                part = file.read(part_size)

                # The SavePartRequest is different depending on whether
                # the file is too large or not (over or less than 10MB)
                if is_large:
                    request = SaveBigFilePartRequest(file_id, part_index,
                                                     part_count, part)
                else:
                    request = SaveFilePartRequest(file_id, part_index, part)

                # Invoke the file upload and increment both the part index and MD5 checksum
                result = self.invoke(request)
                if result:
                    if not is_large:
                        # No need to update the hash if it's a large file
                        hash_md5.update(part)

                    if progress_callback:
                        progress_callback(file.tell(), file_size)
                else:
                    raise ValueError('Could not upload file part #{}'.format(
                        part_index))

        # Set a default file name if None was specified
        if not file_name:
            file_name = path.basename(file_path)

        # After the file has been uploaded, we can return a handle pointing to it
        if is_large:
            return InputFileBig(
                id=file_id,
                parts=part_count,
                name=file_name)
        else:
            return InputFile(
                id=file_id,
                parts=part_count,
                name=file_name,
                md5_checksum=hash_md5.hexdigest())
|
2016-09-11 14:10:27 +03:00
|
|
|
|
2017-01-17 22:22:47 +03:00
|
|
|
def send_photo_file(self, input_file, entity, caption=''):
    """Sends an already-uploaded input_file (expected to be a photo)
    to the given entity (or input peer), with an optional caption"""
    media = InputMediaUploadedPhoto(input_file, caption)
    self.send_media_file(media, entity)
|
2016-09-11 14:10:27 +03:00
|
|
|
|
2017-01-17 22:22:47 +03:00
|
|
|
def send_document_file(self, input_file, entity, caption=''):
    """Sends an already-uploaded input_file (expected to be a document)
    to the given entity (or input peer), with an optional caption"""

    # guess_type returns a (type, encoding) tuple; only the type is wanted.
    # Fall back to the generic binary subtype when it cannot be guessed:
    # «The "octet-stream" subtype is used to indicate that a body
    # contains arbitrary binary data.»
    mime_type = guess_type(input_file.name)[0] or 'application/octet-stream'

    attributes = [
        DocumentAttributeFilename(input_file.name)
        # TODO If the input file is an audio, find out:
        # Performer and song title and add DocumentAttributeAudio
    ]

    self.send_media_file(
        InputMediaUploadedDocument(
            file=input_file,
            mime_type=mime_type,
            attributes=attributes,
            caption=caption),
        entity)
|
2016-09-12 20:32:16 +03:00
|
|
|
|
2017-01-17 22:22:47 +03:00
|
|
|
def send_media_file(self, input_media, entity):
    """Sends any kind of input_media (contact, document, photo…)
    to the given entity (or input peer)"""
    request = SendMediaRequest(
        peer=get_input_peer(entity),
        media=input_media,
        random_id=utils.generate_random_long())
    self.invoke(request)
|
2016-09-11 14:10:27 +03:00
|
|
|
|
2016-09-12 20:32:16 +03:00
|
|
|
# endregion
|
|
|
|
|
|
|
|
# region Downloading media requests
|
|
|
|
|
2016-11-30 00:29:42 +03:00
|
|
|
def download_profile_photo(self,
                           profile_photo,
                           file_path,
                           add_extension=True,
                           download_big=True):
    """Downloads the profile photo of a user or a chat (channels
    included) into file_path.

    Returns False when no photo was provided or it was Empty,
    True once the download completed."""
    # Bail out early when there is nothing that can be downloaded
    if not profile_photo or isinstance(
            profile_photo, (UserProfilePhotoEmpty, ChatPhotoEmpty)):
        return False

    if add_extension:
        file_path += get_extension(profile_photo)

    # Pick the requested photo size (big by default)
    location = profile_photo.photo_big if download_big \
        else profile_photo.photo_small

    # Download the media from the chosen input file location
    self.download_file_loc(
        InputFileLocation(
            volume_id=location.volume_id,
            local_id=location.local_id,
            secret=location.secret),
        file_path)
    return True
|
|
|
|
|
2016-11-30 00:29:42 +03:00
|
|
|
def download_msg_media(self,
                       message_media,
                       file_path,
                       add_extension=True,
                       progress_callback=None):
    """Downloads the given MessageMedia (Photo, Document or Contact)
    into the desired file_path, optionally finding its extension
    automatically.

    The progress_callback should be a callback function which takes
    two parameters: downloaded size (in bytes) and total file size
    (in bytes). This will be called every time a part is downloaded.

    Returns whatever the specific downloader returns, or None when
    the media type is not downloadable."""
    # isinstance() is the idiomatic check (vs. comparing type() for
    # equality) and also accepts possible subclasses of these types
    if isinstance(message_media, MessageMediaPhoto):
        return self.download_photo(message_media, file_path, add_extension,
                                   progress_callback)

    elif isinstance(message_media, MessageMediaDocument):
        return self.download_document(message_media, file_path,
                                      add_extension, progress_callback)

    elif isinstance(message_media, MessageMediaContact):
        return self.download_contact(message_media, file_path,
                                     add_extension)
|
2016-09-12 20:32:16 +03:00
|
|
|
|
2016-11-30 00:29:42 +03:00
|
|
|
def download_photo(self,
                   message_media_photo,
                   file_path,
                   add_extension=False,
                   progress_callback=None):
    """Downloads a MessageMediaPhoto into file_path, taking the last
    listed size (assumed to be the largest) and optionally appending
    the proper extension automatically.

    progress_callback, when given, is invoked after every downloaded
    part with (downloaded bytes, total file size in bytes)."""
    # The last entry of .sizes is taken as the largest available size
    largest = message_media_photo.photo.sizes[-1]
    total_size = largest.size
    location = largest.location

    if add_extension:
        file_path += get_extension(message_media_photo)

    # Download the media from the chosen size's file location
    self.download_file_loc(
        InputFileLocation(
            volume_id=location.volume_id,
            local_id=location.local_id,
            secret=location.secret),
        file_path,
        file_size=total_size,
        progress_callback=progress_callback)
    return file_path
|
|
|
|
|
2016-11-30 00:29:42 +03:00
|
|
|
def download_document(self,
                      message_media_document,
                      file_path=None,
                      add_extension=True,
                      progress_callback=None):
    """Downloads the given MessageMediaDocument into the desired
    file_path, optionally finding its extension automatically.

    If no file_path is given, it will try to be guessed from the
    document's attributes (its file name, or performer and title
    for audio documents).

    The progress_callback should be a callback function which takes two
    parameters, downloaded size (in bytes) and total file size (in bytes).
    This will be called every time a part is downloaded.

    Raises ValueError if no file_path was given and none could be
    inferred from the document attributes."""
    document = message_media_document.document
    file_size = document.size

    # If no file path was given, try to guess it from the attributes
    if file_path is None:
        for attr in document.attributes:
            # isinstance() is the idiomatic check (vs. type() equality)
            # and also accepts subclasses of these attribute types
            if isinstance(attr, DocumentAttributeFilename):
                file_path = attr.file_name
                break  # This attribute has higher preference

            elif isinstance(attr, DocumentAttributeAudio):
                file_path = '{} - {}'.format(attr.performer, attr.title)

        if file_path is None:
            raise ValueError('Could not infer a file_path for the document'
                             '. Please provide a valid file_path manually')

    if add_extension:
        file_path += get_extension(message_media_document)

    self.download_file_loc(
        InputDocumentFileLocation(
            id=document.id,
            access_hash=document.access_hash,
            version=document.version),
        file_path,
        file_size=file_size,
        progress_callback=progress_callback)
    return file_path
|
|
|
|
|
|
|
|
@staticmethod
def download_contact(message_media_contact, file_path, add_extension=True):
    """Downloads a media contact into file_path using the vCard 4.0
    format, returning the final file path.

    If add_extension is True, the '.vcard' extension is appended."""

    first_name = message_media_contact.first_name
    last_name = message_media_contact.last_name
    phone_number = message_media_contact.phone_number

    # The only way we can save a contact in an understandable
    # way by phones is by using the .vCard format
    if add_extension:
        file_path += '.vcard'

    # Ensure that we'll be able to download the contact
    utils.ensure_parent_dir_exists(file_path)

    with open(file_path, 'w', encoding='utf-8') as file:
        file.write('BEGIN:VCARD\n')
        file.write('VERSION:4.0\n')
        file.write('N:{};{};;;\n'.format(first_name, last_name
                                         if last_name else ''))
        # Bug fix: the original joined (first_name, last_name) directly,
        # which raised TypeError when last_name was None (it's optional,
        # as the N: line above already acknowledged)
        full_name = ' '.join(
            part for part in (first_name, last_name) if part)
        file.write('FN:{}\n'.format(full_name))
        file.write('TEL;TYPE=cell;VALUE=uri:tel:+{}\n'.format(
            phone_number))
        file.write('END:VCARD\n')

    return file_path
|
|
|
|
|
2016-11-30 00:29:42 +03:00
|
|
|
def download_file_loc(self,
                      input_location,
                      file_path,
                      part_size_kb=64,
                      file_size=None,
                      progress_callback=None):
    """Downloads media from the given input_file_location into the
    specified file_path.

    If a progress_callback function is given, it will be called taking
    two arguments (downloaded bytes count and total file size).

    Returns the result type reported by the server once an empty part
    signals the end of the file."""
    # Without an explicit part size we need the file size to derive one
    if not part_size_kb:
        if not file_size:
            raise ValueError('A part size value must be provided')
        else:
            part_size_kb = get_appropriated_part_size(file_size)

    part_size = int(part_size_kb * 1024)
    if part_size % 1024 != 0:
        raise ValueError('The part size must be evenly divisible by 1024')

    # Ensure that we'll be able to download the media
    utils.ensure_parent_dir_exists(file_path)

    # Keep requesting consecutive parts until an empty one arrives
    offset = 0
    with open(file_path, 'wb') as file:
        while True:
            result = self.invoke(
                GetFileRequest(input_location, offset, part_size))
            offset += part_size

            # Zero bytes received means the file is over,
            # so there is nothing left to download and write
            if not result.bytes:
                return result.type  # Return some extra information

            file.write(result.bytes)
            if progress_callback:
                progress_callback(file.tell(), file_size)
|
2016-09-11 17:24:03 +03:00
|
|
|
|
|
|
|
# endregion
|
2016-09-10 19:05:20 +03:00
|
|
|
|
2016-09-07 12:36:34 +03:00
|
|
|
# endregion
|
|
|
|
|
|
|
|
# region Updates handling
|
|
|
|
|
2016-09-11 12:50:38 +03:00
|
|
|
def add_update_handler(self, handler):
    """Registers *handler* (a callable taking a single TLObject, an
    update, as its parameter) so it is invoked for received updates.

    Raises RuntimeError when called before ever connecting, since
    without a sender no updates can be received."""
    if not self.sender:
        raise RuntimeError(
            "You should connect at least once to add update handlers.")

    # TODO Eventually remove these methods, the user
    # can access self.update_handlers manually
    self.update_handlers.append(handler)
|
2016-09-09 12:47:37 +03:00
|
|
|
|
2016-09-11 12:50:38 +03:00
|
|
|
def remove_update_handler(self, handler):
    """Removes a previously added update handler so it no longer
    receives updates. Raises ValueError if the handler was never added
    (list.remove semantics)."""
    self.update_handlers.remove(handler)
|
2016-09-07 12:36:34 +03:00
|
|
|
|
2017-03-28 19:46:07 +03:00
|
|
|
def list_update_handlers(self):
    """Returns a shallow copy of the currently registered update
    handlers, so callers can inspect it without mutating our list"""
    return list(self.update_handlers)
|
|
|
|
|
|
|
|
def _setup_ping_thread(self):
    """Creates and starts the updates thread, which also sends
    periodic pings so that Telegram does not disconnect us"""
    # A single daemon thread handles both updates and keep-alive pings
    thread = Thread(target=self._updates_thread_method,
                    name='UpdatesThread', daemon=True)
    self._updates_thread = thread

    self._set_updates_thread(running=True)
|
|
|
|
|
|
|
|
def _set_updates_thread(self, running):
|
|
|
|
"""Sets the updates thread status (running or not)"""
|
|
|
|
if not self._updates_thread or \
|
|
|
|
running == self._updates_thread_running.is_set():
|
|
|
|
return
|
|
|
|
|
|
|
|
# Different state, update the saved value and behave as required
|
|
|
|
self._logger.info('Changing updates thread running status to %s', running)
|
|
|
|
if running:
|
|
|
|
self._updates_thread_running.set()
|
|
|
|
self._updates_thread.start()
|
|
|
|
else:
|
|
|
|
self._updates_thread_running.clear()
|
|
|
|
if self._updates_thread_receiving.is_set():
|
|
|
|
self.sender.cancel_receive()
|
|
|
|
|
|
|
|
def _updates_thread_method(self):
    """This method will run until specified and listen for incoming updates.

    Loop body: take the shared lock, send a ping when ping_interval has
    elapsed, then (only if there are registered handlers) block on
    receiving one update and dispatch it to every handler. Network and
    cancellation errors are handled per-iteration so the loop survives
    them."""

    # Set a reasonable timeout when checking for updates
    timeout = timedelta(minutes=1)

    while self._updates_thread_running.is_set():
        # Always sleep a bit before each iteration to relax the CPU,
        # since it's possible to early 'continue' the loop to reach
        # the next iteration, but we still should to sleep.
        # Longer sleep if we're not expecting updates (only pings)
        sleep(0.1 if self.update_handlers else 1)

        # NOTE(review): this lock presumably serializes this thread's
        # network use against invoke() calls from other threads — confirm
        with self._lock:
            self._logger.debug('Updates thread acquired the lock')
            try:
                now = time()
                # If ping_interval seconds passed since last ping, send a new one
                if now >= self._ping_time_last + self.ping_interval:
                    self._ping_time_last = now
                    self.invoke(PingRequest(utils.generate_random_long()))
                    self._logger.debug('Ping sent from the updates thread')

                # Exit the loop if we're not expecting to receive any updates
                # (the 'continue' also releases the lock held by 'with')
                if not self.update_handlers:
                    self._logger.debug('No updates handlers found, continuing')
                    continue

                # Flag that we're blocked receiving, so that
                # _set_updates_thread can cancel_receive() to unblock us
                self._updates_thread_receiving.set()
                self._logger.debug('Trying to receive updates from the updates thread')
                result = self.sender.receive_update(timeout=timeout)
                self._logger.info('Received update from the updates thread')
                for handler in self.update_handlers:
                    handler(result)

            except ConnectionResetError:
                # The server dropped the connection; try to re-establish it
                self._logger.info('Server disconnected us. Reconnecting...')
                self.reconnect()

            except TimeoutError:
                # No update arrived within `timeout`; simply loop again
                self._logger.debug('Receiving updates timed out')

            except ReadCancelledError:
                # Someone called cancel_receive() (e.g. to stop this thread)
                self._logger.info('Receiving updates cancelled')

            except OSError:
                self._logger.warning('OSError on updates thread, %s logging out',
                                     'was' if self.sender.logging_out else 'was not')

                if self.sender.logging_out:
                    # This error is okay when logging out, means we got disconnected
                    # TODO Not sure why this happens because we call disconnect()…
                    self._set_updates_thread(running=False)
                else:
                    raise

        self._logger.debug('Updates thread released the lock')
        self._updates_thread_receiving.clear()
|
2017-03-28 19:46:07 +03:00
|
|
|
|
2016-09-07 12:36:34 +03:00
|
|
|
# endregion
|