import platform
from datetime import datetime, timedelta
from hashlib import md5
from mimetypes import guess_type
from os import listdir, path

# Import some externalized utilities to work with the Telegram types and more
import telethon.helpers as utils
import telethon.network.authenticator as authenticator
from telethon.errors import *
from telethon.network import MtProtoSender, TcpTransport
from telethon.parser.markdown_parser import parse_message_entities

# For sending and receiving requests
from telethon.tl import MTProtoRequest, Session
from telethon.tl.all_tlobjects import layer
from telethon.tl.functions import InitConnectionRequest, InvokeWithLayerRequest

# The following is required to get the password salt
from telethon.tl.functions.account import GetPasswordRequest
from telethon.tl.functions.auth import (CheckPasswordRequest, LogOutRequest,
                                        SendCodeRequest, SignInRequest,
                                        SignUpRequest)
from telethon.tl.functions.auth import ImportBotAuthorizationRequest
from telethon.tl.functions.help import GetConfigRequest
from telethon.tl.functions.messages import (
    GetDialogsRequest, GetHistoryRequest, ReadHistoryRequest, SendMediaRequest,
    SendMessageRequest)

# The Requests and types that we'll be using
from telethon.tl.functions.upload import (
    GetFileRequest, SaveBigFilePartRequest, SaveFilePartRequest)

# All the types we need to work with
from telethon.tl.types import (
    ChatPhotoEmpty, DocumentAttributeAudio, DocumentAttributeFilename,
    InputDocumentFileLocation, InputFile, InputFileBig, InputFileLocation,
    InputMediaUploadedDocument, InputMediaUploadedPhoto, InputPeerEmpty,
    MessageMediaContact, MessageMediaDocument, MessageMediaPhoto,
    UserProfilePhotoEmpty)
from telethon.utils import (find_user_or_chat, get_input_peer,
                            get_appropiate_part_size, get_extension)


class TelegramClient:

    # Current TelegramClient version
    __version__ = '0.9'

    # region Initialization

    def __init__(self, session, api_id, api_hash, proxy=None):
        """Initializes the Telegram client with the specified API ID and Hash.

           Session can either be a `str` object (the filename for the loaded/saved .session)
           or it can be a `Session` instance (in which case list_sessions() would probably not work).
           If you don't want any file to be saved, pass `None`.

           In the latter case, you are free to override the `Session` class to provide different
           .save() and .load() implementations to suit your needs."""

        if api_id is None or api_hash is None:
            raise PermissionError(
                'Your API ID or Hash are invalid. Please read "Requirements" on README.rst')

        self.api_id = api_id
        self.api_hash = api_hash

        # Determine what session object we have
        if isinstance(session, str) or session is None:
            self.session = Session.try_load_or_create_new(session)
        elif isinstance(session, Session):
            self.session = session
        else:
            raise ValueError(
                'The given session must either be a string or a Session instance.')

        self.transport = None
        self.proxy = proxy  # Will be used when a TcpTransport is created

        # These will be set later
        self.dc_options = None
        self.sender = None
        self.phone_code_hashes = {}

    # endregion
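
    # Illustrative usage sketch for the constructor above, kept as a comment so the
    # module still imports cleanly. The session name and API credentials below are
    # placeholder values:
    #
    #     client = TelegramClient('my_session', api_id=12345,
    #                             api_hash='0123456789abcdef0123456789abcdef')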

    # region Connecting

    def connect(self, reconnect=False):
        """Connects to the Telegram servers, executing authentication if required.
           Note that authenticating to the Telegram servers is not the same as authenticating
           the app, which requires sending a code first."""
        if self.transport is None:
            self.transport = TcpTransport(self.session.server_address,
                                          self.session.port, proxy=self.proxy)

        try:
            if not self.session.auth_key or (reconnect and self.sender is not None):
                self.session.auth_key, self.session.time_offset = \
                    authenticator.do_authentication(self.transport)

                self.session.save()

            self.sender = MtProtoSender(self.transport, self.session)
            self.sender.connect()

            # Now it's time to send an InitConnectionRequest,
            # which must always be invoked with the layer we'll be using
            query = InitConnectionRequest(
                api_id=self.api_id,
                device_model=platform.node(),
                system_version=platform.system(),
                app_version=self.__version__,
                lang_code='en',
                query=GetConfigRequest())

            result = self.invoke(
                InvokeWithLayerRequest(
                    layer=layer, query=query))

            # We're only interested in the DC options,
            # although many other options are available!
            self.dc_options = result.dc_options

            # Once we know we're authorized, we can set up the ping thread
            if self.is_user_authorized():
                self.sender.setup_ping_thread()

            return True
        except RPCError as error:
            print('Could not stabilise initial connection: {}'.format(error))
            return False
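
    # Illustrative comment: a typical connection check built on connect() and
    # is_user_authorized() (defined further below). 'client' is assumed to be an
    # instance created as in the earlier sketch:
    #
    #     if client.connect():
    #         if not client.is_user_authorized():
    #             pass  # the account still needs to log in; see sign_in() below
    #     else:
    #         print('Could not connect to Telegram')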

    def reconnect_to_dc(self, dc_id):
        """Reconnects to the specified DC ID. This is automatically called after an InvalidDCError is raised"""
        if not self.dc_options:
            raise ConnectionError(
                "Can't reconnect. Stabilise an initial connection first.")

        dc = next(dc for dc in self.dc_options if dc.id == dc_id)

        self.transport.close()
        self.transport = None
        self.session.server_address = dc.ip_address
        self.session.port = dc.port
        self.session.save()

        self.connect(reconnect=True)

    def disconnect(self):
        """Disconnects from the Telegram server **and pauses all the spawned threads**"""
        if self.sender:
            self.sender.disconnect()
            self.sender = None
        if self.transport:
            self.transport.close()
            self.transport = None

    # endregion

    # region Telegram requests functions

    def invoke(self, request, timeout=timedelta(seconds=5), throw_invalid_dc=False):
        """Invokes an MTProtoRequest (sends and receives it) and returns its result.
           An optional timeout can be given to cancel the operation after the time delta.
           Timeout can be set to None for no timeout.

           If throw_invalid_dc is True, InvalidDCError won't be caught (useful to
           avoid infinite recursion). This should not be set to True manually."""
        if not issubclass(type(request), MTProtoRequest):
            raise ValueError('You can only invoke MTProtoRequests')

        if not self.sender:
            raise ValueError('You must be connected to invoke requests!')

        try:
            self.sender.send(request)
            self.sender.receive(request, timeout)

            return request.result

        except InvalidDCError as error:
            if throw_invalid_dc:
                raise

            self.reconnect_to_dc(error.new_dc)
            return self.invoke(request, timeout=timeout, throw_invalid_dc=True)
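
    # Illustrative comment: invoke() also works with any raw TL request, for example
    # GetConfigRequest (already imported at the top of this module):
    #
    #     config = client.invoke(GetConfigRequest())
    #     print(len(config.dc_options), 'data centers known')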

    # region Authorization requests

    def is_user_authorized(self):
        """Has the user been authorized yet (code request sent and confirmed)?
           Note that this will NOT yield the correct result if the session was revoked by another client!"""
        if self.session and self.session.user is not None:
            return True
        return False

    def send_code_request(self, phone_number):
        """Sends a code request to the specified phone number"""
        result = self.invoke(SendCodeRequest(phone_number, self.api_id, self.api_hash))
        self.phone_code_hashes[phone_number] = result.phone_code_hash

    def sign_in(self, phone_number=None, code=None, password=None, bot_token=None):
        """Completes the authorization of a phone number by providing the received code.

           If no phone or code is provided, then the sole password will be used. The password
           should be used after a normal authorization attempt has happened and an RPCError
           with `.password_required = True` was raised.

           To log in as a bot, only `bot_token` should be provided. This should equal the
           bot access hash provided by https://t.me/BotFather during your bot creation."""
        if phone_number and code:
            if phone_number not in self.phone_code_hashes:
                raise ValueError(
                    'Please make sure you have called send_code_request first.')

            try:
                result = self.invoke(
                    SignInRequest(phone_number, self.phone_code_hashes[
                        phone_number], code))

            except RPCError as error:
                if error.message.startswith('PHONE_CODE_'):
                    print(error)
                    return False
                else:
                    raise
        elif password:
            salt = self.invoke(GetPasswordRequest()).current_salt
            result = self.invoke(
                CheckPasswordRequest(utils.get_password_hash(password, salt)))
        elif bot_token:
            result = self.invoke(
                ImportBotAuthorizationRequest(flags=0,
                                              api_id=self.api_id,
                                              api_hash=self.api_hash,
                                              bot_auth_token=bot_token))
        else:
            raise ValueError(
                'You must provide a phone_number and a code for the first time, '
                'and a password only if an RPCError was raised before.')

        # Result is an Auth.Authorization TLObject
        self.session.user = result.user
        self.session.save()

        # If we want the connection to stay alive for a long time, we need
        # to start the pings thread once we're already authorized and not
        # before, to avoid the updates thread trying to read anything while
        # we haven't yet connected.
        self.sender.setup_ping_thread()

        return True
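
    # Illustrative comment: the usual interactive login flow built from
    # send_code_request() and sign_in(); the phone number is a placeholder:
    #
    #     phone = '+34600000000'
    #     client.send_code_request(phone)
    #     try:
    #         client.sign_in(phone_number=phone, code=input('Code: '))
    #     except RPCError as error:
    #         if getattr(error, 'password_required', False):
    #             client.sign_in(password=input('Two-step password: '))
    #         else:
    #             raise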

    def sign_up(self, phone_number, code, first_name, last_name=''):
        """Signs up to Telegram. Make sure you sent a code request first!"""
        result = self.invoke(
            SignUpRequest(
                phone_number=phone_number,
                phone_code_hash=self.phone_code_hashes[phone_number],
                phone_code=code,
                first_name=first_name,
                last_name=last_name))

        self.session.user = result.user
        self.session.save()

    def log_out(self):
        """Logs out and deletes the current session. Returns True if everything went OK"""
        # Special flag when logging out (so the ack request confirms it)
        self.sender.logging_out = True
        try:
            self.invoke(LogOutRequest())
            self.disconnect()
            if not self.session.delete():
                return False

            self.session = None
            return True
        except:
            # Something went wrong when logging out, restore the previous state
            self.sender.logging_out = False
            return False

    @staticmethod
    def list_sessions():
        """Lists all the sessions of the users who have ever connected
           using this client and never logged out"""
        return [path.splitext(path.basename(f))[0]  # splitext = split ext (not spli text!)
                for f in listdir('.') if f.endswith('.session')]

    # endregion

    # region Dialogs ("chats") requests

    def get_dialogs(self,
                    limit=10,
                    offset_date=None,
                    offset_id=0,
                    offset_peer=InputPeerEmpty()):
        """Returns a tuple of lists ([dialogs], [entities]) with at least 'limit' items each.
           If `limit` is 0, all dialogs will be retrieved.
           The `entity` represents the user, chat or channel corresponding to that dialog"""

        r = self.invoke(
            GetDialogsRequest(
                offset_date=offset_date,
                offset_id=offset_id,
                offset_peer=offset_peer,
                limit=limit))
        return (
            r.dialogs,
            [find_user_or_chat(d.peer, r.users, r.chats) for d in r.dialogs])
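
    # Illustrative comment: listing recent dialogs with the method above; not every
    # entity type has a 'title' (users have 'first_name' instead):
    #
    #     dialogs, entities = client.get_dialogs(limit=10)
    #     for entity in entities:
    #         print(getattr(entity, 'title', None) or getattr(entity, 'first_name', None))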

    # endregion

    # region Message requests

    def send_message(self,
                     entity,
                     message,
                     markdown=False,
                     no_web_page=False):
        """Sends a message to the given entity (or input peer) and returns the sent message ID"""
        if markdown:
            msg, entities = parse_message_entities(message)
        else:
            msg, entities = message, []

        msg_id = utils.generate_random_long()
        self.invoke(
            SendMessageRequest(
                peer=get_input_peer(entity),
                message=msg,
                random_id=msg_id,
                entities=entities,
                no_webpage=no_web_page))
        return msg_id
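
    # Illustrative comment: sending a plain text message to the first entity
    # returned by get_dialogs() above:
    #
    #     dialogs, entities = client.get_dialogs(limit=1)
    #     msg_id = client.send_message(entities[0], 'Hello from Telethon!')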

    def get_message_history(self,
                            entity,
                            limit=20,
                            offset_date=None,
                            offset_id=0,
                            max_id=0,
                            min_id=0,
                            add_offset=0):
        """
        Gets the message history for the specified entity

        :param entity:      The entity (or input peer) from whom to retrieve the message history
        :param limit:       Number of messages to be retrieved
        :param offset_date: Offset date (messages *previous* to this date will be retrieved)
        :param offset_id:   Offset message ID (only messages *previous* to the given ID will be retrieved)
        :param max_id:      All the messages with a higher (newer) ID or equal to this will be excluded
        :param min_id:      All the messages with a lower (older) ID or equal to this will be excluded
        :param add_offset:  Additional message offset (all of the specified offsets + this offset = older messages)

        :return: A tuple containing total message count and two more lists ([messages], [senders]).
                 Note that the sender can be None if it was not found!
        """
        result = self.invoke(
            GetHistoryRequest(
                get_input_peer(entity),
                limit=limit,
                offset_date=offset_date,
                offset_id=offset_id,
                max_id=max_id,
                min_id=min_id,
                add_offset=add_offset))

        # The result may be a messages slice (not all messages were retrieved) or
        # simply a messages TLObject. In the latter case, no "count" attribute is specified:
        # the total messages count is retrieved by counting all the retrieved messages
        total_messages = getattr(result, 'count', len(result.messages))

        # Iterate over all the messages and find the sender User
        users = []
        for msg in result.messages:
            for usr in result.users:
                if msg.from_id == usr.id:
                    users.append(usr)
                    break
            else:
                # Keep both lists the same length: no sender was found for this message
                users.append(None)

        return total_messages, result.messages, users
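
    # Illustrative comment: fetching the latest messages for an entity obtained
    # from get_dialogs(); senders may be None when they could not be matched:
    #
    #     total, messages, senders = client.get_message_history(entity, limit=10)
    #     for message, sender in zip(messages, senders):
    #         name = sender.first_name if sender else '???'
    #         print(name, getattr(message, 'message', '(no text)'))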

    def send_read_acknowledge(self, entity, messages=None, max_id=None):
        """Sends a "read acknowledge" (i.e., notifying the given peer that we've
           read their messages, also known as the "double check ✅✅").

           Either a list of messages (or a single message) can be given,
           or the maximum message ID (until which message we want to send the read acknowledge).

           Returns an AffectedMessages TLObject"""
        if max_id is None:
            if not messages:
                raise InvalidParameterError(
                    'Either a message list or a max_id must be provided.')

            if isinstance(messages, list):
                max_id = max(msg.id for msg in messages)
            else:
                max_id = messages.id

        return self.invoke(ReadHistoryRequest(peer=get_input_peer(entity), max_id=max_id))

    # endregion

    # TODO Handle media downloading/uploading in a different session?
    #  "It is recommended that large queries (upload.getFile, upload.saveFilePart)
    #   be handled through a separate session and a separate connection"
    # region Uploading media requests

    def upload_file(self,
                    file_path,
                    part_size_kb=None,
                    file_name=None,
                    progress_callback=None):
        """Uploads the specified file_path and returns a handle which can be later used

        :param file_path: The file path of the file that will be uploaded
        :param part_size_kb: The part size when uploading the file. None = Automatic
        :param file_name: The name of the uploaded file. None = Automatic
        :param progress_callback: A callback function which takes two parameters,
                                  uploaded size (in bytes) and total file size (in bytes).
                                  This is called every time a part is uploaded
        """
        file_size = path.getsize(file_path)
        if not part_size_kb:
            part_size_kb = get_appropiate_part_size(file_size)

        if part_size_kb > 512:
            raise ValueError('The part size must be less or equal to 512KB')

        part_size = int(part_size_kb * 1024)
        if part_size % 1024 != 0:
            raise ValueError('The part size must be evenly divisible by 1024')

        # Determine whether the file is too big (over 10MB) or not
        # Telegram does make a distinction between smaller or larger files
        is_large = file_size > 10 * 1024 * 1024
        part_count = (file_size + part_size - 1) // part_size

        # Use a random ID to identify this upload; it is highly likely to be unique
        file_id = utils.generate_random_long()
        hash_md5 = md5()

        with open(file_path, 'rb') as file:
            for part_index in range(part_count):
                # Read the file in chunks of size part_size
                part = file.read(part_size)

                # The SavePartRequest is different depending on whether
                # the file is too large or not (over or less than 10MB)
                if is_large:
                    request = SaveBigFilePartRequest(file_id, part_index,
                                                     part_count, part)
                else:
                    request = SaveFilePartRequest(file_id, part_index, part)

                # Invoke the file upload and update the MD5 checksum
                result = self.invoke(request)
                if result:
                    if not is_large:
                        # No need to update the hash if it's a large file
                        hash_md5.update(part)

                    if progress_callback:
                        progress_callback(file.tell(), file_size)
                else:
                    raise ValueError('Could not upload file part #{}'.format(
                        part_index))

        # Set a default file name if None was specified
        if not file_name:
            file_name = path.basename(file_path)

        # After the file has been uploaded, we can return a handle pointing to it
        if is_large:
            return InputFileBig(
                id=file_id,
                parts=part_count,
                name=file_name)
        else:
            return InputFile(
                id=file_id,
                parts=part_count,
                name=file_name,
                md5_checksum=hash_md5.hexdigest())
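
    # Illustrative comment: uploading a local file and sending it as a document with
    # send_document_file() (defined below); the path and recipient are placeholders:
    #
    #     def on_progress(uploaded, total):
    #         print('{}/{} bytes uploaded'.format(uploaded, total))
    #
    #     input_file = client.upload_file('/tmp/report.pdf', progress_callback=on_progress)
    #     client.send_document_file(input_file, entity, caption='Monthly report')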

    def send_photo_file(self, input_file, entity, caption=''):
        """Sends a previously uploaded input_file
           (which should be a photo) to the given entity (or input peer)"""
        self.send_media_file(
            InputMediaUploadedPhoto(input_file, caption), entity)

    def send_document_file(self, input_file, entity, caption=''):
        """Sends a previously uploaded input_file
           (which should be a document) to the given entity (or input peer)"""

        # Determine mime-type and attributes
        # Take the first element by using [0] since guess_type returns a tuple
        mime_type = guess_type(input_file.name)[0]
        attributes = [
            DocumentAttributeFilename(input_file.name)
            # TODO If the input file is an audio, find out:
            # Performer and song title and add DocumentAttributeAudio
        ]
        # Ensure we always send some mime type; it cannot be None
        # «The "octet-stream" subtype is used to indicate that a body contains arbitrary binary data.»
        if not mime_type:
            mime_type = 'application/octet-stream'
        self.send_media_file(
            InputMediaUploadedDocument(
                file=input_file,
                mime_type=mime_type,
                attributes=attributes,
                caption=caption),
            entity)

    def send_media_file(self, input_media, entity):
        """Sends any input_media (contact, document, photo...) to the given entity"""
        self.invoke(
            SendMediaRequest(
                peer=get_input_peer(entity),
                media=input_media,
                random_id=utils.generate_random_long()))

    # endregion

    # region Downloading media requests

    def download_profile_photo(self,
                               profile_photo,
                               file_path,
                               add_extension=True,
                               download_big=True):
        """Downloads the profile photo for a user or a chat (including channels).
           Returns False if no photo was provided, or if it was Empty"""

        if (not profile_photo or
                isinstance(profile_photo, UserProfilePhotoEmpty) or
                isinstance(profile_photo, ChatPhotoEmpty)):
            return False

        if add_extension:
            file_path += get_extension(profile_photo)

        if download_big:
            photo_location = profile_photo.photo_big
        else:
            photo_location = profile_photo.photo_small

        # Download the media with the largest size input file location
        self.download_file_loc(
            InputFileLocation(
                volume_id=photo_location.volume_id,
                local_id=photo_location.local_id,
                secret=photo_location.secret),
            file_path)
        return True
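
    # Illustrative comment: saving an entity's profile photo; 'entity' is assumed to
    # come from get_dialogs(), and its 'photo' attribute may be missing or Empty:
    #
    #     if client.download_profile_photo(getattr(entity, 'photo', None),
    #                                      'photos/' + str(entity.id)):
    #         print('Profile photo downloaded')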

    def download_msg_media(self,
                           message_media,
                           file_path,
                           add_extension=True,
                           progress_callback=None):
        """Downloads the given MessageMedia (Photo, Document or Contact)
           into the desired file_path, optionally finding its extension automatically
           The progress_callback should be a callback function which takes two parameters,
           downloaded size (in bytes) and total file size (in bytes).
           This will be called every time a part is downloaded"""
        if type(message_media) == MessageMediaPhoto:
            return self.download_photo(message_media, file_path, add_extension,
                                       progress_callback)

        elif type(message_media) == MessageMediaDocument:
            return self.download_document(message_media, file_path,
                                          add_extension, progress_callback)

        elif type(message_media) == MessageMediaContact:
            return self.download_contact(message_media, file_path,
                                         add_extension)
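
    # Illustrative comment: downloading whatever media a message carries, using the
    # messages returned by get_message_history() above:
    #
    #     for message in messages:
    #         if getattr(message, 'media', None):
    #             client.download_msg_media(message.media,
    #                                       'downloads/' + str(message.id))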

    def download_photo(self,
                       message_media_photo,
                       file_path,
                       add_extension=False,
                       progress_callback=None):
        """Downloads MessageMediaPhoto's largest size into the desired
           file_path, optionally finding its extension automatically
           The progress_callback should be a callback function which takes two parameters,
           downloaded size (in bytes) and total file size (in bytes).
           This will be called every time a part is downloaded"""

        # Determine the photo and its largest size
        photo = message_media_photo.photo
        largest_size = photo.sizes[-1]
        file_size = largest_size.size
        largest_size = largest_size.location

        if add_extension:
            file_path += get_extension(message_media_photo)

        # Download the media with the largest size input file location
        self.download_file_loc(
            InputFileLocation(
                volume_id=largest_size.volume_id,
                local_id=largest_size.local_id,
                secret=largest_size.secret),
            file_path,
            file_size=file_size,
            progress_callback=progress_callback)
        return file_path

    def download_document(self,
                          message_media_document,
                          file_path=None,
                          add_extension=True,
                          progress_callback=None):
        """Downloads the given MessageMediaDocument into the desired
           file_path, optionally finding its extension automatically.
           If no file_path is given, it will try to be guessed from the document
           The progress_callback should be a callback function which takes two parameters,
           downloaded size (in bytes) and total file size (in bytes).
           This will be called every time a part is downloaded"""
        document = message_media_document.document
        file_size = document.size

        # If no file path was given, try to guess it from the attributes
        if file_path is None:
            for attr in document.attributes:
                if type(attr) == DocumentAttributeFilename:
                    file_path = attr.file_name
                    break  # This attribute has higher preference

                elif type(attr) == DocumentAttributeAudio:
                    file_path = '{} - {}'.format(attr.performer, attr.title)

            if file_path is None:
                print('Could not determine a filename for the document')

        if add_extension:
            file_path += get_extension(message_media_document)

        self.download_file_loc(
            InputDocumentFileLocation(
                id=document.id,
                access_hash=document.access_hash,
                version=document.version),
            file_path,
            file_size=file_size,
            progress_callback=progress_callback)
        return file_path

    @staticmethod
    def download_contact(message_media_contact, file_path, add_extension=True):
        """Downloads a media contact using the vCard 4.0 format"""

        first_name = message_media_contact.first_name
        last_name = message_media_contact.last_name
        phone_number = message_media_contact.phone_number

        # The only way we can save a contact in a format that phones
        # will understand is by using the .vCard format
        if add_extension:
            file_path += '.vcard'

        # Ensure that we'll be able to write the contact file
        utils.ensure_parent_dir_exists(file_path)

        with open(file_path, 'w', encoding='utf-8') as file:
            file.write('BEGIN:VCARD\n')
            file.write('VERSION:4.0\n')
            file.write('N:{};{};;;\n'.format(first_name, last_name
                                             if last_name else ''))
            file.write('FN:{}\n'.format(' '.join((first_name, last_name))))
            file.write('TEL;TYPE=cell;VALUE=uri:tel:+{}\n'.format(
                phone_number))
            file.write('END:VCARD\n')

        return file_path

    def download_file_loc(self,
                          input_location,
                          file_path,
                          part_size_kb=64,
                          file_size=None,
                          progress_callback=None):
        """Downloads media from the given input_file_location to the specified file_path.
           If a progress_callback function is given, it will be called taking two
           arguments (downloaded bytes count and total file size)"""

        if not part_size_kb:
            if not file_size:
                raise ValueError('A part size value must be provided')
            else:
                part_size_kb = get_appropiate_part_size(file_size)

        part_size = int(part_size_kb * 1024)
        if part_size % 1024 != 0:
            raise ValueError('The part size must be evenly divisible by 1024')

        # Ensure that we'll be able to download the media
        utils.ensure_parent_dir_exists(file_path)

        # Start with an offset index of 0
        offset_index = 0
        with open(file_path, 'wb') as file:
            while True:
                # The current offset equals the offset_index multiplied by the part size
                offset = offset_index * part_size
                result = self.invoke(
                    GetFileRequest(input_location, offset, part_size))
                offset_index += 1

                # If we have received no data (0 bytes), the file is over
                # So there is nothing left to download and write
                if not result.bytes:
                    return result.type  # Return some extra information

                file.write(result.bytes)
                if progress_callback:
                    progress_callback(file.tell(), file_size)

    # endregion

    # endregion

    # region Updates handling

    def add_update_handler(self, handler):
        """Adds an update handler (a function which takes a TLObject,
           an update, as its parameter) and listens for updates"""
        if not self.sender:
            raise RuntimeError(
                "You should connect at least once to add update handlers.")

        self.sender.add_update_handler(handler)
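
    # Illustrative comment: a minimal update handler; the callback receives the raw
    # TLObject update as-is:
    #
    #     def on_update(update):
    #         print('Received update:', type(update).__name__)
    #
    #     client.add_update_handler(on_update)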

    def remove_update_handler(self, handler):
        """Removes a previously added update handler"""
        self.sender.remove_update_handler(handler)

    def list_update_handlers(self):
        """Returns the names of the currently registered update handlers"""
        return [handler.__name__ for handler in self.sender.on_update_handlers]

    # endregion