import getpass
import hashlib
import io
import itertools
import logging
import os
import re
import sys
import time
import warnings
from collections import UserList
from datetime import datetime, timedelta
from io import BytesIO
from mimetypes import guess_type

from .crypto import CdnDecrypter
from .tl import TLObject
from .tl.custom import InputSizedFile
from .tl.functions.updates import GetDifferenceRequest
from .tl.functions.upload import (
    SaveBigFilePartRequest, SaveFilePartRequest, GetFileRequest
)
from .tl.types.updates import (
    DifferenceSlice, DifferenceEmpty, Difference, DifferenceTooLong
)
from .tl.types.upload import FileCdnRedirect

try:
    import socks
except ImportError:
    socks = None

try:
    import hachoir
    import hachoir.metadata
    import hachoir.parser
except ImportError:
    hachoir = None

from . import TelegramBareClient
from . import helpers, utils, events
from .errors import (
    RPCError, UnauthorizedError, PhoneCodeEmptyError, PhoneCodeExpiredError,
    PhoneCodeHashEmptyError, PhoneCodeInvalidError, LocationInvalidError,
    SessionPasswordNeededError, FileMigrateError, PhoneNumberUnoccupiedError,
    PhoneNumberOccupiedError, UsernameNotOccupiedError
)
from .network import ConnectionTcpFull
from .tl.custom import Draft, Dialog
from .tl.functions.account import (
    GetPasswordRequest, UpdatePasswordSettingsRequest
)
from .tl.functions.auth import (
    CheckPasswordRequest, LogOutRequest, SendCodeRequest, SignInRequest,
    SignUpRequest, ResendCodeRequest, ImportBotAuthorizationRequest
)
from .tl.functions.contacts import (
    GetContactsRequest, ResolveUsernameRequest
)
from .tl.functions.messages import (
    GetDialogsRequest, GetHistoryRequest, SendMediaRequest,
    SendMessageRequest, GetChatsRequest, GetAllDraftsRequest,
    CheckChatInviteRequest, ReadMentionsRequest, SendMultiMediaRequest,
    UploadMediaRequest, EditMessageRequest, GetFullChatRequest,
    ForwardMessagesRequest, SearchRequest
)

from .tl.functions import channels
from .tl.functions import messages

from .tl.functions.users import (
    GetUsersRequest
)
from .tl.functions.channels import (
    GetChannelsRequest, GetFullChannelRequest, GetParticipantsRequest
)
from .tl.types import (
    DocumentAttributeAudio, DocumentAttributeFilename,
    InputMediaUploadedDocument, InputMediaUploadedPhoto, InputPeerEmpty,
    Message, MessageMediaContact, MessageMediaDocument, MessageMediaPhoto,
    InputUserSelf, UserProfilePhoto, ChatPhoto, UpdateMessageID,
    UpdateNewChannelMessage, UpdateNewMessage, UpdateShortSentMessage,
    PeerUser, InputPeerUser, InputPeerChat, InputPeerChannel, MessageEmpty,
    ChatInvite, ChatInviteAlready, PeerChannel, Photo, InputPeerSelf,
    InputSingleMedia, InputMediaPhoto, InputPhoto, InputFile, InputFileBig,
    InputDocument, InputMediaDocument, Document, MessageEntityTextUrl,
    InputMessageEntityMentionName, DocumentAttributeVideo,
    UpdateEditMessage, UpdateEditChannelMessage, UpdateShort, Updates,
    MessageMediaWebPage, ChannelParticipantsSearch, PhotoSize, PhotoCachedSize,
    PhotoSizeEmpty, MessageService, ChatParticipants, User, WebPage,
    ChannelParticipantsBanned, ChannelParticipantsKicked,
    InputMessagesFilterEmpty
)
from .tl.types.messages import DialogsSlice
from .tl.types.account import PasswordInputSettings, NoPassword
from .extensions import markdown, html

__log__ = logging.getLogger(__name__)


class TelegramClient(TelegramBareClient):
    """
    Initializes the Telegram client with the specified API ID and Hash.

    Args:
        session (`str` | `telethon.sessions.abstract.Session`, `None`):
            The file name of the session file to be used if a string is
            given (it may be a full path), or the Session instance to be
            used otherwise. If it's ``None``, the session will not be saved,
            and you should call :meth:`.log_out()` when you're done.

            Note that if you pass a string it will be a file in the current
            working directory, although you can also pass absolute paths.

            The session file contains enough information for you to login
            without re-sending the code, so if you have to enter the code
            more than once, maybe you're changing the working directory,
            renaming or removing the file, or using random names.

        api_id (`int` | `str`):
            The API ID you obtained from https://my.telegram.org.

        api_hash (`str`):
            The API hash you obtained from https://my.telegram.org.

        connection (`telethon.network.connection.common.Connection`, optional):
            The connection instance to be used when creating a new connection
            to the servers. If it's a type, the `proxy` argument will be used.

            Defaults to `telethon.network.connection.tcpfull.ConnectionTcpFull`.

        use_ipv6 (`bool`, optional):
            Whether to connect to the servers through IPv6 or not.
            By default this is ``False`` as IPv6 support is not
            too widespread yet.

        proxy (`tuple` | `dict`, optional):
            A tuple consisting of ``(socks.SOCKS5, 'host', port)``.
            See https://github.com/Anorov/PySocks#usage-1 for more.

        update_workers (`int`, optional):
            If specified, represents how many extra threads should
            be spawned to handle incoming updates, and updates will
            be kept in memory until they are processed. Note that
            you must set this to at least ``0`` if you want to be
            able to process updates through :meth:`updates.poll()`.

        timeout (`int` | `float` | `timedelta`, optional):
            The timeout to be used when receiving responses from
            the network. Defaults to 10 seconds.

        spawn_read_thread (`bool`, optional):
            Whether to use an extra background thread or not. Defaults
            to ``True`` so receiving items from the network happens
            instantly, as soon as they arrive. Can still be disabled
            if you want to run the library without any additional thread.

        report_errors (`bool`, optional):
            Whether to report RPC errors or not. Defaults to ``True``,
            see :ref:`api-status` for more information.

    Kwargs:
        Some extra parameters are required when establishing the first
        connection. These are (along with their default values):

        .. code-block:: python

            device_model = platform.node()
            system_version = platform.system()
            app_version = TelegramClient.__version__
            lang_code = 'en'
            system_lang_code = lang_code
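
    A minimal construction sketch (the session name, API ID and API hash
    below are placeholders, not real credentials):

    .. code-block:: python

        client = TelegramClient('session_name', api_id=12345,
                                api_hash='0123456789abcdef0123456789abcdef')
        client.start()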
    """

    # region Initialization

    def __init__(self, session, api_id, api_hash,
                 *,
                 connection=ConnectionTcpFull,
                 use_ipv6=False,
                 proxy=None,
                 update_workers=None,
                 timeout=timedelta(seconds=10),
                 spawn_read_thread=True,
                 report_errors=True,
                 **kwargs):
        super().__init__(
            session, api_id, api_hash,
            connection=connection,
            use_ipv6=use_ipv6,
            proxy=proxy,
            update_workers=update_workers,
            spawn_read_thread=spawn_read_thread,
            timeout=timeout,
            report_errors=report_errors,
            **kwargs
        )

        self._event_builders = []
        self._events_pending_resolve = []

        # Some fields to ease signing in. Let {phone: hash} be
        # a dictionary because the user may change their mind.
        self._phone_code_hash = {}
        self._phone = None

        # Sometimes we need to know who we are, cache the self peer
        self._self_input_peer = None

    # endregion

    # region Telegram requests functions

    # region Authorization requests

    def send_code_request(self, phone, force_sms=False):
        """
        Sends a code request to the specified phone number.

        Args:
            phone (`str` | `int`):
                The phone to which the code will be sent.

            force_sms (`bool`, optional):
                Whether to force sending as SMS.

        Returns:
            An instance of :tl:`SentCode`.
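
        A small usage sketch (the phone number is a placeholder):

        >>> sent = client.send_code_request('+34600000000')
        >>> print(sent.phone_code_hash)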
|
2017-10-21 17:59:20 +03:00
|
|
|
"""
|
2017-12-27 02:50:09 +03:00
|
|
|
phone = utils.parse_phone(phone) or self._phone
|
2018-01-08 16:18:36 +03:00
|
|
|
phone_hash = self._phone_code_hash.get(phone)
|
TelegramClient.send_code_request(): Change logic of methods invocation
Before:
First call, force_sms=False: SendCodeRequest
Next call, force_sms=False: SendCodeRequest
First call, force_sms=True: raise ValueError
Next call, force_sms=True: ResendCodeRequest
That's inconvenient because the user must remember whether the code requested at all and whether the request was successful.
In addition, the repeated invocation of SendCodeRequest does nothing.
This commit changes logic to this:
First call, force_sms=False: SendCodeRequest
Next call, force_sms=False: ResendCodeRequest
First call, force_sms=True: SendCodeRequest, ResendCodeRequest
Next call, force_sms=True: ResendCodeRequest
2017-12-24 14:21:14 +03:00
|
|
|
|
2018-01-08 16:18:36 +03:00
|
|
|
if not phone_hash:
|
2017-11-04 22:46:02 +03:00
|
|
|
result = self(SendCodeRequest(phone, self.api_id, self.api_hash))
|
2018-01-08 16:18:36 +03:00
|
|
|
self._phone_code_hash[phone] = phone_hash = result.phone_code_hash
|
TelegramClient.send_code_request(): Change logic of methods invocation
Before:
First call, force_sms=False: SendCodeRequest
Next call, force_sms=False: SendCodeRequest
First call, force_sms=True: raise ValueError
Next call, force_sms=True: ResendCodeRequest
That's inconvenient because the user must remember whether the code requested at all and whether the request was successful.
In addition, the repeated invocation of SendCodeRequest does nothing.
This commit changes logic to this:
First call, force_sms=False: SendCodeRequest
Next call, force_sms=False: ResendCodeRequest
First call, force_sms=True: SendCodeRequest, ResendCodeRequest
Next call, force_sms=True: ResendCodeRequest
2017-12-24 14:21:14 +03:00
|
|
|
else:
|
|
|
|
force_sms = True
|
2017-11-04 22:46:02 +03:00
|
|
|
|
2017-08-31 11:34:09 +03:00
|
|
|
self._phone = phone
|
TelegramClient.send_code_request(): Change logic of methods invocation
Before:
First call, force_sms=False: SendCodeRequest
Next call, force_sms=False: SendCodeRequest
First call, force_sms=True: raise ValueError
Next call, force_sms=True: ResendCodeRequest
That's inconvenient because the user must remember whether the code requested at all and whether the request was successful.
In addition, the repeated invocation of SendCodeRequest does nothing.
This commit changes logic to this:
First call, force_sms=False: SendCodeRequest
Next call, force_sms=False: ResendCodeRequest
First call, force_sms=True: SendCodeRequest, ResendCodeRequest
Next call, force_sms=True: ResendCodeRequest
2017-12-24 14:21:14 +03:00
|
|
|
|
|
|
|
if force_sms:
|
2018-01-08 16:18:36 +03:00
|
|
|
result = self(ResendCodeRequest(phone, phone_hash))
|
|
|
|
self._phone_code_hash[phone] = result.phone_code_hash
|
TelegramClient.send_code_request(): Change logic of methods invocation
Before:
First call, force_sms=False: SendCodeRequest
Next call, force_sms=False: SendCodeRequest
First call, force_sms=True: raise ValueError
Next call, force_sms=True: ResendCodeRequest
That's inconvenient because the user must remember whether the code requested at all and whether the request was successful.
In addition, the repeated invocation of SendCodeRequest does nothing.
This commit changes logic to this:
First call, force_sms=False: SendCodeRequest
Next call, force_sms=False: ResendCodeRequest
First call, force_sms=True: SendCodeRequest, ResendCodeRequest
Next call, force_sms=True: ResendCodeRequest
2017-12-24 14:21:14 +03:00
|
|
|
|
2017-08-31 11:34:09 +03:00
|
|
|
return result
|
2017-06-08 14:12:57 +03:00
|
|
|
|

    def start(self,
              phone=lambda: input('Please enter your phone: '),
              password=lambda: getpass.getpass('Please enter your password: '),
              bot_token=None, force_sms=False, code_callback=None,
              first_name='New User', last_name=''):
        """
        Convenience method to interactively connect and sign in if required,
        also taking into consideration that 2FA may be enabled in the account.

        Example usage:
            >>> client = TelegramClient(session, api_id, api_hash).start(phone)
            Please enter the code you received: 12345
            Please enter your password: *******
            (You are now logged in)

        Args:
            phone (`str` | `int` | `callable`):
                The phone (or callable without arguments to get it)
                to which the code will be sent.

            password (`callable`, optional):
                The password for 2 Factor Authentication (2FA).
                This is only required if it is enabled in your account.

            bot_token (`str`):
                Bot Token obtained by `@BotFather <https://t.me/BotFather>`_
                to log in as a bot. Cannot be specified with ``phone`` (only
                one of either allowed).

            force_sms (`bool`, optional):
                Whether to force sending the code request as SMS.
                This only makes sense when signing in with a `phone`.

            code_callback (`callable`, optional):
                A callable that will be used to retrieve the Telegram
                login code. Defaults to `input()`.

            first_name (`str`, optional):
                The first name to be used if signing up. This has no
                effect if the account already exists and you sign in.

            last_name (`str`, optional):
                Similar to the first name, but for the last. Optional.

        Returns:
            This `TelegramClient`, so initialization
            can be chained with ``.start()``.
        """
        if code_callback is None:
            def code_callback():
                return input('Please enter the code you received: ')
        elif not callable(code_callback):
            raise ValueError(
                'The code_callback parameter needs to be a callable '
                'function that returns the code you received by Telegram.'
            )

        if not phone and not bot_token:
            raise ValueError('No phone number or bot token provided.')

        if phone and bot_token and not callable(phone):
            raise ValueError('Both a phone and a bot token provided, '
                             'must only provide one of either')

        if not self.is_connected():
            self.connect()

        if self.is_user_authorized():
            self._check_events_pending_resolve()
            return self

        if bot_token:
            self.sign_in(bot_token=bot_token)
            return self

        # Turn the callable into a valid phone number
        while callable(phone):
            phone = utils.parse_phone(phone()) or phone

        me = None
        attempts = 0
        max_attempts = 3
        two_step_detected = False

        sent_code = self.send_code_request(phone, force_sms=force_sms)
        sign_up = not sent_code.phone_registered
        while attempts < max_attempts:
            try:
                if sign_up:
                    me = self.sign_up(code_callback(), first_name, last_name)
                else:
                    # Raises SessionPasswordNeededError if 2FA enabled
                    me = self.sign_in(phone, code_callback())
                break
            except SessionPasswordNeededError:
                two_step_detected = True
                break
            except PhoneNumberOccupiedError:
                sign_up = False
            except PhoneNumberUnoccupiedError:
                sign_up = True
            except (PhoneCodeEmptyError, PhoneCodeExpiredError,
                    PhoneCodeHashEmptyError, PhoneCodeInvalidError):
                print('Invalid code. Please try again.', file=sys.stderr)

            attempts += 1
        else:
            raise RuntimeError(
                '{} consecutive sign-in attempts failed. Aborting'
                .format(max_attempts)
            )

        if two_step_detected:
            if not password:
                raise ValueError(
                    "Two-step verification is enabled for this account. "
                    "Please provide the 'password' argument to 'start()'."
                )
            # TODO If callable given make it retry on invalid
            if callable(password):
                password = password()
            me = self.sign_in(phone=phone, password=password)

        # We won't reach here if any step failed (exit by exception)
        signed, name = 'Signed in successfully as', utils.get_display_name(me)
        try:
            print(signed, name)
        except UnicodeEncodeError:
            # Some terminals don't support certain characters
            print(signed, name.encode('utf-8', errors='ignore')
                  .decode('ascii', errors='ignore'))

        self._check_events_pending_resolve()
        return self

    def sign_in(self, phone=None, code=None,
                password=None, bot_token=None, phone_code_hash=None):
        """
        Starts or completes the sign in process with the given phone number
        or code that Telegram sent.

        Args:
            phone (`str` | `int`):
                The phone to send the code to if no code was provided,
                or to override the phone that was previously used with
                these requests.

            code (`str` | `int`):
                The code that Telegram sent. Note that if you have sent this
                code through the application itself it will immediately
                expire. If you want to send the code, obfuscate it somehow.
                If you're not doing any of this you can ignore this note.

            password (`str`):
                2FA password, should be used if a previous call raised
                SessionPasswordNeededError.

            bot_token (`str`):
                Used to sign in as a bot. Not all requests will be available.
                This should be the hash the @BotFather gave you.

            phone_code_hash (`str`):
                The hash returned by .send_code_request. This can be set to None
                to use the last hash known.

        Returns:
            The signed in user, or the information about
            :meth:`send_code_request`.
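
        A sketch of the usual two-call flow (the phone number shown is a
        placeholder):

        >>> client.send_code_request('+34600000000')
        >>> me = client.sign_in('+34600000000', code=input('Enter code: '))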
        """
        if self.is_user_authorized():
            self._check_events_pending_resolve()
            return self.get_me()

        if phone and not code and not password:
            return self.send_code_request(phone)
        elif code:
            phone = utils.parse_phone(phone) or self._phone
            phone_code_hash = \
                phone_code_hash or self._phone_code_hash.get(phone, None)

            if not phone:
                raise ValueError(
                    'Please make sure to call send_code_request first.'
                )
            if not phone_code_hash:
                raise ValueError('You also need to provide a phone_code_hash.')

            # May raise PhoneCodeEmptyError, PhoneCodeExpiredError,
            # PhoneCodeHashEmptyError or PhoneCodeInvalidError.
            result = self(SignInRequest(phone, phone_code_hash, str(code)))
        elif password:
            salt = self(GetPasswordRequest()).current_salt
            result = self(CheckPasswordRequest(
                helpers.get_password_hash(password, salt)
            ))
        elif bot_token:
            result = self(ImportBotAuthorizationRequest(
                flags=0, bot_auth_token=bot_token,
                api_id=self.api_id, api_hash=self.api_hash
            ))
        else:
            raise ValueError(
                'You must provide a phone and a code the first time, '
                'and a password only if an RPCError was raised before.'
            )

        self._self_input_peer = utils.get_input_peer(
            result.user, allow_self=False
        )
        self._set_connected_and_authorized()
        return result.user

    def sign_up(self, code, first_name, last_name=''):
        """
        Signs up to Telegram if you don't have an account yet.
        You must call .send_code_request(phone) first.

        Args:
            code (`str` | `int`):
                The code sent by Telegram.

            first_name (`str`):
                The first name to be used by the new account.

            last_name (`str`, optional):
                Optional last name.

        Returns:
            The newly created :tl:`User`.
        """
        if self.is_user_authorized():
            self._check_events_pending_resolve()
            return self.get_me()

        result = self(SignUpRequest(
            phone_number=self._phone,
            phone_code_hash=self._phone_code_hash.get(self._phone, ''),
            phone_code=str(code),
            first_name=first_name,
            last_name=last_name
        ))

        self._self_input_peer = utils.get_input_peer(
            result.user, allow_self=False
        )
        self._set_connected_and_authorized()
        return result.user

    def log_out(self):
        """
        Logs out of Telegram and deletes the current ``*.session`` file.

        Returns:
            ``True`` if the operation was successful.
        """
        try:
            self(LogOutRequest())
        except RPCError:
            return False

        self.disconnect()
        self.session.delete()
        self._authorized = False
        return True

    def get_me(self, input_peer=False):
        """
        Gets "me" (the self user) which is currently authenticated,
        or None if the request fails (hence, not authenticated).

        Args:
            input_peer (`bool`, optional):
                Whether to return the :tl:`InputPeerUser` version or the normal
                :tl:`User`. This can be useful if you just need to know the ID
                of yourself.

        Returns:
            Your own :tl:`User`.
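
        A quick sketch:

        >>> me = client.get_me()
        >>> print(me.first_name)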
        """
        if input_peer and self._self_input_peer:
            return self._self_input_peer

        try:
            me = self(GetUsersRequest([InputUserSelf()]))[0]
            if not self._self_input_peer:
                self._self_input_peer = utils.get_input_peer(
                    me, allow_self=False
                )

            return self._self_input_peer if input_peer else me
        except UnauthorizedError:
            return None

    # endregion

    # region Dialogs ("chats") requests

    def iter_dialogs(self, limit=None, offset_date=None, offset_id=0,
                     offset_peer=InputPeerEmpty(), _total=None):
        """
        Returns an iterator over the dialogs, yielding at most `limit` of them.
        Dialogs are the open "chats" or conversations with other people,
        groups you have joined, or channels you are subscribed to.

        Args:
            limit (`int` | `None`):
                How many dialogs to be retrieved as maximum. Can be set to
                ``None`` to retrieve all dialogs. Note that this may take
                whole minutes if you have hundreds of dialogs, as Telegram
                will tell the library to slow down through a
                ``FloodWaitError``.

            offset_date (`datetime`, optional):
                The offset date to be used.

            offset_id (`int`, optional):
                The message ID to be used as an offset.

            offset_peer (:tl:`InputPeer`, optional):
                The peer to be used as an offset.

            _total (`list`, optional):
                A single-item list to pass the total parameter by reference.

        Yields:
            Instances of `telethon.tl.custom.dialog.Dialog`.
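
        A short sketch (it is assumed here that ``dialog.name`` holds the
        display name of the dialog's entity):

        >>> for dialog in client.iter_dialogs(limit=10):
        ...     print(dialog.name)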
        """
        limit = float('inf') if limit is None else int(limit)
        if limit == 0:
            if not _total:
                return
            # Special case, get a single dialog and determine count
            dialogs = self(GetDialogsRequest(
                offset_date=offset_date,
                offset_id=offset_id,
                offset_peer=offset_peer,
                limit=1
            ))
            _total[0] = getattr(dialogs, 'count', len(dialogs.dialogs))
            return

        seen = set()
        req = GetDialogsRequest(
            offset_date=offset_date,
            offset_id=offset_id,
            offset_peer=offset_peer,
            limit=0
        )
        while len(seen) < limit:
            req.limit = min(limit - len(seen), 100)
            r = self(req)

            if _total:
                _total[0] = getattr(r, 'count', len(r.dialogs))
            messages = {m.id: m for m in r.messages}
            entities = {utils.get_peer_id(x): x
                        for x in itertools.chain(r.users, r.chats)}

            # Happens when there are pinned dialogs
            if len(r.dialogs) > limit:
                r.dialogs = r.dialogs[:limit]

            for d in r.dialogs:
                peer_id = utils.get_peer_id(d.peer)
                if peer_id not in seen:
                    seen.add(peer_id)
                    yield Dialog(self, d, entities, messages)

            if len(r.dialogs) < req.limit or not isinstance(r, DialogsSlice):
                # Less than we requested means we reached the end, or
                # we didn't get a DialogsSlice which means we got all.
                break

            req.offset_date = r.messages[-1].date
            req.offset_peer = entities[utils.get_peer_id(r.dialogs[-1].peer)]
            req.offset_id = r.messages[-1].id
            req.exclude_pinned = True

    def get_dialogs(self, *args, **kwargs):
        """
        Same as :meth:`iter_dialogs`, but returns a list instead
        with an additional ``.total`` attribute on the list.
        """
        total = [0]
        kwargs['_total'] = total
        dialogs = UserList(self.iter_dialogs(*args, **kwargs))
        dialogs.total = total[0]
        return dialogs

    def iter_drafts(self):  # TODO: Ability to provide a `filter`
        """
        Iterator over all open draft messages.

        Instances of `telethon.tl.custom.draft.Draft` are yielded.
        You can call `telethon.tl.custom.draft.Draft.set_message`
        to change the message or `telethon.tl.custom.draft.Draft.delete`
        among other things.
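
        For instance, a sketch that clears every open draft:

        >>> for draft in client.iter_drafts():
        ...     draft.delete()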
        """
        for update in self(GetAllDraftsRequest()).updates:
            yield Draft._from_update(self, update)

    def get_drafts(self):
        """
        Same as :meth:`iter_drafts`, but returns a list instead.
        """
        return list(self.iter_drafts())

    @staticmethod
    def _get_response_message(request, result):
        """
        Extracts the response message given a request and its Updates result.
        The request may also be the ID of the message to match.
        """
        # Telegram seems to send updateMessageID first, then updateNewMessage,
        # however let's not rely on that just in case.
        if isinstance(request, int):
            msg_id = request
        else:
            msg_id = None
            for update in result.updates:
                if isinstance(update, UpdateMessageID):
                    if update.random_id == request.random_id:
                        msg_id = update.id
                        break

        if isinstance(result, UpdateShort):
            updates = [result.update]
        elif isinstance(result, Updates):
            updates = result.updates
        else:
            return

        for update in updates:
            if isinstance(update, (UpdateNewChannelMessage, UpdateNewMessage)):
                if update.message.id == msg_id:
                    return update.message

            elif (isinstance(update, UpdateEditMessage) and
                    not isinstance(request.peer, InputPeerChannel)):
                if request.id == update.message.id:
                    return update.message

            elif (isinstance(update, UpdateEditChannelMessage) and
                    utils.get_peer_id(request.peer) ==
                    utils.get_peer_id(update.message.to_id)):
                if request.id == update.message.id:
                    return update.message

    def _parse_message_text(self, message, parse_mode):
        """
        Returns a (parsed message, entities) tuple depending on ``parse_mode``.
        """
        if not parse_mode:
            return message, []

        if isinstance(parse_mode, str):
            parse_mode = parse_mode.lower()
            if parse_mode in {'md', 'markdown'}:
                message, msg_entities = markdown.parse(message)
            elif parse_mode.startswith('htm'):
                message, msg_entities = html.parse(message)
            else:
                raise ValueError('Unknown parsing mode: {}'.format(parse_mode))
        elif callable(parse_mode):
            message, msg_entities = parse_mode(message)
        else:
            raise TypeError('Invalid parsing mode type: {}'.format(parse_mode))

        for i, e in enumerate(msg_entities):
            if isinstance(e, MessageEntityTextUrl):
                m = re.match(r'^@|\+|tg://user\?id=(\d+)', e.url)
                if m:
                    try:
                        msg_entities[i] = InputMessageEntityMentionName(
                            e.offset, e.length, self.get_input_entity(
                                int(m.group(1)) if m.group(1) else e.url
                            )
                        )
                    except (ValueError, TypeError):
                        # Make no replacement
                        pass

        return message, msg_entities

    def send_message(self, entity, message='', reply_to=None, parse_mode='md',
                     link_preview=True, file=None, force_document=False,
                     clear_draft=False):
        """
        Sends the given message to the specified entity (user/chat/channel).

        The default parse mode is the same as the official applications
        (a custom flavour of markdown). ``**bold**, `code` or __italic__``
        are available. In addition you can send ``[links](https://example.com)``
        and ``[mentions](@username)`` (or using IDs like in the Bot API:
        ``[mention](tg://user?id=123456789)``) and ``pre`` blocks with three
        backticks.

        Sending a ``/start`` command with a parameter (like ``?start=data``)
        is also done through this method. Simply send ``'/start data'`` to
        the bot.

        Args:
            entity (`entity`):
                To whom it will be sent.

            message (`str` | :tl:`Message`):
                The message to be sent, or another message object to resend.

                The maximum length for a message is 35,000 bytes or 4,096
                characters. Longer messages will not be sliced automatically,
                and you should slice them manually if the text to send is
                longer than said length.

            reply_to (`int` | :tl:`Message`, optional):
                Whether to reply to a message or not. If an integer is provided,
                it should be the ID of the message that it should reply to.

            parse_mode (`str`, optional):
                Can be 'md' or 'markdown' for markdown-like parsing (default),
                or 'htm' or 'html' for HTML-like parsing. If ``None`` or any
                other false-y value is provided, the message will be sent with
                no formatting.

                If a ``callable`` is passed, it should accept a `str` as input
                and return a tuple consisting of
                ``(parsed message str, [MessageEntity instances])``.

                See :tl:`MessageEntity` for allowed message entities.

            link_preview (`bool`, optional):
                Should the link preview be shown?

            file (`file`, optional):
                Sends a message with a file attached (e.g. a photo,
                video, audio or document). The ``message`` may be empty.

            force_document (`bool`, optional):
                Whether to send the given file as a document or not.

            clear_draft (`bool`, optional):
                Whether the existing draft should be cleared or not.
                Has no effect when sending a file.

        Returns:
            The sent :tl:`Message`.
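
        A couple of usage sketches ('username' stands in for any entity you
        can resolve, and the message ID is a placeholder):

        >>> client.send_message('username', 'Hello, **world**!')
        >>> client.send_message('username', 'Replying to you', reply_to=123)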
        """
        if file is not None:
            return self.send_file(
                entity, file, caption=message, reply_to=reply_to,
                parse_mode=parse_mode, force_document=force_document
            )
        elif not message:
            raise ValueError(
                'The message cannot be empty unless a file is provided'
            )

        entity = self.get_input_entity(entity)
        if isinstance(message, Message):
            if (message.media
                    and not isinstance(message.media, MessageMediaWebPage)):
                return self.send_file(entity, message.media,
                                      caption=message.message,
                                      entities=message.entities)

            if reply_to is not None:
                reply_id = self._get_message_id(reply_to)
            elif utils.get_peer_id(entity) == utils.get_peer_id(message.to_id):
                reply_id = message.reply_to_msg_id
            else:
                reply_id = None
            request = SendMessageRequest(
                peer=entity,
                message=message.message or '',
                silent=message.silent,
                reply_to_msg_id=reply_id,
                reply_markup=message.reply_markup,
                entities=message.entities,
                no_webpage=not isinstance(message.media, MessageMediaWebPage),
                clear_draft=clear_draft
            )
            message = message.message
        else:
            message, msg_ent = self._parse_message_text(message, parse_mode)
            request = SendMessageRequest(
                peer=entity,
                message=message,
                entities=msg_ent,
                no_webpage=not link_preview,
                reply_to_msg_id=self._get_message_id(reply_to),
                clear_draft=clear_draft
            )

        result = self(request)
        if isinstance(result, UpdateShortSentMessage):
            return Message(
                id=result.id,
                to_id=entity,
                message=message,
                date=result.date,
                out=result.out,
                media=result.media,
                entities=result.entities
            )

        return self._get_response_message(request, result)

    def forward_messages(self, entity, messages, from_peer=None):
        """
        Forwards the given message(s) to the specified entity.

        Args:
            entity (`entity`):
                To which entity the message(s) will be forwarded.

            messages (`list` | `int` | :tl:`Message`):
                The message(s) to forward, or their integer IDs.

            from_peer (`entity`):
                If the given messages are integer IDs and not instances
                of the ``Message`` class, this *must* be specified in
                order for the forward to work.

        Returns:
            The list of forwarded :tl:`Message`, or a single one if a list
            wasn't provided as input.
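
        A brief sketch (the message IDs and both chat entities are
        placeholders):

        >>> client.forward_messages('target', [10, 11], from_peer='source')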
        """
        single = not utils.is_list_like(messages)
        if single:
            messages = (messages,)

        if not from_peer:
            try:
                # On private chats (to_id = PeerUser), if the message is
                # not outgoing, we actually need to use "from_id" to get
                # the conversation on which the message was sent.
                from_peer = next(
                    m.from_id if not m.out and isinstance(m.to_id, PeerUser)
                    else m.to_id for m in messages if isinstance(m, Message)
                )
            except StopIteration:
                raise ValueError(
                    'from_peer must be given if integer IDs are used'
                )

        req = ForwardMessagesRequest(
            from_peer=from_peer,
            id=[m if isinstance(m, int) else m.id for m in messages],
            to_peer=entity
        )
        result = self(req)
        random_to_id = {}
        id_to_message = {}
        for update in result.updates:
            if isinstance(update, UpdateMessageID):
                random_to_id[update.random_id] = update.id
            elif isinstance(update, (UpdateNewMessage, UpdateNewChannelMessage)):
                id_to_message[update.message.id] = update.message

        result = [id_to_message[random_to_id[rnd]] for rnd in req.random_id]
        return result[0] if single else result

    def edit_message(self, entity, message=None, text=None, parse_mode='md',
                     link_preview=True):
        """
        Edits the given message ID (to change its contents or disable preview).

        Args:
            entity (`entity` | :tl:`Message`):
                From which chat to edit the message. This can also be
                the message to be edited, and the entity will be inferred
                from it, so the next parameter will be assumed to be the
                message text.

            message (`int` | :tl:`Message` | `str`):
                The ID of the message (or :tl:`Message` itself) to be edited.
                If the `entity` was a :tl:`Message`, then this message will be
                treated as the new text.

            text (`str`, optional):
                The new text of the message. Does nothing if the `entity`
                was a :tl:`Message`.

            parse_mode (`str`, optional):
                Can be 'md' or 'markdown' for markdown-like parsing (default),
                or 'htm' or 'html' for HTML-like parsing. If ``None`` or any
                other false-y value is provided, the message will be sent with
                no formatting.

            link_preview (`bool`, optional):
                Should the link preview be shown?

        Examples:

            >>> client = TelegramClient(...).start()
            >>> message = client.send_message('username', 'hello')
            >>>
            >>> client.edit_message('username', message, 'hello!')
            >>> # or
            >>> client.edit_message('username', message.id, 'Hello')
            >>> # or
            >>> client.edit_message(message, 'Hello!')

        Raises:
            ``MessageAuthorRequiredError`` if you're not the author of the
            message but tried editing it anyway.

            ``MessageNotModifiedError`` if the contents of the message were
            not modified at all.

        Returns:
            The edited :tl:`Message`.
        """
        if isinstance(entity, Message):
            text = message  # Shift the parameters to the right
            message = entity
            entity = entity.to_id

        text, msg_entities = self._parse_message_text(text, parse_mode)
        request = EditMessageRequest(
            peer=self.get_input_entity(entity),
            id=self._get_message_id(message),
            message=text,
            no_webpage=not link_preview,
            entities=msg_entities
        )
        result = self(request)
        return self._get_response_message(request, result)

    def delete_messages(self, entity, message_ids, revoke=True):
        """
        Deletes a message from a chat, optionally "for everyone".

        Args:
            entity (`entity`):
                From whom the message will be deleted. This can actually
                be ``None`` for normal chats, but **must** be present
                for channels and megagroups.

            message_ids (`list` | `int` | :tl:`Message`):
                The IDs (or ID) of the messages to be deleted.

            revoke (`bool`, optional):
                Whether the message should be deleted for everyone or not.
                By default it has the opposite behaviour of official clients,
                and it will delete the message for everyone.
                This has no effect on channels or megagroups.

        Returns:
            The :tl:`AffectedMessages`.
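
        A short sketch (the entity and message IDs are placeholders):

        >>> client.delete_messages('username', [42, 43])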
        """
        if not utils.is_list_like(message_ids):
            message_ids = (message_ids,)

        message_ids = [
            m.id if isinstance(m, (Message, MessageService, MessageEmpty))
            else int(m) for m in message_ids
        ]

        if entity is None:
            return self(messages.DeleteMessagesRequest(message_ids, revoke=revoke))

        entity = self.get_input_entity(entity)

        if isinstance(entity, InputPeerChannel):
            return self(channels.DeleteMessagesRequest(entity, message_ids))
        else:
            return self(messages.DeleteMessagesRequest(message_ids, revoke=revoke))
2018-05-24 11:40:44 +03:00
|
|
|
def iter_messages(self, entity, limit=None, offset_date=None,
|
2018-03-08 13:44:13 +03:00
|
|
|
offset_id=0, max_id=0, min_id=0, add_offset=0,
|
2018-04-22 17:30:14 +03:00
|
|
|
search=None, filter=None, from_user=None,
|
2018-05-28 20:33:23 +03:00
|
|
|
batch_size=100, wait_time=None, ids=None,
|
|
|
|
_total=None):
|
2016-09-08 13:13:31 +03:00
|
|
|
"""
|
2018-03-08 13:44:13 +03:00
|
|
|
Iterator over the message history for the specified entity.
|
2016-09-08 13:13:31 +03:00
|
|
|
|
2018-04-22 17:30:14 +03:00
|
|
|
If either `search`, `filter` or `from_user` are provided,
|
|
|
|
:tl:`messages.Search` will be used instead of :tl:`messages.getHistory`.
|
|
|
|
|
2018-01-05 15:30:21 +03:00
|
|
|
Args:
|
2018-03-28 17:03:47 +03:00
|
|
|
entity (`entity`):
|
2018-01-05 15:30:21 +03:00
|
|
|
The entity from whom to retrieve the message history.
|
|
|
|
|
2018-03-28 17:03:47 +03:00
|
|
|
limit (`int` | `None`, optional):
|
2018-01-05 15:30:21 +03:00
|
|
|
Number of messages to be retrieved. Due to limitations with
|
|
|
|
the API retrieving more than 3000 messages will take longer
|
|
|
|
than half a minute (or even more based on previous calls).
|
|
|
|
The limit may also be ``None``, which would eventually return
|
|
|
|
the whole history.
|
|
|
|
|
2018-03-28 17:03:47 +03:00
|
|
|
offset_date (`datetime`):
|
2018-01-05 15:30:21 +03:00
|
|
|
Offset date (messages *previous* to this date will be
|
|
|
|
retrieved). Exclusive.
|
|
|
|
|
2018-03-28 17:03:47 +03:00
|
|
|
offset_id (`int`):
|
2018-01-05 15:30:21 +03:00
|
|
|
Offset message ID (only messages *previous* to the given
|
|
|
|
ID will be retrieved). Exclusive.
|
|
|
|
|
2018-03-28 17:03:47 +03:00
|
|
|
max_id (`int`):
|
2018-01-05 15:30:21 +03:00
|
|
|
All the messages with a higher (newer) ID or equal to this will
|
2018-05-24 11:19:48 +03:00
|
|
|
be excluded.
|
2018-01-05 15:30:21 +03:00
|
|
|
|
2018-03-28 17:03:47 +03:00
|
|
|
min_id (`int`):
|
2018-01-05 15:30:21 +03:00
|
|
|
All the messages with a lower (older) ID or equal to this will
|
|
|
|
be excluded.
|
|
|
|
|
2018-03-28 17:03:47 +03:00
|
|
|
add_offset (`int`):
|
2018-01-05 15:30:21 +03:00
|
|
|
Additional message offset (all of the specified offsets +
|
|
|
|
this offset = older messages).
|
|
|
|
|
2018-04-22 17:30:14 +03:00
|
|
|
search (`str`):
|
|
|
|
The string to be used as a search query.
|
|
|
|
|
|
|
|
filter (:tl:`MessagesFilter` | `type`):
|
|
|
|
The filter to use when returning messages. For instance,
|
|
|
|
:tl:`InputMessagesFilterPhotos` would yield only messages
|
|
|
|
containing photos.
|
|
|
|
|
|
|
|
from_user (`entity`):
|
|
|
|
Only messages from this user will be returned.
|
|
|
|
|
2018-03-28 17:03:47 +03:00
|
|
|
batch_size (`int`):
|
2018-01-27 23:37:57 +03:00
|
|
|
Messages will be returned in chunks of this size (100 is
|
|
|
|
the maximum). While it makes no sense to modify this value,
|
|
|
|
you are still free to do so.
|
2018-01-27 23:29:38 +03:00
|
|
|
|
2018-03-28 17:03:47 +03:00
|
|
|
wait_time (`int`):
|
2018-03-23 23:40:24 +03:00
|
|
|
Wait time between different :tl:`GetHistoryRequest`. Use this
|
2018-01-27 23:37:57 +03:00
|
|
|
parameter to avoid hitting the ``FloodWaitError`` as needed.
|
|
|
|
If left to ``None``, it will default to 1 second only if
|
|
|
|
the limit is higher than 3000.
|
2018-01-27 23:29:38 +03:00
|
|
|
|
2018-05-28 20:33:23 +03:00
|
|
|
ids (`int`, `list`):
|
|
|
|
A single integer ID (or several IDs) for the message that
|
|
|
|
should be returned. This parameter takes precedence over
|
|
|
|
the rest (which will be ignored if this is set). This can
|
|
|
|
for instance be used to get the message with ID 123 from
|
|
|
|
a channel. Note that if the message doesn't exist, ``None``
|
|
|
|
will appear in its place, so that zipping the list of IDs
|
|
|
|
with the messages can match one-to-one.
|
|
|
|
|
2018-03-28 17:03:47 +03:00
|
|
|
_total (`list`, optional):
|
2018-03-22 21:13:42 +03:00
|
|
|
A single-item list to pass the total parameter by reference.
|
2018-03-08 13:44:13 +03:00
|
|
|
|
|
|
|
Yields:
|
2018-03-23 23:40:24 +03:00
|
|
|
Instances of :tl:`Message` with extra attributes:
|
2018-01-05 15:30:21 +03:00
|
|
|
|
|
|
|
* ``.sender`` = entity of the sender.
|
|
|
|
* ``.fwd_from.sender`` = if fwd_from, who sent it originally.
|
|
|
|
* ``.fwd_from.channel`` = if fwd_from, original channel.
|
|
|
|
* ``.to`` = entity to which the message was sent.
|
2018-01-27 23:29:38 +03:00
|
|
|
|
|
|
|
Notes:
|
2018-03-23 23:40:24 +03:00
|
|
|
Telegram's flood wait limit for :tl:`GetHistoryRequest` seems to
|
2018-01-27 23:37:57 +03:00
|
|
|
be around 30 seconds per 3000 messages, therefore a sleep of 1
|
|
|
|
second is the default for this limit (or above). You may need
|
|
|
|
a higher limit, so you're free to set the ``batch_size`` that
|
|
|
|
you think may be good.
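
Example:
    A minimal usage sketch; it assumes ``client`` is a connected,
    authorized instance of this class and that ``'username'`` and
    ``'someone'`` are entities you can access::

        # Print the 10 most recent messages of a dialog
        for message in client.iter_messages('username', limit=10):
            print(message.id, message.message)

        # Iterate only over the photos sent by a given user
        from telethon.tl.types import InputMessagesFilterPhotos
        for message in client.iter_messages(
                'username', filter=InputMessagesFilterPhotos,
                from_user='someone'):
            print(message.id)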
|
2016-09-08 13:13:31 +03:00
|
|
|
"""
|
2018-05-28 20:33:23 +03:00
|
|
|
entity = self.get_input_entity(entity)
|
|
|
|
if ids:
|
|
|
|
if not utils.is_list_like(ids):
|
|
|
|
ids = (ids,)
|
|
|
|
yield from self._iter_ids(entity, ids, total=_total)
|
|
|
|
return
|
|
|
|
|
2018-05-24 11:19:48 +03:00
|
|
|
# Telegram doesn't like min_id/max_id. If these IDs are low enough
|
|
|
|
# (starting from last_id - 100), the request will return nothing.
|
|
|
|
#
|
|
|
|
# We can emulate their behaviour locally by setting offset = max_id
|
|
|
|
# and simply stopping once we hit a message with ID <= min_id.
|
|
|
|
offset_id = max(offset_id, max_id)
|
2018-05-24 11:40:44 +03:00
|
|
|
if offset_id and min_id:
|
|
|
|
if offset_id - min_id <= 1:
|
|
|
|
return
|
2018-05-24 11:19:48 +03:00
|
|
|
|
2017-10-31 14:48:55 +03:00
|
|
|
limit = float('inf') if limit is None else int(limit)
|
2018-04-22 17:30:14 +03:00
|
|
|
if search is not None or filter or from_user:
|
2018-05-17 11:17:22 +03:00
|
|
|
if filter is None:
|
|
|
|
filter = InputMessagesFilterEmpty()
|
2018-04-22 17:30:14 +03:00
|
|
|
request = SearchRequest(
|
|
|
|
peer=entity,
|
|
|
|
q=search or '',
|
|
|
|
filter=filter() if isinstance(filter, type) else filter,
|
|
|
|
min_date=None,
|
|
|
|
max_date=offset_date,
|
|
|
|
offset_id=offset_id,
|
|
|
|
add_offset=add_offset,
|
|
|
|
limit=1,
|
2018-05-24 11:19:48 +03:00
|
|
|
max_id=0,
|
|
|
|
min_id=0,
|
2018-04-28 12:49:43 +03:00
|
|
|
hash=0,
|
2018-04-22 17:30:14 +03:00
|
|
|
from_id=self.get_input_entity(from_user) if from_user else None
|
|
|
|
)
|
|
|
|
else:
|
|
|
|
request = GetHistoryRequest(
|
|
|
|
peer=entity,
|
|
|
|
limit=1,
|
|
|
|
offset_date=offset_date,
|
|
|
|
offset_id=offset_id,
|
2018-05-24 11:19:48 +03:00
|
|
|
min_id=0,
|
|
|
|
max_id=0,
|
2018-04-22 17:30:14 +03:00
|
|
|
add_offset=add_offset,
|
|
|
|
hash=0
|
|
|
|
)
|
|
|
|
|
2017-10-31 15:52:43 +03:00
|
|
|
if limit == 0:
|
2018-03-22 21:13:42 +03:00
|
|
|
if not _total:
|
2018-03-08 13:44:13 +03:00
|
|
|
return
|
2017-10-31 15:52:43 +03:00
|
|
|
# No messages, but we still need to know the total message count
|
2018-04-22 17:30:14 +03:00
|
|
|
result = self(request)
|
2018-03-22 21:13:42 +03:00
|
|
|
_total[0] = getattr(result, 'count', len(result.messages))
|
2018-03-08 13:44:13 +03:00
|
|
|
return
|
2017-10-31 15:52:43 +03:00
|
|
|
|
2018-01-27 23:37:57 +03:00
|
|
|
if wait_time is None:
|
|
|
|
wait_time = 1 if limit > 3000 else 0
|
|
|
|
|
2018-03-08 13:44:13 +03:00
|
|
|
have = 0
|
2018-05-17 13:08:52 +03:00
|
|
|
last_id = float('inf')
|
2018-01-27 23:37:57 +03:00
|
|
|
batch_size = min(max(batch_size, 1), 100)
|
2018-03-08 13:44:13 +03:00
|
|
|
while have < limit:
|
2018-04-25 10:51:50 +03:00
|
|
|
start = time.time()
|
2017-10-31 14:48:55 +03:00
|
|
|
# Telegram has a hard limit of 100
|
2018-04-22 17:30:14 +03:00
|
|
|
request.limit = min(limit - have, batch_size)
|
|
|
|
r = self(request)
|
2018-03-22 21:13:42 +03:00
|
|
|
if _total:
|
|
|
|
_total[0] = getattr(r, 'count', len(r.messages))
|
2016-09-08 13:13:31 +03:00
|
|
|
|
2018-03-08 13:44:13 +03:00
|
|
|
entities = {utils.get_peer_id(x): x
|
|
|
|
for x in itertools.chain(r.users, r.chats)}
|
2016-09-11 11:35:02 +03:00
|
|
|
|
2018-03-08 13:44:13 +03:00
|
|
|
for message in r.messages:
|
2018-05-24 11:19:48 +03:00
|
|
|
if message.id <= min_id:
|
|
|
|
return
|
|
|
|
|
2018-05-17 13:08:52 +03:00
|
|
|
if isinstance(message, MessageEmpty) or message.id >= last_id:
|
2018-03-08 13:44:13 +03:00
|
|
|
continue
|
|
|
|
|
2018-05-17 13:08:52 +03:00
|
|
|
# There have been reports that on bad connections this method
|
|
|
|
# was returning duplicated IDs sometimes. Using ``last_id``
|
|
|
|
# is an attempt to avoid these duplicates, since the message
|
|
|
|
# IDs are returned in descending order.
|
|
|
|
last_id = message.id
|
|
|
|
|
2018-05-28 20:33:23 +03:00
|
|
|
self._make_message_friendly(message, entities)
|
2018-03-08 13:44:13 +03:00
|
|
|
yield message
|
|
|
|
have += 1
|
|
|
|
|
2018-04-22 17:30:14 +03:00
|
|
|
if len(r.messages) < request.limit:
|
2017-10-31 14:48:55 +03:00
|
|
|
break
|
2017-10-01 17:57:07 +03:00
|
|
|
|
2018-04-22 17:30:14 +03:00
|
|
|
request.offset_id = r.messages[-1].id
|
|
|
|
if isinstance(request, GetHistoryRequest):
|
|
|
|
request.offset_date = r.messages[-1].date
|
|
|
|
else:
|
|
|
|
request.max_date = r.messages[-1].date
|
|
|
|
|
2018-04-25 10:51:50 +03:00
|
|
|
time.sleep(max(wait_time - (time.time() - start), 0))
|
2017-10-31 14:48:55 +03:00
|
|
|
|
2018-05-28 20:33:23 +03:00
|
|
|
@staticmethod
|
|
|
|
def _make_message_friendly(message, entities):
|
|
|
|
"""
|
|
|
|
Add a few extra attributes to the :tl:`Message` to be friendlier.
|
|
|
|
|
|
|
|
To make messages more friendly, always add a ``message`` attribute
|
|
|
|
to service messages and an ``action`` attribute to normal messages (``None`` if missing).
|
|
|
|
"""
|
|
|
|
# TODO Create an actual friendlier class
|
|
|
|
message.message = getattr(message, 'message', None)
|
|
|
|
message.action = getattr(message, 'action', None)
|
|
|
|
message.to = entities[utils.get_peer_id(message.to_id)]
|
|
|
|
message.sender = (
|
|
|
|
None if not message.from_id else
|
|
|
|
entities[utils.get_peer_id(message.from_id)]
|
|
|
|
)
|
|
|
|
if getattr(message, 'fwd_from', None):
|
|
|
|
message.fwd_from.sender = (
|
|
|
|
None if not message.fwd_from.from_id else
|
|
|
|
entities[utils.get_peer_id(message.fwd_from.from_id)]
|
|
|
|
)
|
|
|
|
message.fwd_from.channel = (
|
|
|
|
None if not message.fwd_from.channel_id else
|
|
|
|
entities[utils.get_peer_id(
|
|
|
|
PeerChannel(message.fwd_from.channel_id)
|
|
|
|
)]
|
|
|
|
)
|
|
|
|
|
|
|
|
def _iter_ids(self, entity, ids, total):
|
|
|
|
"""
|
|
|
|
Special case for `iter_messages` when it should only fetch some IDs.
|
|
|
|
"""
|
|
|
|
if total:
|
|
|
|
total[0] = len(ids)
|
|
|
|
|
|
|
|
if isinstance(entity, InputPeerChannel):
|
|
|
|
r = self(channels.GetMessagesRequest(entity, ids))
|
|
|
|
else:
|
|
|
|
r = self(messages.GetMessagesRequest(ids))
|
|
|
|
|
|
|
|
entities = {utils.get_peer_id(x): x
|
|
|
|
for x in itertools.chain(r.users, r.chats)}
|
|
|
|
|
|
|
|
# Telegram seems to return the messages in the order in which
|
|
|
|
# we asked them for, so we don't need to check it ourselves.
|
|
|
|
for message in r.messages:
|
|
|
|
if isinstance(message, MessageEmpty):
|
|
|
|
yield None
|
|
|
|
else:
|
|
|
|
self._make_message_friendly(message, entities)
|
|
|
|
yield message
|
|
|
|
|
2018-03-08 13:44:13 +03:00
|
|
|
def get_messages(self, *args, **kwargs):
|
|
|
|
"""
|
|
|
|
Same as :meth:`iter_messages`, but returns a list instead
|
2018-03-22 20:39:42 +03:00
|
|
|
with an additional ``.total`` attribute on the list.
|
2018-05-24 11:40:44 +03:00
|
|
|
|
|
|
|
If the `limit` is not set, it will be 1 by default unless both
|
|
|
|
`min_id` **and** `max_id` are set (as *named* arguments), in
|
|
|
|
which case the entire range will be returned.
|
|
|
|
|
|
|
|
This is so because any integer limit would be rather arbitrary and
|
|
|
|
it's common to only want to fetch one message, but if a range is
|
|
|
|
specified it makes sense that it should return the entirety of it.
|
2018-05-28 20:33:23 +03:00
|
|
|
|
|
|
|
If `ids` is present in the *named* arguments and is not a list,
|
|
|
|
a single :tl:`Message` will be returned for convenience instead
|
|
|
|
of a list.
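
Example:
    An illustrative sketch; ``client`` is assumed to be a connected,
    authorized instance of this class and ``'username'`` any entity
    you can access::

        # A list with (at most) the latest message
        messages = client.get_messages('username')

        # The message with ID 123, or ``None`` if it doesn't exist
        message = client.get_messages('username', ids=123)

        # The last 20 messages, plus the total count of the history
        messages = client.get_messages('username', limit=20)
        print(messages.total)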
|
2018-03-08 13:44:13 +03:00
|
|
|
"""
|
2018-03-22 21:13:42 +03:00
|
|
|
total = [0]
|
|
|
|
kwargs['_total'] = total
|
2018-05-24 11:40:44 +03:00
|
|
|
if len(args) == 1 and 'limit' not in kwargs:
|
|
|
|
if 'min_id' in kwargs and 'max_id' in kwargs:
|
|
|
|
kwargs['limit'] = None
|
|
|
|
else:
|
|
|
|
kwargs['limit'] = 1
|
|
|
|
|
2018-03-08 13:44:13 +03:00
|
|
|
msgs = UserList(self.iter_messages(*args, **kwargs))
|
2018-03-22 21:13:42 +03:00
|
|
|
msgs.total = total[0]
|
2018-05-28 20:33:23 +03:00
|
|
|
if 'ids' in kwargs and not utils.is_list_like(kwargs['ids']):
|
|
|
|
return msgs[0]
|
|
|
|
|
2018-03-08 13:44:13 +03:00
|
|
|
return msgs
|
|
|
|
|
|
|
|
def get_message_history(self, *args, **kwargs):
|
2018-05-07 20:53:32 +03:00
|
|
|
"""Deprecated, see :meth:`get_messages`."""
|
2018-03-08 13:44:13 +03:00
|
|
|
warnings.warn(
|
|
|
|
'get_message_history is deprecated, use get_messages instead'
|
|
|
|
)
|
|
|
|
return self.get_messages(*args, **kwargs)
|
2016-09-12 15:07:45 +03:00
|
|
|
|
2018-01-10 14:50:49 +03:00
|
|
|
def send_read_acknowledge(self, entity, message=None, max_id=None,
|
|
|
|
clear_mentions=False):
|
2017-10-21 17:59:20 +03:00
|
|
|
"""
|
|
|
|
Sends a "read acknowledge" (i.e., notifying the given peer that we've
|
|
|
|
read their messages, also known as the "double check").
|
|
|
|
|
2018-04-14 13:03:08 +03:00
|
|
|
This effectively marks a message as read (or more than one) in the
|
|
|
|
given conversation.
|
|
|
|
|
2018-01-05 15:30:21 +03:00
|
|
|
Args:
|
2018-03-28 17:03:47 +03:00
|
|
|
entity (`entity`):
|
2018-01-05 15:30:21 +03:00
|
|
|
The chat where these messages are located.
|
|
|
|
|
2018-03-28 17:03:47 +03:00
|
|
|
message (`list` | :tl:`Message`):
|
2018-01-05 15:30:21 +03:00
|
|
|
Either a list of messages or a single message.
|
|
|
|
|
2018-03-28 17:03:47 +03:00
|
|
|
max_id (`int`):
|
2018-01-05 15:30:21 +03:00
|
|
|
Until which message the read acknowledge should be
|
|
|
|
sent. This overrides the ``message`` parameter.
|
2018-01-10 14:50:49 +03:00
|
|
|
|
2018-03-28 17:03:47 +03:00
|
|
|
clear_mentions (`bool`):
|
2018-01-10 14:50:49 +03:00
|
|
|
Whether the mention badge should be cleared (so that
|
|
|
|
there are no more mentions) or not for the given entity.
|
|
|
|
|
|
|
|
If no message is provided, this will be the only action
|
|
|
|
taken.
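
Example:
    A minimal sketch; ``client`` is assumed to be a connected,
    authorized instance of this class, ``chat`` any entity, and
    ``messages`` e.g. the result of ``client.get_messages(chat)``::

        # Mark the given messages (and everything older) as read
        client.send_read_acknowledge(chat, messages)

        # Only clear the mentions badge of that chat
        client.send_read_acknowledge(chat, clear_mentions=True)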
|
2017-08-23 01:01:10 +03:00
|
|
|
"""
|
2016-10-02 14:57:03 +03:00
|
|
|
if max_id is None:
|
2018-01-10 14:50:49 +03:00
|
|
|
if message:
|
2018-02-26 16:12:21 +03:00
|
|
|
if utils.is_list_like(message):
|
2018-01-10 14:50:49 +03:00
|
|
|
max_id = max(msg.id for msg in message)
|
|
|
|
else:
|
|
|
|
max_id = message.id
|
|
|
|
elif not clear_mentions:
|
2017-12-28 02:22:28 +03:00
|
|
|
raise ValueError(
|
2016-11-30 00:29:42 +03:00
|
|
|
'Either a message list or a max_id must be provided.')
|
2016-10-02 14:57:03 +03:00
|
|
|
|
2018-01-10 14:50:49 +03:00
|
|
|
entity = self.get_input_entity(entity)
|
|
|
|
if clear_mentions:
|
|
|
|
self(ReadMentionsRequest(entity))
|
|
|
|
if max_id is None:
|
|
|
|
return True
|
|
|
|
|
|
|
|
if max_id is not None:
|
|
|
|
if isinstance(entity, InputPeerChannel):
|
|
|
|
return self(channels.ReadHistoryRequest(entity, max_id=max_id))
|
2016-10-02 14:57:03 +03:00
|
|
|
else:
|
2018-01-10 14:50:49 +03:00
|
|
|
return self(messages.ReadHistoryRequest(entity, max_id=max_id))
|
2016-10-02 14:57:03 +03:00
|
|
|
|
2018-01-10 14:50:49 +03:00
|
|
|
return False
|
2016-10-02 14:57:03 +03:00
|
|
|
|
2017-09-13 12:51:23 +03:00
|
|
|
@staticmethod
|
2018-02-15 13:19:34 +03:00
|
|
|
def _get_message_id(message):
|
2017-09-13 12:51:23 +03:00
|
|
|
"""Sanitizes the 'reply_to' parameter a user may send"""
|
2018-02-15 13:19:34 +03:00
|
|
|
if message is None:
|
2017-09-13 12:51:23 +03:00
|
|
|
return None
|
|
|
|
|
2018-02-15 13:19:34 +03:00
|
|
|
if isinstance(message, int):
|
|
|
|
return message
|
2017-09-13 12:51:23 +03:00
|
|
|
|
2018-01-19 15:00:17 +03:00
|
|
|
try:
|
2018-02-15 13:19:34 +03:00
|
|
|
if message.SUBCLASS_OF_ID == 0x790009e3:
|
2018-01-19 15:00:17 +03:00
|
|
|
# hex(crc32(b'Message')) = 0x790009e3
|
2018-02-15 13:19:34 +03:00
|
|
|
return message.id
|
2018-01-19 15:00:17 +03:00
|
|
|
except AttributeError:
|
|
|
|
pass
|
2017-09-13 12:51:23 +03:00
|
|
|
|
2018-02-15 13:19:34 +03:00
|
|
|
raise TypeError('Invalid message type: {}'.format(type(message)))
|
2017-09-13 12:51:23 +03:00
|
|
|
|
2018-03-08 13:44:13 +03:00
|
|
|
def iter_participants(self, entity, limit=None, search='',
|
2018-03-22 21:13:42 +03:00
|
|
|
filter=None, aggressive=False, _total=None):
|
2018-02-23 23:20:32 +03:00
|
|
|
"""
|
2018-03-12 12:27:49 +03:00
|
|
|
Iterator over the participants belonging to the specified chat.
|
2018-02-23 23:20:32 +03:00
|
|
|
|
|
|
|
Args:
|
2018-03-28 17:03:47 +03:00
|
|
|
entity (`entity`):
|
2018-02-23 23:20:32 +03:00
|
|
|
The entity from which to retrieve the participants list.
|
|
|
|
|
2018-03-28 17:03:47 +03:00
|
|
|
limit (`int`):
|
2018-02-23 23:20:32 +03:00
|
|
|
Limits amount of participants fetched.
|
|
|
|
|
2018-03-28 17:03:47 +03:00
|
|
|
search (`str`, optional):
|
2018-02-23 23:20:32 +03:00
|
|
|
Look for participants with this string in name/username.
|
|
|
|
|
2018-03-23 23:40:24 +03:00
|
|
|
filter (:tl:`ChannelParticipantsFilter`, optional):
|
|
|
|
The filter to be used, if you want e.g. only admins.
|
2018-03-13 15:15:02 +03:00
|
|
|
Note that you might not have permissions for some filter.
|
|
|
|
This has no effect for normal chats or users.
|
|
|
|
|
2018-03-28 17:03:47 +03:00
|
|
|
aggressive (`bool`, optional):
|
2018-02-28 15:24:44 +03:00
|
|
|
Aggressively looks for all participants in the chat in
|
|
|
|
order to get more than 10,000 members (a hard limit
|
2018-02-28 19:10:44 +03:00
|
|
|
imposed by Telegram). Note that this might take a long
|
|
|
|
time (over 5 minutes), but is able to return over 90,000
|
|
|
|
participants on groups with 100,000 members.
|
2018-02-28 15:24:44 +03:00
|
|
|
|
|
|
|
This has no effect for groups or channels with less than
|
2018-03-13 15:15:02 +03:00
|
|
|
10,000 members, or if a ``filter`` is given.
|
2018-02-28 15:24:44 +03:00
|
|
|
|
2018-03-28 17:03:47 +03:00
|
|
|
_total (`list`, optional):
|
2018-03-22 21:13:42 +03:00
|
|
|
A single-item list to pass the total parameter by reference.
|
2018-03-08 13:44:13 +03:00
|
|
|
|
2018-03-12 12:27:49 +03:00
|
|
|
Yields:
|
2018-03-23 23:40:24 +03:00
|
|
|
The :tl:`User` objects returned by :tl:`GetParticipantsRequest`
|
2018-03-12 12:27:49 +03:00
|
|
|
with an additional ``.participant`` attribute which is the
|
2018-03-23 23:40:24 +03:00
|
|
|
matched :tl:`ChannelParticipant` type for channels/megagroups
|
|
|
|
or :tl:`ChatParticipants` for normal chats.
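
Example:
    A minimal sketch; ``client`` is assumed to be a connected,
    authorized instance of this class and ``group`` any group or
    channel you participate in::

        for user in client.iter_participants(group, search='john'):
            print(user.id, user.first_name, user.participant)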
|
2018-02-23 23:20:32 +03:00
|
|
|
"""
|
2018-03-13 15:15:02 +03:00
|
|
|
if isinstance(filter, type):
|
2018-04-07 13:31:30 +03:00
|
|
|
if filter in (ChannelParticipantsBanned, ChannelParticipantsKicked,
|
|
|
|
ChannelParticipantsSearch):
|
|
|
|
# These require a `q` parameter (support types for convenience)
|
|
|
|
filter = filter('')
|
|
|
|
else:
|
|
|
|
filter = filter()
|
2018-03-13 15:15:02 +03:00
|
|
|
|
2018-02-23 23:20:32 +03:00
|
|
|
entity = self.get_input_entity(entity)
|
2018-03-13 15:15:02 +03:00
|
|
|
if search and (filter or not isinstance(entity, InputPeerChannel)):
|
|
|
|
# We need to 'search' ourselves unless we have a PeerChannel
|
|
|
|
search = search.lower()
|
|
|
|
|
|
|
|
def filter_entity(ent):
|
|
|
|
return search in utils.get_display_name(ent).lower() or\
|
|
|
|
search in (getattr(ent, 'username', '') or '').lower()
|
|
|
|
else:
|
|
|
|
def filter_entity(ent):
|
|
|
|
return True
|
|
|
|
|
2018-02-23 23:20:32 +03:00
|
|
|
limit = float('inf') if limit is None else int(limit)
|
|
|
|
if isinstance(entity, InputPeerChannel):
|
2018-04-14 13:08:50 +03:00
|
|
|
if _total or (aggressive and not filter):
|
|
|
|
total = self(GetFullChannelRequest(
|
|
|
|
entity
|
|
|
|
)).full_chat.participants_count
|
|
|
|
if _total:
|
|
|
|
_total[0] = total
|
|
|
|
else:
|
|
|
|
total = 0
|
2018-03-08 13:44:13 +03:00
|
|
|
|
2018-03-06 14:24:37 +03:00
|
|
|
if limit == 0:
|
2018-03-08 13:44:13 +03:00
|
|
|
return
|
2018-02-28 15:24:44 +03:00
|
|
|
|
2018-03-08 13:44:13 +03:00
|
|
|
seen = set()
|
2018-03-13 15:15:02 +03:00
|
|
|
if total > 10000 and aggressive and not filter:
|
2018-02-28 19:10:44 +03:00
|
|
|
requests = [GetParticipantsRequest(
|
|
|
|
channel=entity,
|
|
|
|
filter=ChannelParticipantsSearch(search + chr(x)),
|
|
|
|
offset=0,
|
|
|
|
limit=200,
|
|
|
|
hash=0
|
|
|
|
) for x in range(ord('a'), ord('z') + 1)]
|
2018-02-28 15:24:44 +03:00
|
|
|
else:
|
2018-02-28 19:10:44 +03:00
|
|
|
requests = [GetParticipantsRequest(
|
2018-02-28 15:24:44 +03:00
|
|
|
channel=entity,
|
2018-03-13 15:15:02 +03:00
|
|
|
filter=filter or ChannelParticipantsSearch(search),
|
2018-02-28 15:24:44 +03:00
|
|
|
offset=0,
|
2018-02-28 19:10:44 +03:00
|
|
|
limit=200,
|
2018-02-28 15:24:44 +03:00
|
|
|
hash=0
|
2018-02-28 19:10:44 +03:00
|
|
|
)]
|
|
|
|
|
|
|
|
while requests:
|
|
|
|
# Only care about the limit for the first request
|
|
|
|
# (small amount of people, won't be aggressive).
|
|
|
|
#
|
|
|
|
# Most people won't care about getting exactly 12,345
|
|
|
|
# members, so it doesn't really matter if we're not 100%
|
|
|
|
# precise with the offset/limit here.
|
|
|
|
requests[0].limit = min(limit - requests[0].offset, 200)
|
|
|
|
if requests[0].offset > limit:
|
|
|
|
break
|
|
|
|
|
2018-05-09 11:19:45 +03:00
|
|
|
results = self(requests)
|
2018-02-28 19:10:44 +03:00
|
|
|
for i in reversed(range(len(requests))):
|
|
|
|
participants = results[i]
|
2018-02-28 15:24:44 +03:00
|
|
|
if not participants.users:
|
2018-02-28 19:10:44 +03:00
|
|
|
requests.pop(i)
|
|
|
|
else:
|
2018-03-12 11:52:16 +03:00
|
|
|
requests[i].offset += len(participants.participants)
|
2018-03-12 12:27:49 +03:00
|
|
|
users = {user.id: user for user in participants.users}
|
|
|
|
for participant in participants.participants:
|
2018-03-13 15:15:02 +03:00
|
|
|
user = users[participant.user_id]
|
|
|
|
if not filter_entity(user) or user.id in seen:
|
|
|
|
continue
|
|
|
|
|
|
|
|
seen.add(participant.user_id)
|
|
|
|
user = users[participant.user_id]
|
|
|
|
user.participant = participant
|
|
|
|
yield user
|
|
|
|
if len(seen) >= limit:
|
|
|
|
return
|
2018-02-23 23:20:32 +03:00
|
|
|
|
|
|
|
elif isinstance(entity, InputPeerChat):
|
2018-03-13 15:15:02 +03:00
|
|
|
# TODO We *could* apply the `filter` here ourselves
|
2018-03-12 12:27:49 +03:00
|
|
|
full = self(GetFullChatRequest(entity.chat_id))
|
2018-03-27 18:15:22 +03:00
|
|
|
if not isinstance(full.full_chat.participants, ChatParticipants):
|
|
|
|
# ChatParticipantsForbidden won't have ``.participants``
|
|
|
|
if _total:
    _total[0] = 0
|
|
|
|
return
|
|
|
|
|
2018-03-22 21:13:42 +03:00
|
|
|
if _total:
|
|
|
|
_total[0] = len(full.full_chat.participants.participants)
|
2018-03-08 13:44:13 +03:00
|
|
|
|
|
|
|
have = 0
|
2018-03-12 12:27:49 +03:00
|
|
|
users = {user.id: user for user in full.users}
|
|
|
|
for participant in full.full_chat.participants.participants:
|
2018-03-13 15:15:02 +03:00
|
|
|
user = users[participant.user_id]
|
|
|
|
if not filter_entity(user):
|
|
|
|
continue
|
2018-03-08 13:44:13 +03:00
|
|
|
have += 1
|
|
|
|
if have > limit:
|
|
|
|
break
|
|
|
|
else:
|
2018-03-12 12:27:49 +03:00
|
|
|
user = users[participant.user_id]
|
|
|
|
user.participant = participant
|
2018-03-08 13:44:13 +03:00
|
|
|
yield user
|
2018-02-23 23:20:32 +03:00
|
|
|
else:
|
2018-03-22 21:13:42 +03:00
|
|
|
if _total:
|
|
|
|
_total[0] = 1
|
2018-03-08 13:44:13 +03:00
|
|
|
if limit != 0:
|
2018-03-12 12:27:49 +03:00
|
|
|
user = self.get_entity(entity)
|
2018-03-13 15:15:02 +03:00
|
|
|
if filter_entity(user):
|
|
|
|
user.participant = None
|
|
|
|
yield user
|
2018-03-08 13:44:13 +03:00
|
|
|
|
|
|
|
def get_participants(self, *args, **kwargs):
|
|
|
|
"""
|
|
|
|
Same as :meth:`iter_participants`, but returns a list instead
|
2018-03-22 20:39:42 +03:00
|
|
|
with an additional ``.total`` attribute on the list.
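
Example:
    A short sketch with the same assumptions as `iter_participants`::

        users = client.get_participants(group, limit=100)
        print(len(users), 'fetched out of', users.total)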
|
2018-03-08 13:44:13 +03:00
|
|
|
"""
|
2018-03-22 21:13:42 +03:00
|
|
|
total = [0]
|
|
|
|
kwargs['_total'] = total
|
2018-03-12 12:27:49 +03:00
|
|
|
participants = UserList(self.iter_participants(*args, **kwargs))
|
2018-03-22 21:13:42 +03:00
|
|
|
participants.total = total[0]
|
2018-03-12 12:27:49 +03:00
|
|
|
return participants
|
2018-02-23 23:20:32 +03:00
|
|
|
|
2016-09-11 17:24:03 +03:00
|
|
|
# endregion
|
|
|
|
|
2017-06-15 16:50:44 +03:00
|
|
|
# region Uploading files
|
|
|
|
|
2018-03-07 11:09:05 +03:00
|
|
|
def send_file(self, entity, file, caption='',
|
2017-09-13 12:30:20 +03:00
|
|
|
force_document=False, progress_callback=None,
|
2017-09-13 12:51:23 +03:00
|
|
|
reply_to=None,
|
2017-10-09 12:20:09 +03:00
|
|
|
attributes=None,
|
2018-01-03 14:47:38 +03:00
|
|
|
thumb=None,
|
2018-01-15 14:36:46 +03:00
|
|
|
allow_cache=True,
|
2018-03-02 23:28:33 +03:00
|
|
|
parse_mode='md',
|
2018-05-07 20:53:32 +03:00
|
|
|
voice_note=False,
|
|
|
|
video_note=False,
|
2017-09-13 12:30:20 +03:00
|
|
|
**kwargs):
|
2017-10-21 17:59:20 +03:00
|
|
|
"""
|
|
|
|
Sends a file to the specified entity.
|
|
|
|
|
2018-01-05 15:30:21 +03:00
|
|
|
Args:
|
2018-03-28 17:03:47 +03:00
|
|
|
entity (`entity`):
|
2018-01-05 15:30:21 +03:00
|
|
|
Who will receive the file.
|
|
|
|
|
2018-03-28 17:03:47 +03:00
|
|
|
file (`str` | `bytes` | `file` | `media`):
|
2018-01-05 15:30:21 +03:00
|
|
|
The path of the file, byte array, or stream that will be sent.
|
|
|
|
Note that if a byte array or a stream is given, a filename
|
|
|
|
or its type won't be inferred, and it will be sent as an
|
|
|
|
"unnamed application/octet-stream".
|
|
|
|
|
2018-01-23 14:13:03 +03:00
|
|
|
Furthermore, the file may be any media (a message, document,
|
|
|
|
photo or similar) so that it can be resent without the need
|
|
|
|
to download and re-upload it again.
|
|
|
|
|
2018-03-15 12:17:45 +03:00
|
|
|
If a list or similar is provided, the files in it will be
|
|
|
|
sent as an album in the order in which they appear, sliced
|
|
|
|
in chunks of 10 if more than 10 are given.
|
|
|
|
|
2018-03-28 17:03:47 +03:00
|
|
|
caption (`str`, optional):
|
2018-01-05 15:30:21 +03:00
|
|
|
Optional caption for the sent media message.
|
|
|
|
|
2018-03-28 17:03:47 +03:00
|
|
|
force_document (`bool`, optional):
|
2018-01-05 15:30:21 +03:00
|
|
|
If left to ``False`` and the file is a path that ends with
|
2018-02-12 12:33:51 +03:00
|
|
|
the extension of an image file or a video file, it will be
|
|
|
|
sent as such. Otherwise always as a document.
|
2018-01-05 15:30:21 +03:00
|
|
|
|
2018-03-28 17:03:47 +03:00
|
|
|
progress_callback (`callable`, optional):
|
2018-01-05 15:30:21 +03:00
|
|
|
A callback function accepting two parameters:
|
|
|
|
``(sent bytes, total)``.
|
|
|
|
|
2018-03-28 17:03:47 +03:00
|
|
|
reply_to (`int` | :tl:`Message`):
|
2018-05-17 13:00:22 +03:00
|
|
|
Same as `reply_to` from `send_message`.
|
2018-01-05 15:30:21 +03:00
|
|
|
|
2018-03-28 17:03:47 +03:00
|
|
|
attributes (`list`, optional):
|
2018-01-05 15:30:21 +03:00
|
|
|
Optional attributes that override the inferred ones, like
|
2018-03-23 23:40:24 +03:00
|
|
|
:tl:`DocumentAttributeFilename` and so on.
|
2018-01-05 15:30:21 +03:00
|
|
|
|
2018-03-28 17:03:47 +03:00
|
|
|
thumb (`str` | `bytes` | `file`, optional):
|
2018-01-05 15:30:21 +03:00
|
|
|
Optional thumbnail (for videos).
|
|
|
|
|
2018-03-28 17:03:47 +03:00
|
|
|
allow_cache (`bool`, optional):
|
2018-01-15 14:36:46 +03:00
|
|
|
Whether to allow using the cached version stored in the
|
2018-01-18 21:36:47 +03:00
|
|
|
database or not. Defaults to ``True`` to avoid re-uploads.
|
|
|
|
Must be ``False`` if you wish to use different attributes
|
|
|
|
or thumb than those that were used when the file was cached.
|
2018-01-15 14:36:46 +03:00
|
|
|
|
2018-03-28 17:03:47 +03:00
|
|
|
parse_mode (`str`, optional):
|
2018-03-02 23:28:33 +03:00
|
|
|
The parse mode for the caption message.
|
|
|
|
|
2018-05-07 20:53:32 +03:00
|
|
|
voice_note (`bool`, optional):
|
|
|
|
If ``True`` the audio will be sent as a voice note.
|
|
|
|
|
|
|
|
If you previously sent the same file without this setting,
|
|
|
|
set `allow_cache` to ``False`` for it to take effect.
|
|
|
|
|
|
|
|
video_note (`bool`, optional):
|
|
|
|
If ``True`` the video will be sent as a video note,
|
|
|
|
also known as a round video message.
|
|
|
|
|
|
|
|
If you previously sent the same file without this setting,
|
|
|
|
set `allow_cache` to ``False`` for it to take effect.
|
2018-01-05 15:30:21 +03:00
|
|
|
|
2018-02-17 15:00:58 +03:00
|
|
|
Notes:
|
|
|
|
If the ``hachoir3`` package (``hachoir`` module) is installed,
|
|
|
|
it will be used to determine metadata from audio and video files.
|
|
|
|
|
2018-01-15 20:15:30 +03:00
|
|
|
Returns:
|
2018-04-04 21:58:58 +03:00
|
|
|
The :tl:`Message` (or messages) containing the sent file,
|
|
|
|
or messages if a list of them was passed.
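
Example:
    An illustrative sketch; ``client`` is assumed to be a connected,
    authorized instance of this class, ``chat`` any entity, and the
    file paths are made up::

        # Images are sent as photos unless ``force_document=True``
        client.send_file(chat, '/path/to/photo.jpg', caption='Hello!')

        # The same file, but sent as a generic document
        client.send_file(chat, '/path/to/photo.jpg', force_document=True)

        # Several images at once are sent as an album
        client.send_file(chat, ['/path/a.jpg', '/path/b.jpg'])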
|
2017-08-23 01:01:10 +03:00
|
|
|
"""
|
2018-01-15 20:15:30 +03:00
|
|
|
# First check if the user passed an iterable, in which case
|
|
|
|
# we may want to send as an album if all are photo files.
|
2018-02-26 16:12:21 +03:00
|
|
|
if utils.is_list_like(file):
|
2018-03-15 12:17:45 +03:00
|
|
|
# TODO Fix progress_callback
|
|
|
|
images = []
|
2018-03-21 11:46:57 +03:00
|
|
|
if force_document:
|
|
|
|
documents = file
|
|
|
|
else:
|
|
|
|
documents = []
|
|
|
|
for x in file:
|
|
|
|
if utils.is_image(x):
|
|
|
|
images.append(x)
|
|
|
|
else:
|
|
|
|
documents.append(x)
|
2018-03-15 12:17:45 +03:00
|
|
|
|
|
|
|
result = []
|
|
|
|
while images:
|
|
|
|
result += self._send_album(
|
|
|
|
entity, images[:10], caption=caption,
|
2018-03-02 23:32:38 +03:00
|
|
|
progress_callback=progress_callback, reply_to=reply_to,
|
|
|
|
parse_mode=parse_mode
|
2018-01-15 20:15:30 +03:00
|
|
|
)
|
2018-03-15 12:17:45 +03:00
|
|
|
images = images[10:]
|
|
|
|
|
|
|
|
result.extend(
|
2018-01-15 20:15:30 +03:00
|
|
|
self.send_file(
|
2018-03-21 11:46:57 +03:00
|
|
|
entity, x, allow_cache=allow_cache,
|
2018-01-15 20:15:30 +03:00
|
|
|
caption=caption, force_document=force_document,
|
|
|
|
progress_callback=progress_callback, reply_to=reply_to,
|
2018-05-07 20:53:32 +03:00
|
|
|
attributes=attributes, thumb=thumb, voice_note=voice_note,
|
|
|
|
video_note=video_note, **kwargs
|
2018-03-15 12:17:45 +03:00
|
|
|
) for x in documents
|
|
|
|
)
|
|
|
|
return result
|
2017-08-23 01:27:33 +03:00
|
|
|
|
2018-01-23 13:59:35 +03:00
|
|
|
entity = self.get_input_entity(entity)
|
2018-02-15 13:19:34 +03:00
|
|
|
reply_to = self._get_message_id(reply_to)
|
2018-04-20 10:44:33 +03:00
|
|
|
|
|
|
|
# Not document since it's subject to change.
|
|
|
|
# Needed when a Message is passed to send_message and it has media.
|
|
|
|
if 'entities' in kwargs:
|
|
|
|
msg_entities = kwargs['entities']
|
|
|
|
else:
|
|
|
|
caption, msg_entities =\
|
|
|
|
self._parse_message_text(caption, parse_mode)
|
2018-01-23 13:59:35 +03:00
|
|
|
|
|
|
|
if not isinstance(file, (str, bytes, io.IOBase)):
|
|
|
|
# The user may pass a Message containing media (or the media,
|
|
|
|
# or anything similar) that should be treated as a file. Try
|
|
|
|
# getting the input media for whatever they passed and send it.
|
|
|
|
try:
|
2018-03-02 23:28:33 +03:00
|
|
|
media = utils.get_input_media(file)
|
2018-01-23 13:59:35 +03:00
|
|
|
except TypeError:
|
|
|
|
pass # Can't turn whatever was given into media
|
|
|
|
else:
|
|
|
|
request = SendMediaRequest(entity, media,
|
2018-03-02 23:28:33 +03:00
|
|
|
reply_to_msg_id=reply_to,
|
|
|
|
message=caption,
|
|
|
|
entities=msg_entities)
|
2018-01-23 13:59:35 +03:00
|
|
|
return self._get_response_message(request, self(request))
|
|
|
|
|
2018-01-18 21:36:47 +03:00
|
|
|
as_image = utils.is_image(file) and not force_document
|
|
|
|
use_cache = InputPhoto if as_image else InputDocument
|
2018-01-05 17:33:25 +03:00
|
|
|
file_handle = self.upload_file(
|
2018-01-18 21:36:47 +03:00
|
|
|
file, progress_callback=progress_callback,
|
|
|
|
use_cache=use_cache if allow_cache else None
|
|
|
|
)
|
2016-09-12 20:32:16 +03:00
|
|
|
|
2018-01-18 21:36:47 +03:00
|
|
|
if isinstance(file_handle, use_cache):
|
|
|
|
# File was cached, so an instance of use_cache was returned
|
|
|
|
if as_image:
|
2018-03-02 23:28:33 +03:00
|
|
|
media = InputMediaPhoto(file_handle)
|
2018-01-18 21:36:47 +03:00
|
|
|
else:
|
2018-03-02 23:28:33 +03:00
|
|
|
media = InputMediaDocument(file_handle)
|
2018-01-18 21:36:47 +03:00
|
|
|
elif as_image:
|
2018-03-02 23:28:33 +03:00
|
|
|
media = InputMediaUploadedPhoto(file_handle)
|
2017-08-23 01:27:33 +03:00
|
|
|
else:
|
|
|
|
mime_type = None
|
|
|
|
if isinstance(file, str):
|
|
|
|
# Determine mime-type and attributes
|
|
|
|
# Take the first element by using [0] since it returns a tuple
|
|
|
|
mime_type = guess_type(file)[0]
|
2017-10-09 12:20:09 +03:00
|
|
|
attr_dict = {
|
|
|
|
DocumentAttributeFilename:
|
2018-01-11 14:43:47 +03:00
|
|
|
DocumentAttributeFilename(os.path.basename(file))
|
2017-10-09 12:20:09 +03:00
|
|
|
}
|
2018-02-17 15:00:58 +03:00
|
|
|
if utils.is_audio(file) and hachoir:
|
|
|
|
m = hachoir.metadata.extractMetadata(
|
|
|
|
hachoir.parser.createParser(file)
|
|
|
|
)
|
|
|
|
attr_dict[DocumentAttributeAudio] = DocumentAttributeAudio(
|
2018-05-07 20:53:32 +03:00
|
|
|
voice=voice_note,
|
2018-02-17 15:00:58 +03:00
|
|
|
title=m.get('title') if m.has('title') else None,
|
|
|
|
performer=m.get('author') if m.has('author') else None,
|
|
|
|
duration=int(m.get('duration').seconds
|
|
|
|
if m.has('duration') else 0)
|
|
|
|
)
|
|
|
|
|
2018-02-12 12:33:51 +03:00
|
|
|
if not force_document and utils.is_video(file):
|
2018-02-17 15:00:58 +03:00
|
|
|
if hachoir:
|
|
|
|
m = hachoir.metadata.extractMetadata(
|
|
|
|
hachoir.parser.createParser(file)
|
|
|
|
)
|
|
|
|
doc = DocumentAttributeVideo(
|
2018-05-07 20:53:32 +03:00
|
|
|
round_message=video_note,
|
2018-02-17 15:00:58 +03:00
|
|
|
w=m.get('width') if m.has('width') else 0,
|
|
|
|
h=m.get('height') if m.has('height') else 0,
|
|
|
|
duration=int(m.get('duration').seconds
|
|
|
|
if m.has('duration') else 0)
|
|
|
|
)
|
|
|
|
else:
|
2018-05-24 13:28:10 +03:00
|
|
|
doc = DocumentAttributeVideo(0, 1, 1,
|
2018-05-07 20:53:32 +03:00
|
|
|
round_message=video_note)
|
|
|
|
|
2018-02-17 15:00:58 +03:00
|
|
|
attr_dict[DocumentAttributeVideo] = doc
|
2017-08-23 01:27:33 +03:00
|
|
|
else:
|
2017-10-09 12:20:09 +03:00
|
|
|
attr_dict = {
|
2018-02-25 22:35:55 +03:00
|
|
|
DocumentAttributeFilename: DocumentAttributeFilename(
|
2018-02-26 16:14:44 +03:00
|
|
|
os.path.basename(
|
|
|
|
getattr(file, 'name', None) or 'unnamed'))
|
2017-10-09 12:20:09 +03:00
|
|
|
}
|
2017-08-23 01:27:33 +03:00
|
|
|
|
2018-05-07 20:53:32 +03:00
|
|
|
if voice_note:
|
2018-02-17 15:00:58 +03:00
|
|
|
if DocumentAttributeAudio in attr_dict:
|
|
|
|
attr_dict[DocumentAttributeAudio].voice = True
|
|
|
|
else:
|
|
|
|
attr_dict[DocumentAttributeAudio] = \
|
|
|
|
DocumentAttributeAudio(0, voice=True)
|
2017-10-09 12:20:09 +03:00
|
|
|
|
|
|
|
# Now override the attributes if any. As we have a dict of
|
|
|
|
# {cls: instance}, we can override any class with the list
|
|
|
|
# of attributes provided by the user easily.
|
|
|
|
if attributes:
|
|
|
|
for a in attributes:
|
|
|
|
attr_dict[type(a)] = a
|
2017-09-13 12:30:20 +03:00
|
|
|
|
2017-08-23 01:27:33 +03:00
|
|
|
# Ensure we have a mime type, any; but it cannot be None
|
|
|
|
# 'The "octet-stream" subtype is used to indicate that a body
|
|
|
|
# contains arbitrary binary data.'
|
|
|
|
if not mime_type:
|
|
|
|
mime_type = 'application/octet-stream'
|
|
|
|
|
2018-01-03 14:47:38 +03:00
|
|
|
input_kw = {}
|
|
|
|
if thumb:
|
|
|
|
input_kw['thumb'] = self.upload_file(thumb)
|
|
|
|
|
2017-08-23 01:27:33 +03:00
|
|
|
media = InputMediaUploadedDocument(
|
|
|
|
file=file_handle,
|
2016-11-30 00:29:42 +03:00
|
|
|
mime_type=mime_type,
|
2017-10-09 12:20:09 +03:00
|
|
|
attributes=list(attr_dict.values()),
|
2018-01-03 14:47:38 +03:00
|
|
|
**input_kw
|
2017-08-23 01:27:33 +03:00
|
|
|
)
|
2017-08-23 01:01:10 +03:00
|
|
|
|
2017-08-23 01:27:33 +03:00
|
|
|
# Once the media type is properly specified and the file uploaded,
|
|
|
|
# send the media message to the desired entity.
|
2018-03-02 23:28:33 +03:00
|
|
|
request = SendMediaRequest(entity, media, reply_to_msg_id=reply_to,
|
|
|
|
message=caption, entities=msg_entities)
|
2018-01-18 21:36:47 +03:00
|
|
|
msg = self._get_response_message(request, self(request))
|
2018-01-18 22:08:05 +03:00
|
|
|
if msg and isinstance(file_handle, InputSizedFile):
|
2018-01-18 21:36:47 +03:00
|
|
|
# There was a response message and we didn't use cached
|
|
|
|
# version, so cache whatever we just sent to the database.
|
|
|
|
md5, size = file_handle.md5, file_handle.size
|
|
|
|
if as_image:
|
|
|
|
to_cache = utils.get_input_photo(msg.media.photo)
|
|
|
|
else:
|
|
|
|
to_cache = utils.get_input_document(msg.media.document)
|
|
|
|
self.session.cache_file(md5, size, to_cache)
|
|
|
|
|
|
|
|
return msg
|
2016-09-11 14:10:27 +03:00
|
|
|
|
2018-02-25 22:34:40 +03:00
|
|
|
def send_voice_note(self, *args, **kwargs):
|
2018-05-07 20:53:32 +03:00
|
|
|
"""Deprecated, see :meth:`send_file`."""
|
|
|
|
warnings.warn('send_voice_note is deprecated, use '
|
|
|
|
'send_file(..., voice_note=True) instead')
|
2018-02-27 12:05:27 +03:00
|
|
|
kwargs['voice_note'] = True
|
|
|
|
return self.send_file(*args, **kwargs)
|
2017-09-13 12:30:20 +03:00
|
|
|
|
2018-03-07 11:09:05 +03:00
|
|
|
def _send_album(self, entity, files, caption='',
|
2018-03-02 23:28:33 +03:00
|
|
|
progress_callback=None, reply_to=None,
|
|
|
|
parse_mode='md'):
|
2018-01-15 20:15:30 +03:00
|
|
|
"""Specialized version of .send_file for albums"""
|
2018-01-18 21:36:47 +03:00
|
|
|
# We don't care if the user wants to avoid cache, we will use it
|
|
|
|
# anyway. Why? The cached version will be exactly the same thing
|
|
|
|
# we need to produce right now to send albums (uploadMedia), and
|
|
|
|
# cache only makes a difference for documents where the user may
|
2018-03-02 23:28:33 +03:00
|
|
|
# want the attributes used on them to change.
|
2018-03-15 12:17:45 +03:00
|
|
|
#
|
|
|
|
# In theory documents can be sent inside the albums but they appear
|
|
|
|
# as different messages (not inside the album), and the logic to set
|
|
|
|
# the attributes/avoid cache is already written in .send_file().
|
2018-01-15 20:15:30 +03:00
|
|
|
entity = self.get_input_entity(entity)
|
2018-03-02 23:32:38 +03:00
|
|
|
if not utils.is_list_like(caption):
|
|
|
|
caption = (caption,)
|
|
|
|
captions = [
|
|
|
|
self._parse_message_text(caption or '', parse_mode)
|
|
|
|
for caption in reversed(caption) # Pop from the end (so reverse)
|
|
|
|
]
|
2018-02-15 13:19:34 +03:00
|
|
|
reply_to = self._get_message_id(reply_to)
|
2018-01-18 21:36:47 +03:00
|
|
|
|
|
|
|
# Need to upload the media first, but only if they're not cached yet
|
|
|
|
media = []
|
|
|
|
for file in files:
|
|
|
|
# fh will either be InputPhoto or a modified InputFile
|
|
|
|
fh = self.upload_file(file, use_cache=InputPhoto)
|
|
|
|
if not isinstance(fh, InputPhoto):
|
|
|
|
input_photo = utils.get_input_photo(self(UploadMediaRequest(
|
2018-03-02 23:28:33 +03:00
|
|
|
entity, media=InputMediaUploadedPhoto(fh)
|
2018-01-18 21:36:47 +03:00
|
|
|
)).photo)
|
|
|
|
self.session.cache_file(fh.md5, fh.size, input_photo)
|
|
|
|
fh = input_photo
|
2018-03-02 23:32:38 +03:00
|
|
|
|
|
|
|
if captions:
|
|
|
|
caption, msg_entities = captions.pop()
|
|
|
|
else:
|
|
|
|
caption, msg_entities = '', None
|
|
|
|
media.append(InputSingleMedia(InputMediaPhoto(fh), message=caption,
|
|
|
|
entities=msg_entities))
|
2018-01-18 21:36:47 +03:00
|
|
|
|
|
|
|
# Now we can construct the multi-media request
|
|
|
|
result = self(SendMultiMediaRequest(
|
|
|
|
entity, reply_to_msg_id=reply_to, multi_media=media
|
|
|
|
))
|
|
|
|
return [
|
|
|
|
self._get_response_message(update.id, result)
|
|
|
|
for update in result.updates
|
|
|
|
if isinstance(update, UpdateMessageID)
|
|
|
|
]
|
2018-01-15 20:15:30 +03:00
|
|
|
|
2018-01-18 15:55:03 +03:00
|
|
|
def upload_file(self,
|
|
|
|
file,
|
|
|
|
part_size_kb=None,
|
|
|
|
file_name=None,
|
2018-01-18 21:36:47 +03:00
|
|
|
use_cache=None,
|
2018-01-18 15:55:03 +03:00
|
|
|
progress_callback=None):
|
|
|
|
"""
|
|
|
|
Uploads the specified file and returns a handle (an instance of
|
2018-05-17 13:00:22 +03:00
|
|
|
:tl:`InputFile` or :tl:`InputFileBig`, as required) which can be
|
|
|
|
used later before it expires (such handles are usable for less than a day).
|
2018-01-18 15:55:03 +03:00
|
|
|
|
|
|
|
Uploading a file will simply return a "handle" to the file stored
|
|
|
|
remotely in the Telegram servers, which can be used later on. This
|
|
|
|
will **not** upload the file to your own chat or any chat at all.
|
|
|
|
|
|
|
|
Args:
|
2018-03-28 17:03:47 +03:00
|
|
|
file (`str` | `bytes` | `file`):
|
2018-01-18 15:55:03 +03:00
|
|
|
The path of the file, byte array, or stream that will be sent.
|
|
|
|
Note that if a byte array or a stream is given, a filename
|
|
|
|
or its type won't be inferred, and it will be sent as an
|
|
|
|
"unnamed application/octet-stream".
|
|
|
|
|
2018-03-28 17:03:47 +03:00
|
|
|
part_size_kb (`int`, optional):
|
2018-01-18 15:55:03 +03:00
|
|
|
Chunk size when uploading files. The larger, the less
|
|
|
|
requests will be made (up to 512KB maximum).
|
|
|
|
|
2018-03-28 17:03:47 +03:00
|
|
|
file_name (`str`, optional):
|
2018-01-18 15:55:03 +03:00
|
|
|
The file name which will be used on the resulting InputFile.
|
|
|
|
If not specified, the name will be taken from the ``file``
|
|
|
|
and if this is not a ``str``, it will be ``"unnamed"``.
|
|
|
|
|
2018-03-28 17:03:47 +03:00
|
|
|
use_cache (`type`, optional):
|
2018-05-17 13:00:22 +03:00
|
|
|
The type of cache to use (currently either :tl:`InputDocument`
|
|
|
|
or :tl:`InputPhoto`). If present and the file is small enough
|
2018-01-18 21:36:47 +03:00
|
|
|
to need the MD5, it will be checked against the database,
|
|
|
|
and if a match is found, the upload won't be made. Instead,
|
|
|
|
an instance of type ``use_cache`` will be returned.
|
2018-01-18 15:55:03 +03:00
|
|
|
|
2018-03-28 17:03:47 +03:00
|
|
|
progress_callback (`callable`, optional):
|
2018-01-18 15:55:03 +03:00
|
|
|
A callback function accepting two parameters:
|
|
|
|
``(sent bytes, total)``.
|
|
|
|
|
|
|
|
Returns:
|
2018-03-23 23:40:24 +03:00
|
|
|
:tl:`InputFileBig` if the file size is larger than 10MB,
|
2018-05-17 13:00:22 +03:00
|
|
|
`telethon.tl.custom.input_sized_file.InputSizedFile`
|
|
|
|
(subclass of :tl:`InputFile`) otherwise.
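
Example:
    A minimal sketch; ``client`` is assumed to be a connected,
    authorized instance of this class and the path is made up::

        def callback(sent, total):
            print('Uploaded', sent, 'out of', total, 'bytes')

        handle = client.upload_file('/path/to/file.zip',
                                    progress_callback=callback)
        # The handle may then be reused where an uploaded file is
        # expected (e.g. with `send_file`) until it expires.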
|
2018-01-18 15:55:03 +03:00
|
|
|
"""
|
|
|
|
if isinstance(file, (InputFile, InputFileBig)):
|
|
|
|
return file # Already uploaded
|
|
|
|
|
|
|
|
if isinstance(file, str):
|
|
|
|
file_size = os.path.getsize(file)
|
|
|
|
elif isinstance(file, bytes):
|
|
|
|
file_size = len(file)
|
|
|
|
else:
|
|
|
|
file = file.read()
|
|
|
|
file_size = len(file)
|
|
|
|
|
|
|
|
# File will now either be a string or bytes
|
|
|
|
if not part_size_kb:
|
|
|
|
part_size_kb = utils.get_appropriated_part_size(file_size)
|
|
|
|
|
|
|
|
if part_size_kb > 512:
|
|
|
|
raise ValueError('The part size must be less than or equal to 512KB')
|
|
|
|
|
|
|
|
part_size = int(part_size_kb * 1024)
|
|
|
|
if part_size % 1024 != 0:
|
|
|
|
raise ValueError(
|
|
|
|
'The part size must be evenly divisible by 1024')
|
|
|
|
|
|
|
|
# Set a default file name if None was specified
|
|
|
|
file_id = helpers.generate_random_long()
|
|
|
|
if not file_name:
|
|
|
|
if isinstance(file, str):
|
|
|
|
file_name = os.path.basename(file)
|
|
|
|
else:
|
|
|
|
file_name = str(file_id)
|
|
|
|
|
|
|
|
# Determine whether the file is too big (over 10MB) or not
|
|
|
|
# Telegram does make a distinction between smaller or larger files
|
|
|
|
is_large = file_size > 10 * 1024 * 1024
|
2018-01-18 21:36:47 +03:00
|
|
|
hash_md5 = hashlib.md5()
|
2018-01-18 15:55:03 +03:00
|
|
|
if not is_large:
|
|
|
|
# Calculate the MD5 hash before anything else.
|
|
|
|
# As this needs to be done always for small files,
|
|
|
|
# might as well do it before anything else and
|
|
|
|
# check the cache.
|
|
|
|
if isinstance(file, str):
|
|
|
|
with open(file, 'rb') as stream:
|
|
|
|
file = stream.read()
|
2018-01-18 21:36:47 +03:00
|
|
|
hash_md5.update(file)
|
|
|
|
if use_cache:
|
|
|
|
cached = self.session.get_file(
|
|
|
|
hash_md5.digest(), file_size, cls=use_cache
|
|
|
|
)
|
|
|
|
if cached:
|
|
|
|
return cached
|
2018-01-18 15:55:03 +03:00
|
|
|
|
|
|
|
part_count = (file_size + part_size - 1) // part_size
|
|
|
|
__log__.info('Uploading file of %d bytes in %d chunks of %d',
|
|
|
|
file_size, part_count, part_size)
|
|
|
|
|
|
|
|
with open(file, 'rb') if isinstance(file, str) else BytesIO(file) \
|
|
|
|
as stream:
|
|
|
|
for part_index in range(part_count):
|
|
|
|
# Read the file in chunks of size part_size
|
|
|
|
part = stream.read(part_size)
|
|
|
|
|
|
|
|
# The SavePartRequest is different depending on whether
|
|
|
|
# the file is too large or not (over or less than 10MB)
|
|
|
|
if is_large:
|
|
|
|
request = SaveBigFilePartRequest(file_id, part_index,
|
|
|
|
part_count, part)
|
|
|
|
else:
|
|
|
|
request = SaveFilePartRequest(file_id, part_index, part)
|
|
|
|
|
|
|
|
result = self(request)
|
|
|
|
if result:
|
|
|
|
__log__.debug('Uploaded %d/%d', part_index + 1,
|
|
|
|
part_count)
|
|
|
|
if progress_callback:
|
|
|
|
progress_callback(stream.tell(), file_size)
|
|
|
|
else:
|
|
|
|
raise RuntimeError(
|
|
|
|
'Failed to upload file part {}.'.format(part_index))
|
|
|
|
|
|
|
|
if is_large:
|
2018-01-18 22:08:05 +03:00
|
|
|
return InputFileBig(file_id, part_count, file_name)
|
2018-01-18 15:55:03 +03:00
|
|
|
else:
|
2018-01-18 22:08:05 +03:00
|
|
|
return InputSizedFile(
|
|
|
|
file_id, part_count, file_name, md5=hash_md5, size=file_size
|
|
|
|
)
|
2018-01-18 15:55:03 +03:00
|
|
|
|
2016-09-12 20:32:16 +03:00
|
|
|
# endregion
|
|
|
|
|
|
|
|
# region Downloading media requests
|
|
|
|
|
2017-08-24 18:44:38 +03:00
|
|
|
def download_profile_photo(self, entity, file=None, download_big=True):
|
2017-10-21 17:59:20 +03:00
|
|
|
"""
|
|
|
|
Downloads the profile photo of the given entity (user/chat/channel).
|
|
|
|
|
2018-01-05 15:30:21 +03:00
|
|
|
Args:
|
2018-03-28 17:03:47 +03:00
|
|
|
entity (`entity`):
|
2018-01-05 15:30:21 +03:00
|
|
|
From who the photo will be downloaded.
|
|
|
|
|
2018-03-28 17:03:47 +03:00
|
|
|
file (`str` | `file`, optional):
|
2018-01-05 15:30:21 +03:00
|
|
|
The output file path, directory, or stream-like object.
|
|
|
|
If the path exists and is a file, it will be overwritten.
|
|
|
|
|
2018-03-28 17:03:47 +03:00
|
|
|
download_big (`bool`, optional):
|
2018-01-05 15:30:21 +03:00
|
|
|
Whether to use the big version of the available photos.
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
``None`` if no photo was provided, or if it was Empty. On success
|
2017-10-21 17:59:20 +03:00
|
|
|
the file path is returned since it may differ from the one given.
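
Example:
    A minimal sketch; ``client`` is assumed to be a connected,
    authorized instance of this class, and ``'me'`` is assumed to
    resolve to your own user::

        # Save your own profile photo in the working directory
        path = client.download_profile_photo('me')
        print(path)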
|
2017-08-23 02:35:12 +03:00
|
|
|
"""
|
2018-05-06 12:41:42 +03:00
|
|
|
# hex(crc32(x.encode('ascii'))) for x in
|
|
|
|
# ('User', 'Chat', 'UserFull', 'ChatFull')
|
|
|
|
ENTITIES = (0x2da17977, 0xc5af5d94, 0x1f4661b9, 0xd49a2697)
|
|
|
|
# ('InputPeer', 'InputUser', 'InputChannel')
|
|
|
|
INPUTS = (0xc91c90b6, 0xe669bf46, 0x40f202fd)
|
|
|
|
if not isinstance(entity, TLObject) or entity.SUBCLASS_OF_ID in INPUTS:
|
2017-09-11 12:54:32 +03:00
|
|
|
entity = self.get_entity(entity)
|
2018-05-06 12:41:42 +03:00
|
|
|
|
|
|
|
possible_names = []
|
|
|
|
if entity.SUBCLASS_OF_ID not in ENTITIES:
|
|
|
|
photo = entity
|
|
|
|
else:
|
2017-08-23 02:35:12 +03:00
|
|
|
if not hasattr(entity, 'photo'):
|
|
|
|
# Special case: may be a ChatFull with photo:Photo
|
|
|
|
# This is different from a normal UserProfilePhoto and Chat
|
2018-05-06 12:41:42 +03:00
|
|
|
if not hasattr(entity, 'chat_photo'):
|
2017-08-23 02:35:12 +03:00
|
|
|
return None
|
|
|
|
|
2018-05-06 12:41:42 +03:00
|
|
|
return self._download_photo(entity.chat_photo, file,
|
|
|
|
date=None, progress_callback=None)
|
|
|
|
|
2017-08-24 18:44:38 +03:00
|
|
|
for attr in ('username', 'first_name', 'title'):
|
|
|
|
possible_names.append(getattr(entity, attr, None))
|
2017-08-23 02:35:12 +03:00
|
|
|
|
2017-12-28 01:54:31 +03:00
|
|
|
photo = entity.photo
|
2017-08-23 02:35:12 +03:00
|
|
|
|
2018-05-06 12:41:42 +03:00
|
|
|
if isinstance(photo, (UserProfilePhoto, ChatPhoto)):
|
|
|
|
loc = photo.photo_big if download_big else photo.photo_small
|
|
|
|
else:
|
|
|
|
try:
|
|
|
|
loc = utils.get_input_location(photo)
|
|
|
|
except TypeError:
|
|
|
|
return None
|
2017-08-23 02:35:12 +03:00
|
|
|
|
2017-08-24 18:44:38 +03:00
|
|
|
file = self._get_proper_filename(
|
|
|
|
file, 'profile_photo', '.jpg',
|
|
|
|
possible_names=possible_names
|
|
|
|
)
|
|
|
|
|
2017-12-28 01:54:31 +03:00
|
|
|
try:
|
2018-05-06 12:46:04 +03:00
|
|
|
self.download_file(loc, file)
|
2018-05-06 12:41:42 +03:00
|
|
|
return file
|
2017-12-28 01:54:31 +03:00
|
|
|
except LocationInvalidError:
|
|
|
|
# See issue #500, Android app fails as of v4.6.0 (1155).
|
|
|
|
# The fix seems to be using the full channel chat photo.
|
|
|
|
ie = self.get_input_entity(entity)
|
|
|
|
if isinstance(ie, InputPeerChannel):
|
|
|
|
full = self(GetFullChannelRequest(ie))
|
|
|
|
return self._download_photo(
|
|
|
|
full.full_chat.chat_photo, file,
|
|
|
|
date=None, progress_callback=None
|
|
|
|
)
|
|
|
|
else:
|
|
|
|
# Until there's a report for chats, no need to.
|
|
|
|
return None
|
2016-10-03 20:44:01 +03:00
|
|
|
|
2017-08-24 18:44:38 +03:00
|
|
|
def download_media(self, message, file=None, progress_callback=None):
|
2017-10-21 17:59:20 +03:00
|
|
|
"""
|
|
|
|
Downloads the given media, or the media from a specified Message.
|
2018-01-05 15:30:21 +03:00
|
|
|
|
2018-04-14 13:03:08 +03:00
|
|
|
Note that if the download is too slow, you should consider installing
|
|
|
|
``cryptg`` (through ``pip install cryptg``) so that decrypting the
|
|
|
|
received data is done in C instead of Python (much faster).
|
|
|
|
|
2018-03-23 23:40:24 +03:00
|
|
|
Args:
    message (:tl:`Message` | :tl:`Media`):
|
2017-10-21 17:59:20 +03:00
|
|
|
The media or message containing the media that will be downloaded.
|
2018-01-05 15:30:21 +03:00
|
|
|
|
2018-03-28 17:03:47 +03:00
|
|
|
file (`str` | `file`, optional):
|
2017-10-21 17:59:20 +03:00
|
|
|
The output file path, directory, or stream-like object.
|
|
|
|
If the path exists and is a file, it will be overwritten.
|
2018-01-05 15:30:21 +03:00
|
|
|
|
2018-03-28 17:03:47 +03:00
|
|
|
progress_callback (`callable`, optional):
|
2018-01-05 15:30:21 +03:00
|
|
|
A callback function accepting two parameters:
|
2018-03-23 23:40:24 +03:00
|
|
|
``(received bytes, total)``.
|
2018-01-05 15:30:21 +03:00
|
|
|
|
|
|
|
Returns:
|
|
|
|
``None`` if no media was provided, or if it was Empty. On success
|
|
|
|
the file path is returned since it may differ from the one given.
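
Example:
    A minimal sketch; ``client`` is assumed to be a connected,
    authorized instance of this class and ``message`` any message
    with media (e.g. obtained through `get_messages`)::

        # Save the media into the "downloads" directory
        path = client.download_media(message, file='downloads/')
        print(path)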
|
2017-07-23 19:38:27 +03:00
|
|
|
"""
|
2017-08-23 01:48:00 +03:00
|
|
|
# TODO This won't work for messageService
|
|
|
|
if isinstance(message, Message):
|
|
|
|
date = message.date
|
2017-08-24 18:44:38 +03:00
|
|
|
media = message.media
|
2017-08-23 01:48:00 +03:00
|
|
|
else:
|
|
|
|
date = datetime.now()
|
2017-08-24 18:44:38 +03:00
|
|
|
media = message
|
2017-08-23 01:48:00 +03:00
|
|
|
|
2018-04-14 14:07:50 +03:00
|
|
|
if isinstance(media, MessageMediaWebPage):
|
|
|
|
if isinstance(media.webpage, WebPage):
|
|
|
|
media = media.webpage.document or media.webpage.photo
|
|
|
|
|
2018-03-07 13:45:37 +03:00
|
|
|
if isinstance(media, (MessageMediaPhoto, Photo,
|
|
|
|
PhotoSize, PhotoCachedSize)):
|
2017-08-23 01:48:00 +03:00
|
|
|
return self._download_photo(
|
2017-08-24 18:44:38 +03:00
|
|
|
media, file, date, progress_callback
|
2017-08-23 01:48:00 +03:00
|
|
|
)
|
2018-01-23 14:10:23 +03:00
|
|
|
elif isinstance(media, (MessageMediaDocument, Document)):
|
2017-08-23 01:48:00 +03:00
|
|
|
return self._download_document(
|
2017-08-24 18:44:38 +03:00
|
|
|
media, file, date, progress_callback
|
2017-08-23 01:48:00 +03:00
|
|
|
)
|
2017-08-24 18:44:38 +03:00
|
|
|
elif isinstance(media, MessageMediaContact):
|
2017-08-23 01:48:00 +03:00
|
|
|
return self._download_contact(
|
2017-08-24 18:44:38 +03:00
|
|
|
media, file
|
2017-08-23 01:48:00 +03:00
|
|
|
)
|
|
|
|
|
2017-12-28 14:11:31 +03:00
|
|
|
def _download_photo(self, photo, file, date, progress_callback):
|
2017-08-23 01:48:00 +03:00
|
|
|
"""Specialized version of .download_media() for photos"""
|
2016-09-11 17:24:03 +03:00
|
|
|
# Determine the photo and its largest size
|
2017-12-28 14:11:31 +03:00
|
|
|
if isinstance(photo, MessageMediaPhoto):
|
|
|
|
photo = photo.photo
|
2018-03-07 13:45:37 +03:00
|
|
|
if isinstance(photo, Photo):
|
|
|
|
for size in reversed(photo.sizes):
|
|
|
|
if not isinstance(size, PhotoSizeEmpty):
|
|
|
|
photo = size
|
|
|
|
break
|
|
|
|
else:
|
|
|
|
return
|
|
|
|
if not isinstance(photo, (PhotoSize, PhotoCachedSize)):
|
2017-12-28 14:11:31 +03:00
|
|
|
return
|
|
|
|
|
2017-08-24 18:44:38 +03:00
|
|
|
file = self._get_proper_filename(file, 'photo', '.jpg', date=date)
|
2018-03-07 13:45:37 +03:00
|
|
|
if isinstance(photo, PhotoCachedSize):
|
|
|
|
# No need to download anything, simply write the bytes
|
|
|
|
if isinstance(file, str):
|
|
|
|
helpers.ensure_parent_dir_exists(file)
|
|
|
|
f = open(file, 'wb')
|
|
|
|
else:
|
|
|
|
f = file
|
|
|
|
try:
|
|
|
|
f.write(photo.bytes)
|
|
|
|
finally:
|
|
|
|
if isinstance(file, str):
|
|
|
|
f.close()
|
|
|
|
return file
|
2016-09-10 19:05:20 +03:00
|
|
|
|
2018-05-06 12:46:04 +03:00
|
|
|
self.download_file(photo.location, file, file_size=photo.size,
|
|
|
|
progress_callback=progress_callback)
|
2017-07-20 10:37:19 +03:00
|
|
|
return file
|
2016-09-12 20:32:16 +03:00
|
|
|
|
2018-01-23 14:10:23 +03:00
|
|
|
def _download_document(self, document, file, date, progress_callback):
|
2018-03-22 20:39:42 +03:00
|
|
|
"""Specialized version of .download_media() for documents."""
|
2018-01-23 14:10:23 +03:00
|
|
|
if isinstance(document, MessageMediaDocument):
|
|
|
|
document = document.document
|
|
|
|
if not isinstance(document, Document):
|
|
|
|
return
|
|
|
|
|
2016-09-17 18:04:30 +03:00
|
|
|
file_size = document.size
|
2016-09-12 20:32:16 +03:00
|
|
|
|
2018-02-24 19:40:12 +03:00
|
|
|
kind = 'document'
|
2017-08-24 18:44:38 +03:00
|
|
|
possible_names = []
|
|
|
|
for attr in document.attributes:
|
|
|
|
if isinstance(attr, DocumentAttributeFilename):
|
|
|
|
possible_names.insert(0, attr.file_name)
|
2016-09-12 20:32:16 +03:00
|
|
|
|
2017-08-24 18:44:38 +03:00
|
|
|
elif isinstance(attr, DocumentAttributeAudio):
|
2018-02-24 19:40:12 +03:00
|
|
|
kind = 'audio'
|
|
|
|
if attr.performer and attr.title:
|
|
|
|
possible_names.append('{} - {}'.format(
|
|
|
|
attr.performer, attr.title
|
|
|
|
))
|
|
|
|
elif attr.performer:
|
|
|
|
possible_names.append(attr.performer)
|
|
|
|
elif attr.title:
|
|
|
|
possible_names.append(attr.title)
|
|
|
|
elif attr.voice:
|
|
|
|
kind = 'voice'
|
2016-09-12 20:32:16 +03:00
|
|
|
|
2017-08-24 18:44:38 +03:00
|
|
|
file = self._get_proper_filename(
|
2018-02-24 19:40:12 +03:00
|
|
|
file, kind, utils.get_extension(document),
|
2017-08-24 18:44:38 +03:00
|
|
|
date=date, possible_names=possible_names
|
|
|
|
)
|
2016-09-12 20:32:16 +03:00
|
|
|
|
2018-05-06 12:46:04 +03:00
|
|
|
self.download_file(document, file, file_size=file_size,
|
|
|
|
progress_callback=progress_callback)
|
2017-07-20 10:37:19 +03:00
|
|
|
return file
|
2016-09-12 20:32:16 +03:00
|
|
|
|
|
|
|
@staticmethod
|
2017-08-24 18:44:38 +03:00
|
|
|
def _download_contact(mm_contact, file):
|
2017-08-23 01:48:00 +03:00
|
|
|
"""Specialized version of .download_media() for contacts.
|
2018-03-22 20:39:42 +03:00
|
|
|
Will make use of the vCard 4.0 format.
|
2017-08-23 01:48:00 +03:00
|
|
|
"""
|
2017-08-24 18:44:38 +03:00
|
|
|
first_name = mm_contact.first_name
|
|
|
|
last_name = mm_contact.last_name
|
|
|
|
phone_number = mm_contact.phone_number
|
2016-09-12 20:32:16 +03:00
|
|
|
|
2017-07-20 10:37:19 +03:00
|
|
|
if isinstance(file, str):
|
2017-08-24 18:44:38 +03:00
|
|
|
file = TelegramClient._get_proper_filename(
|
|
|
|
file, 'contact', '.vcard',
|
|
|
|
possible_names=[first_name, phone_number, last_name]
|
|
|
|
)
|
2017-07-23 18:08:04 +03:00
|
|
|
f = open(file, 'w', encoding='utf-8')
|
2017-07-20 10:37:19 +03:00
|
|
|
else:
|
2017-07-23 18:08:04 +03:00
|
|
|
f = file
|
|
|
|
|
|
|
|
try:
|
2017-10-28 12:09:46 +03:00
|
|
|
# Remove these pesky characters
|
2017-10-28 12:11:51 +03:00
|
|
|
first_name = first_name.replace(';', '')
|
|
|
|
last_name = (last_name or '').replace(';', '')
|
2017-07-23 18:08:04 +03:00
|
|
|
f.write('BEGIN:VCARD\n')
|
|
|
|
f.write('VERSION:4.0\n')
|
2017-10-29 22:10:29 +03:00
|
|
|
f.write('N:{};{};;;\n'.format(first_name, last_name))
|
2017-10-28 12:11:51 +03:00
|
|
|
f.write('FN:{} {}\n'.format(first_name, last_name))
|
2017-10-29 22:10:29 +03:00
|
|
|
f.write('TEL;TYPE=cell;VALUE=uri:tel:+{}\n'.format(phone_number))
|
2017-07-23 18:08:04 +03:00
|
|
|
f.write('END:VCARD\n')
|
|
|
|
finally:
|
|
|
|
# Only close the stream if we opened it
|
|
|
|
if isinstance(file, str):
|
|
|
|
f.close()
|
2016-09-12 20:32:16 +03:00
|
|
|
|
2017-07-20 10:37:19 +03:00
|
|
|
return file
|
2016-09-12 20:32:16 +03:00
|
|
|
|
2017-08-24 18:44:38 +03:00
|
|
|
@staticmethod
|
|
|
|
def _get_proper_filename(file, kind, extension,
|
|
|
|
date=None, possible_names=None):
|
|
|
|
"""Gets a proper filename for 'file', if this is a path.
|
|
|
|
|
|
|
|
'kind' should be the kind of the output file (photo, document...)
|
|
|
|
'extension' should be the extension to be added to the file if
|
|
|
|
the filename doesn't have any yet
|
|
|
|
'date' should be when this file was originally sent, if known
|
|
|
|
'possible_names' should be an ordered list of possible names
|
|
|
|
|
|
|
|
If no modification is made to the path, any existing file
|
|
|
|
will be overwritten.
|
|
|
|
If any modification is made to the path, this method will
|
|
|
|
ensure that no existing file will be overwritten.
|
|
|
|
"""
|
|
|
|
if file is not None and not isinstance(file, str):
|
|
|
|
# Probably a stream-like object, we cannot set a filename here
|
|
|
|
return file
|
|
|
|
|
|
|
|
if file is None:
|
|
|
|
file = ''
|
|
|
|
elif os.path.isfile(file):
|
|
|
|
# Make no modifications to valid existing paths
|
|
|
|
return file
|
|
|
|
|
|
|
|
if os.path.isdir(file) or not file:
|
|
|
|
try:
|
|
|
|
name = None if possible_names is None else next(
|
|
|
|
x for x in possible_names if x
|
|
|
|
)
|
|
|
|
except StopIteration:
|
|
|
|
name = None
|
|
|
|
|
|
|
|
if not name:
|
2017-12-27 16:52:33 +03:00
|
|
|
if not date:
|
|
|
|
date = datetime.now()
|
2017-08-24 18:44:38 +03:00
|
|
|
name = '{}_{}-{:02}-{:02}_{:02}-{:02}-{:02}'.format(
|
|
|
|
kind,
|
|
|
|
date.year, date.month, date.day,
|
|
|
|
date.hour, date.minute, date.second,
|
|
|
|
)
|
|
|
|
file = os.path.join(file, name)
|
|
|
|
|
|
|
|
directory, name = os.path.split(file)
|
|
|
|
name, ext = os.path.splitext(name)
|
|
|
|
if not ext:
|
|
|
|
ext = extension
|
|
|
|
|
|
|
|
result = os.path.join(directory, name + ext)
|
|
|
|
if not os.path.isfile(result):
|
|
|
|
return result
|
|
|
|
|
|
|
|
i = 1
|
|
|
|
while True:
|
|
|
|
result = os.path.join(directory, '{} ({}){}'.format(name, i, ext))
|
|
|
|
if not os.path.isfile(result):
|
|
|
|
return result
|
|
|
|
i += 1
|
|
|
|
|

    def download_file(self,
                      input_location,
                      file=None,
                      part_size_kb=None,
                      file_size=None,
                      progress_callback=None):
        """
        Downloads the given input location to a file.

        Args:
            input_location (:tl:`FileLocation` | :tl:`InputFileLocation`):
                The file location from which the file will be downloaded.
                See `telethon.utils.get_input_location` source for a complete
                list of supported types.

            file (`str` | `file`, optional):
                The output file path, directory, or stream-like object.
                If the path exists and is a file, it will be overwritten.

                If the file path is ``None``, then the result will be
                saved in memory and returned as `bytes`.

            part_size_kb (`int`, optional):
                Chunk size when downloading files. The larger, the fewer
                requests will be made (up to 512KB maximum).

            file_size (`int`, optional):
                The file size that is about to be downloaded, if known.
                Only used if ``progress_callback`` is specified.

            progress_callback (`callable`, optional):
                A callback function accepting two parameters:
                ``(downloaded bytes, total)``. Note that the
                ``total`` is the provided ``file_size``.
        """
        if not part_size_kb:
            if not file_size:
                part_size_kb = 64  # Reasonable default
            else:
                part_size_kb = utils.get_appropriated_part_size(file_size)

        part_size = int(part_size_kb * 1024)
        # https://core.telegram.org/api/files says:
        # > part_size % 1024 = 0 (divisible by 1KB)
        #
        # But https://core.telegram.org/cdn (more recent) says:
        # > limit must be divisible by 4096 bytes
        # So we just stick to the 4096 limit.
        if part_size % 4096 != 0:
            raise ValueError(
                'The part size must be evenly divisible by 4096.')

        in_memory = file is None
        if in_memory:
            f = io.BytesIO()
        elif isinstance(file, str):
            # Ensure that we'll be able to download the media
            helpers.ensure_parent_dir_exists(file)
            f = open(file, 'wb')
        else:
            f = file

        # The used client will change if FileMigrateError occurs
        client = self
        cdn_decrypter = None
        input_location = utils.get_input_location(input_location)

        __log__.info('Downloading file in chunks of %d bytes', part_size)
        try:
            offset = 0
            while True:
                try:
                    if cdn_decrypter:
                        result = cdn_decrypter.get_file()
                    else:
                        result = client(GetFileRequest(
                            input_location, offset, part_size
                        ))

                        if isinstance(result, FileCdnRedirect):
                            __log__.info('File lives in a CDN')
                            cdn_decrypter, result = \
                                CdnDecrypter.prepare_decrypter(
                                    client, self._get_cdn_client(result),
                                    result
                                )

                except FileMigrateError as e:
                    __log__.info('File lives in another DC')
                    client = self._get_exported_client(e.new_dc)
                    continue

                offset += part_size

                # If we have received no data (0 bytes), the file is over
                # So there is nothing left to download and write
                if not result.bytes:
                    # Return some extra information, unless it's a CDN file
                    if in_memory:
                        f.flush()
                        return f.getvalue()
                    else:
                        return getattr(result, 'type', '')

                f.write(result.bytes)
                __log__.debug('Saved %d more bytes', len(result.bytes))
                if progress_callback:
                    progress_callback(f.tell(), file_size)
        finally:
            if client != self:
                client.disconnect()

            if cdn_decrypter:
                try:
                    cdn_decrypter.client.disconnect()
                except:
                    pass
            if isinstance(file, str) or in_memory:
                f.close()
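
    # Illustrative usage sketch (not from the original source). Assuming an
    # authorized `client` and an `input_location` accepted by
    # `telethon.utils.get_input_location`, the three output modes are:
    #
    #     data = client.download_file(input_location)            # bytes in memory
    #     client.download_file(input_location, file='out.jpg')   # to a path
    #     with open('out.jpg', 'wb') as fd:
    #         client.download_file(input_location, file=fd)      # to a stream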

    # endregion

    # endregion

    # region Event handling

    def on(self, event):
        """
        Decorator helper method around add_event_handler().

        Args:
            event (`_EventBuilder` | `type`):
                The event builder class or instance to be used,
                for instance ``events.NewMessage``.
        """
        def decorator(f):
            self.add_event_handler(f, event)
            return f

        return decorator
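
    # Illustrative usage sketch (not from the original source); the handler
    # name and reply text are placeholders:
    #
    #     @client.on(events.NewMessage)
    #     def my_handler(event):
    #         event.reply('hello')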

    def _check_events_pending_resolve(self):
        if self._events_pending_resolve:
            for event in self._events_pending_resolve:
                event.resolve(self)
            self._events_pending_resolve.clear()

    def _on_handler(self, update):
        for builder, callback in self._event_builders:
            event = builder.build(update)
            if event:
                event._client = self
                event.original_update = update
                try:
                    callback(event)
                except events.StopPropagation:
                    __log__.debug(
                        "Event handler '{}' stopped chain of "
                        "propagation for event {}."
                        .format(callback.__name__, type(event).__name__)
                    )
                    break
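
    # Illustrative sketch (not from the original source) of the propagation
    # behaviour handled above; the handler names are placeholders. Handlers
    # run in registration order, and raising ``events.StopPropagation`` stops
    # later handlers from seeing the same update:
    #
    #     def first_handler(event):
    #         ...
    #         raise events.StopPropagation
    #
    #     def second_handler(event):
    #         ...  # skipped whenever first_handler stops propagation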

    def add_event_handler(self, callback, event=None):
        """
        Registers the given callback to be called on the specified event.

        Args:
            callback (`callable`):
                The callable function to invoke, which accepts the event
                object as its only parameter.

            event (`_EventBuilder` | `type`, optional):
                The event builder class or instance to be used,
                for instance ``events.NewMessage``.

                If left unspecified, `telethon.events.raw.Raw` (the
                :tl:`Update` objects with no further processing) will
                be passed instead.
        """
        if self.updates.workers is None:
            warnings.warn(
                "You have not set up any workers, so you won't receive updates."
                " Pass update_workers=1 when creating the TelegramClient,"
                " or set client.updates.workers = 1"
            )

        self.updates.handler = self._on_handler
        if isinstance(event, type):
            event = event()
        elif not event:
            event = events.Raw()

        if self.is_user_authorized():
            event.resolve(self)
            self._check_events_pending_resolve()
        else:
            self._events_pending_resolve.append(event)

        self._event_builders.append((event, callback))
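
    # Illustrative usage sketch (not from the original source); `callback` is
    # a placeholder handler and `client` is assumed to have been created with
    # ``update_workers=1`` and to be authorized:
    #
    #     def callback(event):
    #         print(event)
    #
    #     client.add_event_handler(callback, events.NewMessage)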

    def remove_event_handler(self, callback, event=None):
        """
        Inverse operation of :meth:`add_event_handler`.

        If no event is given, all events for this callback are removed.
        Returns how many callbacks were removed.
        """
        found = 0
        if event and not isinstance(event, type):
            event = type(event)

        i = len(self._event_builders)
        while i:
            i -= 1
            ev, cb = self._event_builders[i]
            if cb == callback and (not event or isinstance(ev, event)):
                del self._event_builders[i]
                found += 1

        return found

    def list_event_handlers(self):
        """
        Lists all added event handlers, returning a list of pairs
        consisting of (callback, event).
        """
        return [(callback, event) for event, callback in self._event_builders]
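
    # Illustrative sketch (not from the original source); `callback` is the
    # same placeholder handler registered above:
    #
    #     client.list_event_handlers()           # -> [(callback, event)]
    #     client.remove_event_handler(callback)  # -> 1 (handlers removed)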

    def add_update_handler(self, handler):
        """Deprecated, see :meth:`add_event_handler`."""
        warnings.warn(
            'add_update_handler is deprecated, use the @client.on syntax '
            'or add_event_handler(callback, events.Raw) instead (see '
            'https://telethon.rtfd.io/en/latest/extra/basic/working-'
            'with-updates.html)'
        )
        return self.add_event_handler(handler, events.Raw)

    def remove_update_handler(self, handler):
        """Deprecated, see :meth:`remove_event_handler`."""
        return self.remove_event_handler(handler)

    def list_update_handlers(self):
        """Deprecated, see :meth:`list_event_handlers`."""
        return [callback for callback, _ in self.list_event_handlers()]

    def catch_up(self):
        """
        Fetches the update difference since the last known state and
        processes it, so that updates missed while offline are handled.
        """
        state = self.session.get_update_state(0)
        if not state or not state.pts:
            return

        self.session.catching_up = True
        try:
            while True:
                d = self(GetDifferenceRequest(state.pts, state.date, state.qts))
                if isinstance(d, DifferenceEmpty):
                    state.date = d.date
                    state.seq = d.seq
                    break
                elif isinstance(d, (DifferenceSlice, Difference)):
                    if isinstance(d, Difference):
                        state = d.state
                    elif d.intermediate_state.pts > state.pts:
                        state = d.intermediate_state
                    else:
                        # TODO Figure out why other applications can rely on
                        # using always the intermediate_state to eventually
                        # reach a DifferenceEmpty, but that leads to an
                        # infinite loop here (so check against old pts to stop)
                        break

                    self.updates.process(Updates(
                        users=d.users,
                        chats=d.chats,
                        date=state.date,
                        seq=state.seq,
                        updates=d.other_updates + [UpdateNewMessage(m, 0, 0)
                                                   for m in d.new_messages]
                    ))
                elif isinstance(d, DifferenceTooLong):
                    break
        finally:
            self.session.set_update_state(0, state)
            self.session.catching_up = False
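
    # Illustrative usage sketch (not from the original source): after
    # reconnecting with a session that saved an update state, pending
    # updates can be replayed through the registered handlers explicitly:
    #
    #     client.connect()
    #     client.catch_up()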

    # endregion

    # region Small utilities to make users' life easier

    def _set_connected_and_authorized(self):
        super()._set_connected_and_authorized()
        self._check_events_pending_resolve()

    def get_entity(self, entity):
        """
        Turns the given entity into a valid Telegram user or chat.

        Args:
            entity (`str` | `int` | :tl:`Peer` | :tl:`InputPeer`):
                The entity (or iterable of entities) to be transformed.
                If it's a string which can be converted to an integer or
                starts with '+' it will be resolved as if it were a
                phone number.

                If it doesn't start with '+' or starts with a '@' it will
                be resolved from the username. If no exact match is
                returned, an error will be raised.

                If the entity is an integer or a Peer, its information
                will be returned through a call to
                ``self.get_input_entity(entity)``.

                If the entity is neither, and it's not a TLObject, an
                error will be raised.

        Returns:
            :tl:`User`, :tl:`Chat` or :tl:`Channel` corresponding to the
            input entity. A list will be returned if more than one was given.
        """
        single = not utils.is_list_like(entity)
        if single:
            entity = (entity,)

        # Group input entities by string (resolve username),
        # input users (get users), input chat (get chats) and
        # input channels (get channels) to get the most entities
        # in as few calls as possible.
        inputs = [
            x if isinstance(x, str) else self.get_input_entity(x)
            for x in entity
        ]
        users = [x for x in inputs
                 if isinstance(x, (InputPeerUser, InputPeerSelf))]
        chats = [x.chat_id for x in inputs if isinstance(x, InputPeerChat)]
        channels = [x for x in inputs if isinstance(x, InputPeerChannel)]
        if users:
            # GetUsersRequest has a limit of 200 per call
            tmp = []
            while users:
                curr, users = users[:200], users[200:]
                tmp.extend(self(GetUsersRequest(curr)))
            users = tmp
        if chats:  # TODO Handle chats slice?
            chats = self(GetChatsRequest(chats)).chats
        if channels:
            channels = self(GetChannelsRequest(channels)).chats

        # Merge users, chats and channels into a single dictionary
        id_entity = {
            utils.get_peer_id(x): x
            for x in itertools.chain(users, chats, channels)
        }

        # We could check saved usernames and put them into the users,
        # chats and channels list from before. While this would reduce
        # the amount of ResolveUsername calls, it would fail to catch
        # username changes.
        result = [
            self._get_entity_from_string(x) if isinstance(x, str)
            else (
                id_entity[utils.get_peer_id(x)]
                if not isinstance(x, InputPeerSelf)
                else next(u for u in id_entity.values()
                          if isinstance(u, User) and u.is_self)
            )
            for x in inputs
        ]
        return result[0] if single else result
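
    # Illustrative usage sketch (not from the original source); the username,
    # phone number and `peer_id` below are placeholders:
    #
    #     user = client.get_entity('@someusername')   # by username
    #     user = client.get_entity('+34600000000')    # by phone (must be a contact)
    #     chat = client.get_entity(peer_id)            # by a previously-seen ID
    #     both = client.get_entity(['@someusername', peer_id])  # list in, list out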

    def _get_entity_from_string(self, string):
        """
        Gets a full entity from the given string, which may be a phone or
        a username, and processes all the found entities on the session.
        The string may also be a user link, or a channel/chat invite link.

        This method has the side effect of adding the found users to the
        session database, so it can be queried later without API calls,
        if this option is enabled on the session.

        Returns the found entity, or raises ValueError if not found.
        """
        phone = utils.parse_phone(string)
        if phone:
            for user in self(GetContactsRequest(0)).users:
                if user.phone == phone:
                    return user
        else:
            username, is_join_chat = utils.parse_username(string)
            if is_join_chat:
                invite = self(CheckChatInviteRequest(username))
                if isinstance(invite, ChatInvite):
                    raise ValueError(
                        'Cannot get entity from a channel (or group) '
                        'that you are not part of. Join the group and retry'
                    )
                elif isinstance(invite, ChatInviteAlready):
                    return invite.chat
            elif username:
                if username in ('me', 'self'):
                    return self.get_me()

                try:
                    result = self(ResolveUsernameRequest(username))
                except UsernameNotOccupiedError as e:
                    raise ValueError('No user has "{}" as username'
                                     .format(username)) from e

                for entity in itertools.chain(result.users, result.chats):
                    # Parenthesise the fallback so .lower() applies to the
                    # username (or '') rather than to '' alone
                    if (getattr(entity, 'username', None) or '') \
                            .lower() == username:
                        return entity
            try:
                # Nobody with this username, maybe it's an exact name/title
                return self.get_entity(self.session.get_input_entity(string))
            except ValueError:
                pass

        raise ValueError(
            'Cannot find any entity corresponding to "{}"'.format(string)
        )

    def get_input_entity(self, peer):
        """
        Turns the given peer into its input entity version. Most requests
        use this kind of ``InputUser``, ``InputChat`` and so on, so this is
        the most suitable call to make for those cases.

        Args:
            peer (`str` | `int` | :tl:`Peer` | :tl:`InputPeer`):
                The integer ID of a user or otherwise either of a
                :tl:`PeerUser`, :tl:`PeerChat` or :tl:`PeerChannel`, for
                which to get its ``Input*`` version.

                If this ``Peer`` hasn't been seen before by the library, the top
                dialogs will be loaded and their entities saved to the session
                file (unless this feature was disabled explicitly).

                If in the end the access hash required for the peer was not found,
                a ValueError will be raised.

        Returns:
            :tl:`InputPeerUser`, :tl:`InputPeerChat` or :tl:`InputPeerChannel`
            or :tl:`InputPeerSelf` if the parameter is ``'me'`` or ``'self'``.

            If you need to get the ID of yourself, you should use
            `get_me` with ``input_peer=True`` instead.
        """
        if peer in ('me', 'self'):
            return InputPeerSelf()

        try:
            # First try to get the entity from cache, otherwise figure it out
            return self.session.get_input_entity(peer)
        except ValueError:
            pass

        if isinstance(peer, str):
            return utils.get_input_peer(self._get_entity_from_string(peer))

        if not isinstance(peer, int) and (not isinstance(peer, TLObject)
                                          or peer.SUBCLASS_OF_ID != 0x2d45687):
            # Try casting the object into an input peer. Might TypeError.
            # Don't do it if a not-found ID was given (instead ValueError).
            # Also ignore Peer (0x2d45687 == crc32(b'Peer'))'s, lacking hash.
            return utils.get_input_peer(peer)

        raise ValueError(
            'Could not find the input entity for "{}". Please read https://'
            'telethon.readthedocs.io/en/latest/extra/basic/entities.html to'
            ' find out more details.'
            .format(peer)
        )
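
    # Illustrative usage sketch (not from the original source); the username
    # and `peer_id` are placeholders:
    #
    #     me = client.get_input_entity('me')               # -> InputPeerSelf()
    #     peer = client.get_input_entity('@someusername')  # resolves and caches
    #     peer = client.get_input_entity(peer_id)          # works if already cached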

    def edit_2fa(self, current_password=None, new_password=None, hint='',
                 email=None):
        """
        Changes the 2FA settings of the logged in user, according to the
        passed parameters. Take note of the parameter explanations.

        Has no effect if both current and new password are omitted.

        Args:
            current_password (`str`, optional):
                The current password, to authorize changing to ``new_password``.
                Must be set if changing existing 2FA settings.
                Must **not** be set if 2FA is currently disabled.
                Passing this by itself will remove 2FA (if correct).

            new_password (`str`, optional):
                The password to set as 2FA.
                If 2FA was already enabled, ``current_password`` **must** be set.
                Leaving this blank or ``None`` will remove the password.

            hint (`str`, optional):
                Hint to be displayed by Telegram when it asks for 2FA.
                Leaving unspecified is highly discouraged.
                Has no effect if ``new_password`` is not set.

            email (`str`, optional):
                Recovery and verification email. Raises ``EmailUnconfirmedError``
                if value differs from current one, and has no effect if
                ``new_password`` is not set.

        Returns:
            ``True`` if successful, ``False`` otherwise.
        """
        if new_password is None and current_password is None:
            return False

        pass_result = self(GetPasswordRequest())
        if isinstance(pass_result, NoPassword) and current_password:
            current_password = None

        salt_random = os.urandom(8)
        salt = pass_result.new_salt + salt_random
        if not current_password:
            current_password_hash = salt
        else:
            current_password = pass_result.current_salt +\
                current_password.encode() + pass_result.current_salt
            current_password_hash = hashlib.sha256(current_password).digest()

        if new_password:  # Setting new password
            new_password = salt + new_password.encode('utf-8') + salt
            new_password_hash = hashlib.sha256(new_password).digest()
            new_settings = PasswordInputSettings(
                new_salt=salt,
                new_password_hash=new_password_hash,
                hint=hint
            )
            if email:  # If enabling 2FA or changing email
                new_settings.email = email  # TG counts empty string as None
            return self(UpdatePasswordSettingsRequest(
                current_password_hash, new_settings=new_settings
            ))
        else:  # Removing existing password
            return self(UpdatePasswordSettingsRequest(
                current_password_hash,
                new_settings=PasswordInputSettings(
                    new_salt=bytes(),
                    new_password_hash=bytes(),
                    hint=hint
                )
            ))
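
    # Illustrative usage sketch (not from the original source); the passwords
    # and hint are placeholders, following the parameter rules documented above:
    #
    #     client.edit_2fa(new_password='hunter2', hint='a game')        # enable
    #     client.edit_2fa(current_password='hunter2',
    #                     new_password='correct horse')                 # change
    #     client.edit_2fa(current_password='correct horse')             # disable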

    # endregion