2019-09-08 11:56:35 +03:00
|
|
|
import inspect
|
2018-06-09 23:05:06 +03:00
|
|
|
import itertools
|
2019-05-03 22:37:27 +03:00
|
|
|
import typing
|
2020-10-01 13:22:55 +03:00
|
|
|
import warnings
|
2018-06-09 23:05:06 +03:00
|
|
|
|
2021-09-26 20:58:42 +03:00
|
|
|
from .._misc import helpers, utils, requestiter, hints
|
2021-09-13 21:37:29 +03:00
|
|
|
from ..types import _custom
|
2021-09-26 20:58:42 +03:00
|
|
|
from .. import errors, _tl
|
2019-02-26 22:26:40 +03:00
|
|
|
|
2019-02-27 14:57:54 +03:00
|
|
|
_MAX_CHUNK_SIZE = 100
|
|
|
|
|
2019-05-03 22:37:27 +03:00
|
|
|
if typing.TYPE_CHECKING:
|
|
|
|
from .telegramclient import TelegramClient
|
|
|
|
|
2019-02-26 22:26:40 +03:00
|
|
|
|
2021-09-12 14:27:13 +03:00
|
|
|
class _MessagesIter(requestiter.RequestIter):
    """
    Common factor for all requests that need to iterate over messages.

    Depending on the arguments given to `_init`, one of four raw requests is
    built and stored in ``self.request``: ``messages.SearchGlobal`` (no
    entity), ``messages.GetScheduledHistory``, ``messages.GetReplies``,
    ``messages.Search`` (text/filter/sender search) or
    ``messages.GetHistory`` (the plain fallback).
    """
    async def _init(
            self, entity, offset_id, min_id, max_id,
            from_user, offset_date, add_offset, filter, search, reply_to,
            scheduled
    ):
        # Note that entity being `None` will perform a global search.
        if entity:
            self.entity = await self.client.get_input_entity(entity)
        else:
            self.entity = None
            if self.reverse:
                raise ValueError('Cannot reverse global search')

        # Telegram doesn't like min_id/max_id. If these IDs are low enough
        # (starting from last_id - 100), the request will return nothing.
        #
        # We can emulate their behaviour locally by setting offset = max_id
        # and simply stopping once we hit a message with ID <= min_id.
        if self.reverse:
            offset_id = max(offset_id, min_id)
            if offset_id and max_id:
                if max_id - offset_id <= 1:
                    # Window is empty; nothing to iterate at all.
                    raise StopAsyncIteration

            if not max_id:
                max_id = float('inf')
        else:
            offset_id = max(offset_id, max_id)
            if offset_id and min_id:
                if offset_id - min_id <= 1:
                    # Window is empty; nothing to iterate at all.
                    raise StopAsyncIteration

        if self.reverse:
            if offset_id:
                offset_id += 1
            elif not offset_date:
                # offset_id has priority over offset_date, so don't
                # set offset_id to 1 if we want to offset by date.
                offset_id = 1

        if from_user:
            from_user = await self.client.get_input_entity(from_user)
            # Kept to filter messages locally by sender in _load_next_chunk.
            self.from_id = await self.client.get_peer_id(from_user)
        else:
            self.from_id = None

        # `messages.searchGlobal` only works with text `search` or `filter` queries.
        # If we want to perform global a search with `from_user` we have to perform
        # a normal `messages.search`, *but* we can make the entity be `inputPeerEmpty`.
        if not self.entity and from_user:
            self.entity = _tl.InputPeerEmpty()

        if filter is None:
            filter = _tl.InputMessagesFilterEmpty()
        else:
            # Accept both a filter class and an already-built instance.
            filter = filter() if isinstance(filter, type) else filter

        if not self.entity:
            self.request = _tl.fn.messages.SearchGlobal(
                q=search or '',
                filter=filter,
                min_date=None,
                max_date=offset_date,
                offset_rate=0,
                offset_peer=_tl.InputPeerEmpty(),
                offset_id=offset_id,
                limit=1
            )
        elif scheduled:
            # NOTE(review): this passes the raw `entity` argument, not the
            # resolved `self.entity` used by every other branch — verify
            # whether that is intentional.
            self.request = _tl.fn.messages.GetScheduledHistory(
                peer=entity,
                hash=0
            )
        elif reply_to is not None:
            self.request = _tl.fn.messages.GetReplies(
                peer=self.entity,
                msg_id=reply_to,
                offset_id=offset_id,
                offset_date=offset_date,
                add_offset=add_offset,
                limit=1,
                max_id=0,
                min_id=0,
                hash=0
            )
        elif search is not None or not isinstance(filter, _tl.InputMessagesFilterEmpty) or from_user:
            # Telegram completely ignores `from_id` in private chats
            ty = helpers._entity_type(self.entity)
            if ty == helpers._EntityType.USER:
                # Don't bother sending `from_user` (it's ignored anyway),
                # but keep `from_id` defined above to check it locally.
                from_user = None
            else:
                # Do send `from_user` to do the filtering server-side,
                # and set `from_id` to None to avoid checking it locally.
                self.from_id = None

            self.request = _tl.fn.messages.Search(
                peer=self.entity,
                q=search or '',
                filter=filter,
                min_date=None,
                max_date=offset_date,
                offset_id=offset_id,
                add_offset=add_offset,
                limit=0,  # Search actually returns 0 items if we ask it to
                max_id=0,
                min_id=0,
                hash=0,
                from_id=from_user
            )

            # Workaround issue #1124 until a better solution is found.
            # Telegram seemingly ignores `max_date` if `filter` (and
            # nothing else) is specified, so we have to rely on doing
            # a first request to offset from the ID instead.
            #
            # Even better, using `filter` and `from_id` seems to always
            # trigger `RPC_CALL_FAIL` which is "internal issues"...
            if not isinstance(filter, _tl.InputMessagesFilterEmpty) \
                    and offset_date and not search and not offset_id:
                async for m in self.client.iter_messages(
                        self.entity, 1, offset_date=offset_date):
                    self.request.offset_id = m.id + 1
        else:
            self.request = _tl.fn.messages.GetHistory(
                peer=self.entity,
                limit=1,
                offset_date=offset_date,
                offset_id=offset_id,
                min_id=0,
                max_id=0,
                add_offset=add_offset,
                hash=0
            )

        if self.limit <= 0:
            # No messages, but we still need to know the total message count
            result = await self.client(self.request)
            if isinstance(result, _tl.messages.MessagesNotModified):
                self.total = result.count
            else:
                self.total = getattr(result, 'count', len(result.messages))
            raise StopAsyncIteration

        if self.wait_time is None:
            # Only throttle automatically when iterating a large amount.
            self.wait_time = 1 if self.limit > 3000 else 0

        # When going in reverse we need an offset of `-limit`, but we
        # also want to respect what the user passed, so add them together.
        if self.reverse:
            self.request.add_offset -= _MAX_CHUNK_SIZE

        self.add_offset = add_offset
        self.max_id = max_id
        self.min_id = min_id
        # Tracks the last yielded ID to skip duplicates (see _load_next_chunk).
        self.last_id = 0 if self.reverse else float('inf')

    async def _load_next_chunk(self):
        """Fetch one chunk and fill ``self.buffer``; return True when done."""
        self.request.limit = min(self.left, _MAX_CHUNK_SIZE)
        if self.reverse and self.request.limit != _MAX_CHUNK_SIZE:
            # Remember that we need -limit when going in reverse
            self.request.add_offset = self.add_offset - self.request.limit

        r = await self.client(self.request)
        self.total = getattr(r, 'count', len(r.messages))

        entities = {utils.get_peer_id(x): x
                    for x in itertools.chain(r.users, r.chats)}

        messages = reversed(r.messages) if self.reverse else r.messages
        for message in messages:
            # Skip empty messages and (when filtering locally) wrong senders.
            if (isinstance(message, _tl.MessageEmpty)
                    or self.from_id and message.sender_id != self.from_id):
                continue

            if not self._message_in_range(message):
                return True

            # There has been reports that on bad connections this method
            # was returning duplicated IDs sometimes. Using ``last_id``
            # is an attempt to avoid these duplicates, since the message
            # IDs are returned in descending order (or asc if reverse).
            self.last_id = message.id
            self.buffer.append(_custom.Message._new(self.client, message, entities, self.entity))

        if len(r.messages) < self.request.limit:
            # Server returned fewer than asked for: history exhausted.
            return True

        # Get the last message that's not empty (in some rare cases
        # it can happen that the last message is :tl:`MessageEmpty`)
        if self.buffer:
            self._update_offset(self.buffer[-1], r)
        else:
            # There are some cases where all the messages we get start
            # being empty. This can happen on migrated mega-groups if
            # the history was cleared, and we're using search. Telegram
            # acts incredibly weird sometimes. Messages are returned but
            # only "empty", not their contents. If this is the case we
            # should just give up since there won't be any new Message.
            return True

    def _message_in_range(self, message):
        """
        Determine whether the given message is in the range or
        it should be ignored (and avoid loading more chunks).
        """
        # No entity means message IDs between chats may vary
        if self.entity:
            if self.reverse:
                if message.id <= self.last_id or message.id >= self.max_id:
                    return False
            else:
                if message.id >= self.last_id or message.id <= self.min_id:
                    return False

        return True

    def _update_offset(self, last_message, response):
        """
        After making the request, update its offset with the last message.
        """
        self.request.offset_id = last_message.id
        if self.reverse:
            # We want to skip the one we already have
            self.request.offset_id += 1

        if isinstance(self.request, _tl.fn.messages.Search):
            # Unlike getHistory and searchGlobal that use *offset* date,
            # this is *max* date. This means that doing a search in reverse
            # will break it. Since it's not really needed once we're going
            # (only for the first request), it's safe to just clear it off.
            self.request.max_date = None
        else:
            # getHistory, searchGlobal and getReplies call it offset_date
            self.request.offset_date = last_message.date

        if isinstance(self.request, _tl.fn.messages.SearchGlobal):
            if last_message.input_chat:
                self.request.offset_peer = last_message.input_chat
            else:
                self.request.offset_peer = _tl.InputPeerEmpty()

            self.request.offset_rate = getattr(response, 'next_rate', 0)
|
2019-02-26 23:04:46 +03:00
|
|
|
|
|
|
|
|
2021-09-12 14:27:13 +03:00
|
|
|
class _IDsIter(requestiter.RequestIter):
    """
    Iterator over an explicit list of message IDs.

    Yields one item per requested ID, in the requested order; invalid or
    foreign IDs yield ``None`` so positions stay aligned with the input.
    """
    async def _init(self, entity, ids):
        # One slot per requested ID (None placeholders keep the count exact).
        self.total = len(ids)
        self._ids = list(reversed(ids)) if self.reverse else ids
        self._offset = 0
        self._entity = (await self.client.get_input_entity(entity)) if entity else None
        self._ty = helpers._entity_type(self._entity) if self._entity else None

        # 30s flood wait every 300 messages (3 requests of 100 each, 30 of 10, etc.)
        if self.wait_time is None:
            self.wait_time = 10 if self.limit > 300 else 0

    async def _load_next_chunk(self):
        """Fetch the next batch of up to ``_MAX_CHUNK_SIZE`` IDs."""
        ids = self._ids[self._offset:self._offset + _MAX_CHUNK_SIZE]
        if not ids:
            raise StopAsyncIteration

        self._offset += _MAX_CHUNK_SIZE

        from_id = None  # By default, no need to validate from_id
        if self._ty == helpers._EntityType.CHANNEL:
            try:
                r = await self.client(
                    _tl.fn.channels.GetMessages(self._entity, ids))
            except errors.MessageIdsEmptyError:
                # All IDs were invalid, use a dummy result
                r = _tl.messages.MessagesNotModified(len(ids))
        else:
            r = await self.client(_tl.fn.messages.GetMessages(ids))
            if self._entity:
                from_id = await _get_peer(self.client, self._entity)

        if isinstance(r, _tl.messages.MessagesNotModified):
            # Keep output aligned: one None per requested ID.
            self.buffer.extend(None for _ in ids)
            return

        entities = {utils.get_peer_id(x): x
                    for x in itertools.chain(r.users, r.chats)}

        # Telegram seems to return the messages in the order in which
        # we asked them for, so we don't need to check it ourselves,
        # unless some messages were invalid in which case Telegram
        # may decide to not send them at all.
        #
        # The passed message IDs may not belong to the desired entity
        # since the user can enter arbitrary numbers which can belong to
        # arbitrary chats. Validate these unless ``from_id is None``.
        for message in r.messages:
            if isinstance(message, _tl.MessageEmpty) or (
                    from_id and message.peer_id != from_id):
                self.buffer.append(None)
            else:
                self.buffer.append(_custom.Message._new(self.client, message, entities, self._entity))
|
2019-02-26 23:04:46 +03:00
|
|
|
|
|
|
|
|
2021-09-19 18:21:11 +03:00
|
|
|
async def _get_peer(self: 'TelegramClient', input_peer: 'hints.EntityLike'):
    """Convert *input_peer* into a bare ``Peer``, falling back to ``self``.

    ``utils.get_peer`` handles every regular input peer; the only value it
    rejects with ``TypeError`` is the client's own user, which is resolved
    through ``get_peer_id`` instead.
    """
    try:
        peer = utils.get_peer(input_peer)
    except TypeError:
        # Can only be self by now
        peer = _tl.PeerUser(await self.get_peer_id(input_peer))
    return peer
|
|
|
|
|
|
|
|
|
2021-09-17 20:35:10 +03:00
|
|
|
def get_messages(
        self: 'TelegramClient',
        entity: 'hints.EntityLike',
        limit: float = (),
        *,
        offset_date: 'hints.DateLike' = None,
        offset_id: int = 0,
        max_id: int = 0,
        min_id: int = 0,
        add_offset: int = 0,
        search: str = None,
        filter: 'typing.Union[_tl.TypeMessagesFilter, typing.Type[_tl.TypeMessagesFilter]]' = None,
        from_user: 'hints.EntityLike' = None,
        wait_time: float = None,
        ids: 'typing.Union[int, typing.Sequence[int]]' = None,
        reverse: bool = False,
        reply_to: int = None,
        scheduled: bool = False
) -> 'typing.Union[_MessagesIter, _IDsIter]':
    """Return an iterator over a chat's messages.

    When *ids* is given, an `_IDsIter` fetching exactly those message IDs is
    returned (its limit is the number of IDs); otherwise a `_MessagesIter`
    configured with the search/offset parameters is returned.
    """
    if ids is None:
        return _MessagesIter(
            client=self,
            reverse=reverse,
            wait_time=wait_time,
            limit=limit,
            entity=entity,
            offset_id=offset_id,
            min_id=min_id,
            max_id=max_id,
            from_user=from_user,
            offset_date=offset_date,
            add_offset=add_offset,
            filter=filter,
            search=search,
            reply_to=reply_to,
            scheduled=scheduled
        )

    # A single bare ID is wrapped so the iterator always sees a sequence.
    id_list = ids if utils.is_list_like(ids) else [ids]
    return _IDsIter(
        client=self,
        reverse=reverse,
        wait_time=wait_time,
        limit=len(id_list),
        entity=entity,
        ids=id_list
    )
|
|
|
|
|
|
|
|
|
|
|
|
async def _get_comment_data(
        self: 'TelegramClient',
        entity: 'hints.EntityLike',
        message: 'typing.Union[int, _tl.Message]'
):
    """Resolve the discussion-group peer and message ID for commenting.

    Fetches the discussion message linked to *message* in *entity* and
    returns ``(input_peer_of_discussion_chat, discussion_message_id)``.
    """
    discussion = await self(_tl.fn.messages.GetDiscussionMessage(
        peer=entity,
        msg_id=utils.get_message_id(message)
    ))
    top = discussion.messages[0]
    # Locate the chat object the discussion message lives in.
    matching = (c for c in discussion.chats if c.id == top.peer_id.channel_id)
    chat = next(matching)
    return utils.get_input_peer(chat), top.id
|
|
|
|
|
|
|
|
async def send_message(
        self: 'TelegramClient',
        entity: 'hints.EntityLike',
        message: 'hints.MessageLike' = '',
        *,
        reply_to: 'typing.Union[int, _tl.Message]' = None,
        attributes: 'typing.Sequence[_tl.TypeDocumentAttribute]' = None,
        parse_mode: typing.Optional[str] = (),
        formatting_entities: typing.Optional[typing.List[_tl.TypeMessageEntity]] = None,
        link_preview: bool = True,
        file: 'typing.Union[hints.FileLike, typing.Sequence[hints.FileLike]]' = None,
        thumb: 'hints.FileLike' = None,
        force_document: bool = False,
        clear_draft: bool = False,
        buttons: 'hints.MarkupLike' = None,
        silent: bool = None,
        background: bool = None,
        supports_streaming: bool = False,
        schedule: 'hints.DateLike' = None,
        comment_to: 'typing.Union[int, _tl.Message]' = None
) -> '_tl.Message':
    """Send a message (or delegate to `send_file` when media is involved).

    Three paths:
    1. *file* given → delegate everything to ``self.send_file``.
    2. *message* is an existing ``_tl.Message`` → resend it, copying its
       markup/silent/entities (or its media via ``send_file``).
    3. Plain text → parse it with *parse_mode* and send a fresh message.

    Returns the sent `Message` (reconstructed locally when the server only
    answers with :tl:`UpdateShortSentMessage`).
    """
    if file is not None:
        return await self.send_file(
            entity, file, caption=message, reply_to=reply_to,
            attributes=attributes, parse_mode=parse_mode,
            force_document=force_document, thumb=thumb,
            buttons=buttons, clear_draft=clear_draft, silent=silent,
            schedule=schedule, supports_streaming=supports_streaming,
            formatting_entities=formatting_entities,
            comment_to=comment_to, background=background
        )

    entity = await self.get_input_entity(entity)
    if comment_to is not None:
        # Comments go to the linked discussion group, not the channel itself.
        entity, reply_to = await _get_comment_data(self, entity, comment_to)

    if isinstance(message, _tl.Message):
        # Resending an existing message: reuse its markup unless overridden.
        if buttons is None:
            markup = message.reply_markup
        else:
            markup = _custom.button.build_reply_markup(buttons)

        if silent is None:
            silent = message.silent

        # Real media (anything but a webpage preview) must go via send_file.
        if (message.media and not isinstance(
                message.media, _tl.MessageMediaWebPage)):
            return await self.send_file(
                entity,
                message.media,
                caption=message.message,
                silent=silent,
                background=background,
                reply_to=reply_to,
                buttons=markup,
                formatting_entities=message.entities,
                schedule=schedule
            )

        request = _tl.fn.messages.SendMessage(
            peer=entity,
            message=message.message or '',
            silent=silent,
            background=background,
            reply_to_msg_id=utils.get_message_id(reply_to),
            reply_markup=markup,
            entities=message.entities,
            clear_draft=clear_draft,
            no_webpage=not isinstance(
                message.media, _tl.MessageMediaWebPage),
            schedule_date=schedule
        )
        # Keep the raw text around for the UpdateShortSentMessage case below.
        message = message.message
    else:
        if formatting_entities is None:
            message, formatting_entities = await self._parse_message_text(message, parse_mode)
        if not message:
            raise ValueError(
                'The message cannot be empty unless a file is provided'
            )

        request = _tl.fn.messages.SendMessage(
            peer=entity,
            message=message,
            entities=formatting_entities,
            no_webpage=not link_preview,
            reply_to_msg_id=utils.get_message_id(reply_to),
            clear_draft=clear_draft,
            silent=silent,
            background=background,
            reply_markup=_custom.button.build_reply_markup(buttons),
            schedule_date=schedule
        )

    result = await self(request)
    if isinstance(result, _tl.UpdateShortSentMessage):
        # Short updates carry no full Message, so build one locally
        # from the request data plus what the update does include.
        return _custom.Message._new(self, _tl.Message(
            id=result.id,
            peer_id=await _get_peer(self, entity),
            message=message,
            date=result.date,
            out=result.out,
            media=result.media,
            entities=result.entities,
            reply_markup=request.reply_markup,
            ttl_period=result.ttl_period
        ), {}, entity)

    return self._get_response_message(request, result, entity)
|
|
|
|
|
|
|
|
async def forward_messages(
        self: 'TelegramClient',
        entity: 'hints.EntityLike',
        messages: 'typing.Union[hints.MessageIDLike, typing.Sequence[hints.MessageIDLike]]',
        from_peer: 'hints.EntityLike' = None,
        *,
        background: bool = None,
        with_my_score: bool = None,
        silent: bool = None,
        as_album: bool = None,
        schedule: 'hints.DateLike' = None
) -> 'typing.Sequence[_tl.Message]':
    """Forward *messages* (IDs or Message objects) to *entity*.

    Messages are grouped by their source chat (consecutive runs via
    ``itertools.groupby``) and forwarded one request per group. Bare
    integer IDs require *from_peer*. Returns the forwarded message(s):
    a single Message when a single input was given, else a list.
    """
    if as_album is not None:
        warnings.warn('the as_album argument is deprecated and no longer has any effect')

    single = not utils.is_list_like(messages)
    if single:
        messages = (messages,)

    entity = await self.get_input_entity(entity)

    if from_peer:
        from_peer = await self.get_input_entity(from_peer)
        from_peer_id = await self.get_peer_id(from_peer)
    else:
        from_peer_id = None

    def get_key(m):
        # Grouping key: the chat each message comes from.
        if isinstance(m, int):
            if from_peer_id is not None:
                return from_peer_id

            raise ValueError('from_peer must be given if integer IDs are used')
        elif isinstance(m, _tl.Message):
            return m.chat_id
        else:
            raise TypeError('Cannot forward messages of type {}'.format(type(m)))

    sent = []
    # NOTE: groupby only merges *consecutive* items with equal keys, so a
    # mixed input produces one request per run of same-chat messages.
    for _chat_id, chunk in itertools.groupby(messages, key=get_key):
        chunk = list(chunk)
        if isinstance(chunk[0], int):
            chat = from_peer
        else:
            chat = await chunk[0].get_input_chat()
            chunk = [m.id for m in chunk]

        req = _tl.fn.messages.ForwardMessages(
            from_peer=chat,
            id=chunk,
            to_peer=entity,
            silent=silent,
            background=background,
            with_my_score=with_my_score,
            schedule_date=schedule
        )
        result = await self(req)
        sent.extend(self._get_response_message(req, result, entity))

    return sent[0] if single else sent
|
|
|
|
|
|
|
|
async def edit_message(
        self: 'TelegramClient',
        entity: 'typing.Union[hints.EntityLike, _tl.Message]',
        message: 'hints.MessageLike' = None,
        text: str = None,
        *,
        parse_mode: str = (),
        attributes: 'typing.Sequence[_tl.TypeDocumentAttribute]' = None,
        formatting_entities: typing.Optional[typing.List[_tl.TypeMessageEntity]] = None,
        link_preview: bool = True,
        file: 'hints.FileLike' = None,
        thumb: 'hints.FileLike' = None,
        force_document: bool = False,
        buttons: 'hints.MarkupLike' = None,
        supports_streaming: bool = False,
        schedule: 'hints.DateLike' = None
) -> '_tl.Message':
    """Edit a previously sent message's text, media and/or buttons.

    Inline-bot messages (:tl:`InputBotInlineMessageID`) are edited via
    ``messages.EditInlineBotMessage`` on the datacenter encoded in the ID;
    everything else goes through ``messages.EditMessage``.
    """
    if formatting_entities is None:
        text, formatting_entities = await self._parse_message_text(text, parse_mode)
    # file_handle/image are produced but unused here; only `media` is sent.
    file_handle, media, image = await self._file_to_media(file,
        supports_streaming=supports_streaming,
        thumb=thumb,
        attributes=attributes,
        force_document=force_document)

    if isinstance(message, _tl.InputBotInlineMessageID):
        request = _tl.fn.messages.EditInlineBotMessage(
            id=message,
            message=text,
            no_webpage=not link_preview,
            entities=formatting_entities,
            media=media,
            reply_markup=_custom.button.build_reply_markup(buttons)
        )
        # Invoke `messages.editInlineBotMessage` from the right datacenter.
        # Otherwise, Telegram will error with `MESSAGE_ID_INVALID` and do nothing.
        # NOTE(review): `entity.dc_id` is read here even though the inline ID
        # was passed as `message` — confirm callers pass the ID as `entity` too.
        exported = self._session_state.dc_id != entity.dc_id
        if exported:
            try:
                # NOTE(review): if _borrow_exported_sender raises, `sender`
                # is unbound when the `finally` clause runs — verify.
                sender = await self._borrow_exported_sender(entity.dc_id)
                return await self._call(sender, request)
            finally:
                await self._return_exported_sender(sender)
        else:
            return await self(request)

    entity = await self.get_input_entity(entity)
    request = _tl.fn.messages.EditMessage(
        peer=entity,
        id=utils.get_message_id(message),
        message=text,
        no_webpage=not link_preview,
        entities=formatting_entities,
        media=media,
        reply_markup=_custom.button.build_reply_markup(buttons),
        schedule_date=schedule
    )
    msg = self._get_response_message(request, await self(request), entity)
    return msg
|
|
|
|
|
|
|
|
async def delete_messages(
        self: 'TelegramClient',
        entity: 'hints.EntityLike',
        message_ids: 'typing.Union[hints.MessageIDLike, typing.Sequence[hints.MessageIDLike]]',
        *,
        revoke: bool = True) -> int:
    """Delete the given messages; return the summed ``pts_count``.

    *entity* may be ``None`` for "private" deletion (the non-channel code
    path). *revoke* deletes for everyone (non-channel chats only; channels
    always delete for everyone). Accepts bare IDs or Message objects.

    Note: the return value is the integer sum of ``pts_count`` across the
    issued requests (the previous ``Sequence[AffectedMessages]`` annotation
    did not match the ``sum(...)`` actually returned).
    """
    if not utils.is_list_like(message_ids):
        message_ids = (message_ids,)

    # Normalize every item to a plain integer ID (lazily, via a generator).
    message_ids = (
        m.id if isinstance(m, (
            _tl.Message, _tl.MessageService, _tl.MessageEmpty))
        else int(m) for m in message_ids
    )

    if entity:
        entity = await self.get_input_entity(entity)
        ty = helpers._entity_type(entity)
    else:
        # no entity (None), set a value that's not a channel for private delete
        ty = helpers._EntityType.USER

    # Requests are batched in chunks (utils.chunks) and sent together.
    if ty == helpers._EntityType.CHANNEL:
        res = await self([_tl.fn.channels.DeleteMessages(
            entity, list(c)) for c in utils.chunks(message_ids)])
    else:
        res = await self([_tl.fn.messages.DeleteMessages(
            list(c), revoke) for c in utils.chunks(message_ids)])

    return sum(r.pts_count for r in res)
|
2021-09-11 14:33:27 +03:00
|
|
|
|
|
|
|
async def send_read_acknowledge(
        self: 'TelegramClient',
        entity: 'hints.EntityLike',
        message: 'typing.Union[hints.MessageIDLike, typing.Sequence[hints.MessageIDLike]]' = None,
        *,
        max_id: int = None,
        clear_mentions: bool = False) -> bool:
    """Mark messages as read up to *max_id* (or the newest of *message*).

    Optionally clears the "mentioned you" badge first via
    ``messages.ReadMentions``. Returns the server's boolean result of the
    read-history request.
    """
    if max_id is None:
        if not message:
            max_id = 0
        else:
            if utils.is_list_like(message):
                max_id = max(msg.id for msg in message)
            else:
                max_id = message.id

    entity = await self.get_input_entity(entity)
    if clear_mentions:
        await self(_tl.fn.messages.ReadMentions(entity))
        # NOTE: unreachable — the block above always assigns max_id a
        # non-None value, so this early return never fires.
        if max_id is None:
            return True

    # Likewise always true given the defaulting above; kept as-is.
    if max_id is not None:
        if helpers._entity_type(entity) == helpers._EntityType.CHANNEL:
            return await self(_tl.fn.channels.ReadHistory(
                utils.get_input_channel(entity), max_id=max_id))
        else:
            return await self(_tl.fn.messages.ReadHistory(
                entity, max_id=max_id))

    return False
|
|
|
|
|
|
|
|
async def pin_message(
        self: 'TelegramClient',
        entity: 'hints.EntityLike',
        message: 'typing.Optional[hints.MessageIDLike]',
        *,
        notify: bool = False,
        pm_oneside: bool = False
):
    """Pin *message* in *entity*; thin wrapper over the shared `_pin` helper."""
    return await _pin(
        self, entity, message,
        unpin=False,
        notify=notify,
        pm_oneside=pm_oneside,
    )
|
2021-09-11 14:33:27 +03:00
|
|
|
|
|
|
|
async def unpin_message(
        self: 'TelegramClient',
        entity: 'hints.EntityLike',
        message: 'typing.Optional[hints.MessageIDLike]' = None,
        *,
        notify: bool = False
):
    """Unpin *message* in *entity*; thin wrapper over the shared `_pin` helper."""
    return await _pin(
        self, entity, message,
        unpin=True,
        notify=notify,
    )
|
2021-09-11 14:33:27 +03:00
|
|
|
|
|
|
|
async def _pin(self, entity, message, *, unpin, notify=False, pm_oneside=False):
    """Shared implementation behind `pin_message` / `unpin_message`.

    A falsy/absent *message* (ID resolves to <= 0) unpins *all* messages.
    Returns the pin service message when one is produced, else ``None``.
    """
    message = utils.get_message_id(message) or 0
    entity = await self.get_input_entity(entity)
    if message <= 0:  # old behaviour accepted negative IDs to unpin
        await self(_tl.fn.messages.UnpinAllMessages(entity))
        return

    request = _tl.fn.messages.UpdatePinnedMessage(
        peer=entity,
        id=message,
        silent=not notify,
        unpin=unpin,
        pm_oneside=pm_oneside
    )
    result = await self(request)

    # Unpinning does not produce a service message.
    # Pinning a message that was already pinned also produces no service message.
    # Pinning a message in your own chat does not produce a service message,
    # but pinning on a private conversation with someone else does.
    if unpin or not result.updates:
        return

    # Pinning a message that doesn't exist would RPC-error earlier
    return self._get_response_message(request, result, entity)
|