import json
import os
import platform
import sqlite3
import time
from base64 import b64decode
from os.path import isfile as file_exists
from threading import Lock

from .. import utils, helpers
from ..tl import TLObject
from ..tl.types import (
    PeerUser, PeerChat, PeerChannel,
    InputPeerUser, InputPeerChat, InputPeerChannel
)

EXTENSION = '.session'
CURRENT_VERSION = 2  # database version


class Session:
"""This session contains the required information to login into your
|
|
|
|
Telegram account. NEVER give the saved JSON file to anyone, since
|
|
|
|
they would gain instant access to all your messages and contacts.
|
|
|
|
|
|
|
|
If you think the session has been compromised, close all the sessions
|
|
|
|
through an official Telegram client to revoke the authorization.
|
|
|
|
"""
|
2017-12-26 18:59:30 +03:00
|
|
|
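    # A minimal usage sketch (the file name and DC address below are
    # purely illustrative, not real Telegram datacenter parameters):
    #
    #     session = Session('my_account')  # creates 'my_account.session'
    #     session.set_dc(2, '0.0.0.0', 443)
    #     session.save()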
    def __init__(self, session_id):
        """session_id should either be a string or another Session.
        Note that if another session is given, only parameters like
        those required to init a connection will be copied.
        """
        # These values will NOT be saved
        self.filename = ':memory:'

        # For connection purposes
        if isinstance(session_id, Session):
            self.device_model = session_id.device_model
            self.system_version = session_id.system_version
            self.app_version = session_id.app_version
            self.lang_code = session_id.lang_code
            self.system_lang_code = session_id.system_lang_code
            self.lang_pack = session_id.lang_pack
            self.report_errors = session_id.report_errors
            self.save_entities = session_id.save_entities
            self.flood_sleep_threshold = session_id.flood_sleep_threshold
        else:  # str / None
            if session_id:
                self.filename = session_id
                if not self.filename.endswith(EXTENSION):
                    self.filename += EXTENSION

            system = platform.uname()
            self.device_model = system.system or 'Unknown'
            self.system_version = system.release or '1.0'
            self.app_version = '1.0'  # '0' would provoke an error
            self.lang_code = 'en'
            self.system_lang_code = self.lang_code
            self.lang_pack = ''
            self.report_errors = True
            self.save_entities = True
            self.flood_sleep_threshold = 60

        self.id = helpers.generate_random_long(signed=True)
        self._sequence = 0
        self.time_offset = 0
        self._last_msg_id = 0  # Long
        self.salt = 0  # Long

        # Cross-thread safety
        self._seq_no_lock = Lock()
        self._msg_id_lock = Lock()
        self._db_lock = Lock()

        # These values will be saved
        self._dc_id = 0
        self._server_address = None
        self._port = None
        self._auth_key = None

        # Migrating from .json -> SQL
        entities = self._check_migrate_json()

        self._conn = sqlite3.connect(self.filename, check_same_thread=False)
        c = self._conn.cursor()
        c.execute("select name from sqlite_master "
                  "where type='table' and name='version'")
        if c.fetchone():
            # Tables already exist, check for the version
            c.execute("select version from version")
            version = c.fetchone()[0]
            if version != CURRENT_VERSION:
                self._upgrade_database(old=version)
                c.execute("delete from version")
                c.execute("insert into version values (?)", (CURRENT_VERSION,))
                self.save()

            # These values will be saved
            c.execute('select * from sessions')
            tuple_ = c.fetchone()
            if tuple_:
                self._dc_id, self._server_address, self._port, key = tuple_
                from ..crypto import AuthKey
                self._auth_key = AuthKey(data=key)

            c.close()
        else:
            # Tables don't exist, create new ones
            self._create_table(
                c,
                "version (version integer primary key)",
                """sessions (
                    dc_id integer primary key,
                    server_address text,
                    port integer,
                    auth_key blob
                )""",
                """entities (
                    id integer primary key,
                    hash integer not null,
                    username text,
                    phone integer,
                    name text
                )""",
                """sent_files (
                    md5_digest blob,
                    file_size integer,
                    file_id integer,
                    part_count integer,
                    primary key(md5_digest, file_size)
                )"""
            )
            c.execute("insert into version values (?)", (CURRENT_VERSION,))
            # When migrating from JSON we may already have entities to keep
            if entities:
                c.executemany(
                    'insert or replace into entities values (?,?,?,?,?)',
                    entities
                )
            c.close()
            self.save()
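
    # The resulting schema, for reference (built by the code above):
    #   version    (version)                                -- schema version
    #   sessions   (dc_id, server_address, port, auth_key)  -- single row
    #   entities   (id, hash, username, phone, name)
    #   sent_files (md5_digest, file_size, file_id, part_count)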

    def _check_migrate_json(self):
        if file_exists(self.filename):
            try:
                with open(self.filename, encoding='utf-8') as f:
                    data = json.load(f)
                self.delete()  # Delete the JSON file to create the database

                self._port = data.get('port', self._port)
                self._server_address = \
                    data.get('server_address', self._server_address)

                from ..crypto import AuthKey
                if data.get('auth_key_data', None) is not None:
                    key = b64decode(data['auth_key_data'])
                    self._auth_key = AuthKey(data=key)

                rows = []
                for p_id, p_hash in data.get('entities', []):
                    rows.append((p_id, p_hash, None, None, None))
                return rows
            except UnicodeDecodeError:
                return []  # Not a JSON file (e.g. already SQLite)

        return []  # No file to migrate; no entities
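
    # For reference, the legacy JSON layout handled above, sketched from
    # the keys it reads (field values are illustrative):
    #
    #   {
    #       "port": 443,
    #       "server_address": "...",
    #       "auth_key_data": "<base64-encoded key>",
    #       "entities": [[id, hash], ...]
    #   }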

    def _upgrade_database(self, old):
        c = self._conn.cursor()
        if old == 1:
            self._create_table(c, """sent_files (
                md5_digest blob,
                file_size integer,
                file_id integer,
                part_count integer,
                primary key(md5_digest, file_size)
            )""")
            old = 2
        c.close()
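
    # Upgrades run stepwise: each 'if old == N' block migrates the schema
    # from version N to N + 1 (hence 'old = 2' at the end), so future
    # versions can chain their own blocks after it.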

    def _create_table(self, c, *definitions):
        """Creates tables given their definitions 'name (columns)'.
        If the sqlite version is >= 3.8.2, it will use "without rowid".
        See http://www.sqlite.org/releaselog/3_8_2.html.
        """
        required = (3, 8, 2)
        sqlite_v = tuple(int(x) for x in sqlite3.sqlite_version.split('.'))
        extra = ' without rowid' if sqlite_v >= required else ''
        for definition in definitions:
            c.execute('create table {}{}'.format(definition, extra))
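
    # Tuple comparison handles multi-digit components correctly: for
    # example, (3, 10, 0) >= (3, 8, 2) is True, whereas comparing the raw
    # strings would put '3.10.0' before '3.8.2'.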

    # Data from sessions should be kept as properties
    # not to fetch the database every time we need it
    def set_dc(self, dc_id, server_address, port):
        self._dc_id = dc_id
        self._server_address = server_address
        self._port = port
        self._update_session_table()

        # Fetch the auth_key corresponding to this data center
        c = self._conn.cursor()
        c.execute('select auth_key from sessions')
        tuple_ = c.fetchone()
        if tuple_:
            from ..crypto import AuthKey
            self._auth_key = AuthKey(data=tuple_[0])
        else:
            self._auth_key = None
        c.close()
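
    # Example (the address is illustrative): switching the session to a
    # different datacenter replaces the stored row and reloads whatever
    # auth_key, if any, was saved for it:
    #   session.set_dc(4, '0.0.0.0', 443)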

    @property
    def server_address(self):
        return self._server_address

    @property
    def port(self):
        return self._port

    @property
    def auth_key(self):
        return self._auth_key

    @auth_key.setter
    def auth_key(self, value):
        self._auth_key = value
        self._update_session_table()

    def _update_session_table(self):
        with self._db_lock:
            c = self._conn.cursor()
            # While we can save multiple rows into the sessions table,
            # currently we only want to keep ONE, as the tables don't
            # tell us which auth_keys are usable and will work. It needs
            # some more work before being able to save auth_keys for
            # multiple DCs, probably done differently.
            c.execute('delete from sessions')
            c.execute('insert or replace into sessions values (?,?,?,?)', (
                self._dc_id,
                self._server_address,
                self._port,
                self._auth_key.key if self._auth_key else b''
            ))
            c.close()

    def save(self):
        """Saves (commits) the current session to its .session database"""
        with self._db_lock:
            self._conn.commit()

    def delete(self):
        """Deletes the current session file"""
        if self.filename == ':memory:':
            return True
        try:
            os.remove(self.filename)
            return True
        except OSError:
            return False

    @staticmethod
    def list_sessions():
        """Lists all the sessions of the users who have ever connected
        using this client and never logged out
        """
        return [os.path.splitext(os.path.basename(f))[0]
                for f in os.listdir('.') if f.endswith(EXTENSION)]

    def generate_sequence(self, content_related):
        """Thread safe method to generate the next sequence number,
        based on whether the message is content-related or not.

        Note that if content_related is True, the sequence number
        will also be increased by one.
        """
        with self._seq_no_lock:
            if content_related:
                result = self._sequence * 2 + 1
                self._sequence += 1
                return result
            else:
                return self._sequence * 2
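
    # Worked example: with self._sequence == 2, a content-related message
    # gets the odd seq_no 5 (2*2 + 1) and bumps the counter to 3, while a
    # service message would get the even seq_no 4 and leave it untouched.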

    def get_new_msg_id(self):
        """Generates a new unique message ID based on the current
        time since epoch"""
        # Refer to mtproto_plain_sender.py for the original method
        now = time.time()
        nanoseconds = int((now - int(now)) * 1e+9)
        # "message identifiers are divisible by 4"
        new_msg_id = (int(now) << 32) | (nanoseconds << 2)

        with self._msg_id_lock:
            if self._last_msg_id >= new_msg_id:
                new_msg_id = self._last_msg_id + 4

            self._last_msg_id = new_msg_id

        return new_msg_id
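
    # Worked example: at time 1514764800.5, the high 32 bits hold the
    # second (1514764800 << 32) and the low bits hold the fractional part
    # as nanoseconds shifted left twice (500000000 << 2 = 2000000000),
    # leaving the two lowest bits zero so the ID is divisible by 4.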

    def update_time_offset(self, correct_msg_id):
        """Updates the time offset based on a known correct message ID"""
        now = int(time.time())
        correct = correct_msg_id >> 32
        self.time_offset = correct - now
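
    # Example: a server message ID whose high 32 bits decode to second
    # 1514764810 while the local clock reads 1514764800 yields an offset
    # of +10 seconds.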

    # Entity processing

    def process_entities(self, tlo):
        """Processes all the found entities on the given TLObject,
        unless save_entities is False, and saves them to the database.
        """
        if not self.save_entities:
            return

        if not isinstance(tlo, TLObject) and hasattr(tlo, '__iter__'):
            # This may already be a list of users, for instance
            entities = tlo
        else:
            entities = []
            if hasattr(tlo, 'chats') and hasattr(tlo.chats, '__iter__'):
                entities.extend(tlo.chats)
            if hasattr(tlo, 'users') and hasattr(tlo.users, '__iter__'):
                entities.extend(tlo.users)
            if not entities:
                return

        rows = []  # Rows to add (id, hash, username, phone, name)
        for e in entities:
            if not isinstance(e, TLObject):
                continue
            try:
                p = utils.get_input_peer(e, allow_self=False)
                marked_id = utils.get_peer_id(p)
            except ValueError:
                continue

            if isinstance(p, (InputPeerUser, InputPeerChannel)):
                if not p.access_hash:
                    # Some users and channels seem to be returned without
                    # an 'access_hash', meaning Telegram doesn't want you
                    # to access them. This is the reason behind ensuring
                    # that the 'access_hash' is non-zero. See issue #354.
                    # Note that this checks for zero or None, see #392.
                    continue
                else:
                    p_hash = p.access_hash
            elif isinstance(p, InputPeerChat):
                p_hash = 0
            else:
                continue

            username = getattr(e, 'username', None) or None
            if username is not None:
                username = username.lower()
            phone = getattr(e, 'phone', None)
            name = utils.get_display_name(e) or None
            rows.append((marked_id, p_hash, username, phone, name))

        if not rows:
            return

        with self._db_lock:
            self._conn.executemany(
                'insert or replace into entities values (?,?,?,?,?)', rows
            )
            self.save()
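
    # For reference, process_entities above stores rows shaped like this
    # (values hypothetical):
    #   (marked_id, access_hash, 'username', '15550100', 'Display Name')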

    def get_input_entity(self, key):
        """Parses the given string, integer or TLObject key into a
        marked entity ID, which is then used to fetch the hash
        from the database.

        Raises ValueError if it cannot be found.
        """
        if isinstance(key, TLObject):
            try:
                # Try to early return if this key can be casted as input peer
                return utils.get_input_peer(key)
            except TypeError:
                # Otherwise, get the ID of the peer
                key = utils.get_peer_id(key)

        c = self._conn.cursor()
        if isinstance(key, str):
            phone = utils.parse_phone(key)
            if phone:
                c.execute('select id, hash from entities where phone=?',
                          (phone,))
            else:
                username, _ = utils.parse_username(key)
                c.execute('select id, hash from entities where username=?',
                          (username,))

        if isinstance(key, int):
            c.execute('select id, hash from entities where id=?', (key,))

        result = c.fetchone()
        c.close()
        if result:
            i, h = result  # unpack resulting tuple
            i, k = utils.resolve_id(i)  # removes the mark and returns kind
            if k == PeerUser:
                return InputPeerUser(i, h)
            elif k == PeerChat:
                return InputPeerChat(i)
            elif k == PeerChannel:
                return InputPeerChannel(i, h)
        else:
            raise ValueError('Could not find input entity with key: {}'
                             .format(key))
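
    # Usage sketch (values hypothetical); each form resolves through the
    # entities table populated by process_entities:
    #   session.get_input_entity('@username')    # by username
    #   session.get_input_entity('+1 555 0100')  # by phone
    #   session.get_input_entity(marked_id)      # by marked integer ID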

    # File processing

    def get_file(self, md5_digest, file_size):
        return self._conn.execute(
            'select * from sent_files '
            'where md5_digest = ? and file_size = ?', (md5_digest, file_size)
        ).fetchone()

    def cache_file(self, md5_digest, file_size, file_id, part_count):
        with self._db_lock:
            self._conn.execute(
                'insert into sent_files values (?,?,?,?)',
                (md5_digest, file_size, file_id, part_count)
            )
            self.save()
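
    # Usage sketch (values hypothetical): cache a file after uploading it
    # so identical bytes need not be re-uploaded later:
    #   session.cache_file(md5, size, file_id, part_count)
    #   cached = session.get_file(md5, size)  # None if never cached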