import datetime
import json
import os
import sqlite3
from base64 import b64decode
from os.path import isfile as file_exists

from telethon.tl import types

from .memory import MemorySession, _SentFileType
from .. import utils
from ..crypto import AuthKey
from ..tl.types import (
    InputPhoto, InputDocument, PeerUser, PeerChat, PeerChannel
)

EXTENSION = '.session'
CURRENT_VERSION = 4  # database version


class SQLiteSession(MemorySession):
    """This session contains the required information to log in to your
    Telegram account. NEVER give the saved session file to anyone, since
    they would gain instant access to all your messages and contacts.

    If you think the session has been compromised, close all the sessions
    through an official Telegram client to revoke the authorization.
    """

    def __init__(self, session_id=None):
        super().__init__()
        self.filename = ':memory:'
        self.save_entities = True

        if session_id:
            self.filename = session_id
            if not self.filename.endswith(EXTENSION):
                self.filename += EXTENSION

        # Migrating from .json -> SQL
        # TODO ^ Deprecate
        entities = self._check_migrate_json()

        self._conn = None
        c = self._cursor()
        c.execute("select name from sqlite_master "
                  "where type='table' and name='version'")
        if c.fetchone():
            # Tables already exist, check for the version
            c.execute("select version from version")
            version = c.fetchone()[0]
            if version != CURRENT_VERSION:
                self._upgrade_database(old=version)
                c.execute("delete from version")
                c.execute("insert into version values (?)", (CURRENT_VERSION,))
                self.save()

            # These values will be saved
            c.execute('select * from sessions')
            tuple_ = c.fetchone()
            if tuple_:
                self._dc_id, self._server_address, self._port, key = tuple_
                self._auth_key = AuthKey(data=key)

            c.close()
        else:
            # Tables don't exist, create new ones
            self._create_table(
                c,
                "version (version integer primary key)",
                """sessions (
                    dc_id integer primary key,
                    server_address text,
                    port integer,
                    auth_key blob
                )""",
                """entities (
                    id integer primary key,
                    hash integer not null,
                    username text,
                    phone integer,
                    name text
                )""",
                """sent_files (
                    md5_digest blob,
                    file_size integer,
                    type integer,
                    id integer,
                    hash integer,
                    primary key(md5_digest, file_size, type)
                )""",
                """update_state (
                    id integer primary key,
                    pts integer,
                    qts integer,
                    date integer,
                    seq integer
                )"""
            )
            c.execute("insert into version values (?)", (CURRENT_VERSION,))
            # When migrating from JSON we may already have entities to save
            if entities:
                c.executemany(
                    'insert or replace into entities values (?,?,?,?,?)',
                    entities
                )
            self._update_session_table()
            c.close()
            self.save()

    def clone(self, to_instance=None):
        cloned = super().clone(to_instance)
        cloned.save_entities = self.save_entities
        return cloned

    def _check_migrate_json(self):
        if file_exists(self.filename):
            try:
                with open(self.filename, encoding='utf-8') as f:
                    data = json.load(f)
                self.delete()  # Delete the JSON file to create the database

                self._port = data.get('port', self._port)
                self._server_address = \
                    data.get('server_address', self._server_address)

                if data.get('auth_key_data', None) is not None:
                    key = b64decode(data['auth_key_data'])
                    self._auth_key = AuthKey(data=key)

                rows = []
                for p_id, p_hash in data.get('entities', []):
                    if p_hash is not None:
                        rows.append((p_id, p_hash, None, None, None))
                return rows
            except UnicodeDecodeError:
                # Not a text/JSON file, so there are no entities to migrate
                return []

    def _upgrade_database(self, old):
        c = self._cursor()
        if old == 1:
            old += 1
            # Version 1 didn't have the old sent_files table, nothing to drop
        if old == 2:
            old += 1
            # The cache in the old sent_files table lasts less than a day
            # anyway, so it's safe to drop it and recreate it from scratch
            c.execute('drop table sent_files')
            self._create_table(c, """sent_files (
                md5_digest blob,
                file_size integer,
                type integer,
                id integer,
                hash integer,
                primary key(md5_digest, file_size, type)
            )""")
        if old == 3:
            old += 1
            self._create_table(c, """update_state (
                id integer primary key,
                pts integer,
                qts integer,
                date integer,
                seq integer
            )""")
        c.close()

    @staticmethod
    def _create_table(c, *definitions):
        for definition in definitions:
            c.execute('create table {}'.format(definition))

    # Data from the sessions table is kept in memory as properties
    # so that we don't hit the database every time we need it
    def set_dc(self, dc_id, server_address, port):
        super().set_dc(dc_id, server_address, port)
        self._update_session_table()

        # Fetch the auth_key corresponding to this data center
        row = self._execute('select auth_key from sessions')
        if row and row[0]:
            self._auth_key = AuthKey(data=row[0])
        else:
            self._auth_key = None

    @MemorySession.auth_key.setter
    def auth_key(self, value):
        self._auth_key = value
        self._update_session_table()

    def _update_session_table(self):
        c = self._cursor()
        # While we could save multiple rows into the sessions table,
        # we only want to keep ONE for now, since the table doesn't
        # tell us which auth_keys are usable and will work. Saving
        # auth_keys for multiple DCs needs some more work, and would
        # probably be done differently.
        c.execute('delete from sessions')
        c.execute('insert or replace into sessions values (?,?,?,?)', (
            self._dc_id,
            self._server_address,
            self._port,
            self._auth_key.key if self._auth_key else b''
        ))
        c.close()

    def get_update_state(self, entity_id):
        row = self._execute('select pts, qts, date, seq from update_state '
                            'where id = ?', entity_id)
        if row:
            pts, qts, date, seq = row
            date = datetime.datetime.utcfromtimestamp(date)
            return types.updates.State(pts, qts, date, seq, unread_count=0)

    def set_update_state(self, entity_id, state):
        self._execute('insert or replace into update_state values (?,?,?,?,?)',
                      entity_id, state.pts, state.qts,
                      state.date.timestamp(), state.seq)
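
    # Round-trip sketch (illustrative; assumes `state` is a
    # types.updates.State whose `date` is an aware UTC datetime, so that
    # .timestamp() and utcfromtimestamp() refer to the same instant):
    #
    #     session.set_update_state(0, state)
    #     restored = session.get_update_state(0)  # `date` is naive UTC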

    def save(self):
        """Saves the current session object as session_user_id.session"""
        # This is a no-op if there are no changes to commit, so there's
        # no need for us to keep track of an "unsaved changes" variable.
        if self._conn is not None:  # guard against save() after close()
            self._conn.commit()

    def _cursor(self):
        """Ensures that the connection is open and returns a cursor"""
        if self._conn is None:
            self._conn = sqlite3.connect(self.filename,
                                         check_same_thread=False)
        return self._conn.cursor()

    def _execute(self, stmt, *values):
        """
        Gets a cursor, executes `stmt` with the given values bound to
        its placeholders, fetches one row and returns it, closing the
        cursor afterwards.
        """
        c = self._cursor()
        try:
            return c.execute(stmt, values).fetchone()
        finally:
            c.close()
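
    # For instance, a hypothetical lookup (names are illustrative, shown
    # only to make the `?` placeholder binding concrete):
    #
    #     row = self._execute('select hash from entities where id = ?', 123)
    #     # `row` is None if nothing matched, else a 1-tuple with the hash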

    def close(self):
        """Closes the connection unless we're working in-memory"""
        if self.filename != ':memory:':
            if self._conn is not None:
                self._conn.commit()
                self._conn.close()
                self._conn = None

    def delete(self):
        """Deletes the current session file"""
        if self.filename == ':memory:':
            return True
        try:
            os.remove(self.filename)
            return True
        except OSError:
            return False

    @classmethod
    def list_sessions(cls):
        """Lists all the sessions of the users who have ever connected
        using this client and never logged out.
        """
        return [os.path.splitext(os.path.basename(f))[0]
                for f in os.listdir('.') if f.endswith(EXTENSION)]

    # Entity processing

    def process_entities(self, tlo):
        """Processes all the entities found in the given TLObject and
        saves them, unless the save_entities flag is False.
        """
        if not self.save_entities:
            return

        rows = self._entities_to_rows(tlo)
        if not rows:
            return

        c = self._cursor()
        try:
            c.executemany(
                'insert or replace into entities values (?,?,?,?,?)', rows)
        finally:
            c.close()

    def get_entity_rows_by_phone(self, phone):
        return self._execute(
            'select id, hash from entities where phone = ?', phone)

    def get_entity_rows_by_username(self, username):
        return self._execute(
            'select id, hash from entities where username = ?', username)

    def get_entity_rows_by_name(self, name):
        return self._execute(
            'select id, hash from entities where name = ?', name)

    def get_entity_rows_by_id(self, id, exact=True):
        if exact:
            return self._execute(
                'select id, hash from entities where id = ?', id)
        else:
            return self._execute(
                'select id, hash from entities where id in (?,?,?)',
                utils.get_peer_id(PeerUser(id)),
                utils.get_peer_id(PeerChat(id)),
                utils.get_peer_id(PeerChannel(id))
            )

    # File processing

    def get_file(self, md5_digest, file_size, cls):
        row = self._execute(
            'select id, hash from sent_files '
            'where md5_digest = ? and file_size = ? and type = ?',
            md5_digest, file_size, _SentFileType.from_type(cls).value
        )
        if row:
            # Both allowed classes have (id, access_hash) as parameters
            return cls(row[0], row[1])

    def cache_file(self, md5_digest, file_size, instance):
        if not isinstance(instance, (InputDocument, InputPhoto)):
            raise TypeError('Cannot cache %s instance' % type(instance))

        self._execute(
            'insert or replace into sent_files values (?,?,?,?,?)',
            md5_digest, file_size,
            _SentFileType.from_type(type(instance)).value,
            instance.id, instance.access_hash
        )
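
    # A sketch of the sent-files cache round trip (hypothetical names:
    # `photo` is an InputPhoto obtained after uploading a file, while
    # `digest` and `size` describe the local copy of that file):
    #
    #     session.cache_file(digest, size, photo)
    #     cached = session.get_file(digest, size, cls=InputPhoto)
    #     # cached == InputPhoto(photo.id, photo.access_hash)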