Merge branch 'master' into docs-and-compatibility

M1ha 2019-10-04 18:00:29 +05:00
commit ade6586ddf
16 changed files with 269 additions and 91 deletions

.travis.yml Normal file

@@ -0,0 +1,84 @@
dist: xenial
sudo: required

language: python

cache:
  pip: true
  apt: true

services:
  - postgresql
  - redis-server

addons:
  postgresql: "11"
  apt:
    sources:
      - sourceline: "deb http://repo.yandex.ru/clickhouse/deb/stable/ main/"
      - sourceline: "deb https://packages.erlang-solutions.com/ubuntu xenial contrib"
        key_url: "https://packages.erlang-solutions.com/ubuntu/erlang_solutions.asc"
      - sourceline: "deb https://dl.bintray.com/rabbitmq/debian xenial main"
        key_url: "https://github.com/rabbitmq/signing-keys/releases/download/2.0/rabbitmq-release-signing-key.asc"
    packages:
      - dirmngr
      - apt-transport-https
      - postgresql-contrib-9.6
      - postgresql-10
      - postgresql-contrib-10
      - postgresql-client-10
      - postgresql-11
      - postgresql-contrib-11
      - postgresql-client-11
      - unzip
      - rabbitmq-server

python:
  - 3.6
  - 3.7

env:
  - PG=9.6 DJANGO=2.1
  - PG=10 DJANGO=2.1
  - PG=11 DJANGO=2.1
  - PG=9.6 DJANGO=2.2
  - PG=10 DJANGO=2.2
  - PG=11 DJANGO=2.2

before_install:
  # Use default PostgreSQL 11 port
  - sudo sed -i 's/port = 5433/port = 5432/' /etc/postgresql/11/main/postgresql.conf
  - sudo cp /etc/postgresql/{10,11}/main/pg_hba.conf

  # Start PostgreSQL version we need
  - sudo systemctl stop postgresql && sudo systemctl start postgresql@$PG-main

  # ClickHouse sources
  - sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv E0C56BD4
  - sudo apt-get update

install:
  # Install ClickHouse
  - sudo apt-get install clickhouse-client clickhouse-server clickhouse-common-static
  - sudo service clickhouse-server restart

  - pip install -r requirements.txt
  - pip install -q Django==$DJANGO.*
  - pip install redis
  - python setup.py -q install

before_script:
  # Output software versions
  - erl -eval 'erlang:display(erlang:system_info(otp_release)), halt().' -noshell
  - rabbitmqctl status | grep "RabbitMQ"
  - clickhouse-client --query "SELECT version();"
  - psql -tc 'SHOW server_version' -U postgres
  - psql -c 'CREATE ROLE test;' -U postgres
  - psql -c 'ALTER ROLE test WITH SUPERUSER;' -U postgres
  - psql -c 'ALTER ROLE test WITH LOGIN;' -U postgres
  - psql -c "ALTER ROLE test PASSWORD 'test';" -U postgres
  - psql -c 'CREATE DATABASE test OWNER test;' -U postgres
  - psql -c 'CREATE DATABASE test2 OWNER test;' -U postgres

script:
  python runtests.py
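Note: the matrix above ends by invoking `python runtests.py`. The runner itself is not part of this diff; as a rough sketch only (the module and settings names here are assumptions, not taken from the repository), such a script usually boils down to:

    import os
    import sys

    import django
    from django.conf import settings
    from django.test.utils import get_runner

    if __name__ == '__main__':
        # Assumed settings module name; the real project may use another one
        os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'tests.settings')
        django.setup()

        # Run the test suite and propagate failures to the exit code
        TestRunner = get_runner(settings)
        sys.exit(bool(TestRunner().run_tests(['tests'])))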

setup.py
@@ -23,5 +23,5 @@ setup(
     description='Django extension to integrate with ClickHouse database',
     long_description=long_description,
     long_description_content_type="text/markdown",
-    # requires=requires
+    install_requires=requires
 )

src/django_clickhouse/clickhouse_models.py
@@ -59,6 +59,9 @@ class ClickHouseModel(with_metaclass(ClickHouseModelMeta, InfiModel)):
     sync_delay = None
     sync_lock_timeout = None

+    # This flag makes it possible to skip to_db_string() when inserting data that is already formatted
+    sync_formatted_tuples = False
+
     # This attribute is initialized in metaclass, as it must get model class as a parameter
     objects = None  # type: QuerySet
@@ -66,16 +69,21 @@ class ClickHouseModel(with_metaclass(ClickHouseModelMeta, InfiModel)):
     def get_tuple_class(cls, field_names=None, defaults=None):
         field_names = field_names or cls.fields(writable=False).keys()

-        # Strange, but sometimes the columns are in different order...
-        field_names = tuple(sorted(field_names))
-
         if defaults:
             defaults_new = deepcopy(cls._defaults)
             defaults_new.update(defaults)
         else:
             defaults_new = cls._defaults

-        return namedtuple("%sTuple" % cls.__name__, field_names, defaults=defaults_new)
+        # defaults should be rightmost arguments
+        required_field_names = tuple(name for name in field_names if name not in defaults_new)
+
+        default_field_names, default_values = zip(*sorted(defaults_new.items(), key=lambda t: t[0]))
+
+        # Strange, but sometimes the columns are in different order...
+        field_names = tuple(sorted(required_field_names)) + default_field_names
+
+        return namedtuple("%sTuple" % cls.__name__, field_names, defaults=default_values)

     @classmethod
     def objects_in(cls, database):  # type: (Database) -> QuerySet
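Note on the get_tuple_class() change above: namedtuple only applies defaults to the rightmost fields, which is why required fields are sorted first and defaulted fields are appended after them. A minimal illustration (field names are made up; the `defaults` keyword shown here is the Python 3.7+ form that the compatibility wrapper below emulates on older versions):

    from collections import namedtuple

    # Two trailing fields get defaults; the leading ones stay required.
    Row = namedtuple('Row', ('id', 'str_field', 'created_date', 'value'),
                     defaults=('2018-01-01', 100500))

    r = Row(id=1, str_field='a')
    assert (r.created_date, r.value) == ('2018-01-01', 100500)

    # Placing a defaulted field before a required one raises a TypeError,
    # hence the reordering in get_tuple_class().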
@@ -199,11 +207,11 @@ class ClickHouseModel(with_metaclass(ClickHouseModelMeta, InfiModel)):
     def insert_batch(cls, batch):
         """
         Inserts batch into database
-        :param batch:
+        :param batch: Batch of tuples to insert
         :return:
         """
         if batch:
-            cls.get_database(for_write=True).insert_tuples(cls, batch)
+            cls.get_database(for_write=True).insert_tuples(cls, batch, formatted=cls.sync_formatted_tuples)

     @classmethod
     def sync_batch_from_storage(cls):

src/django_clickhouse/compatibility.py
@@ -1,8 +1,5 @@
 import sys
-from collections import namedtuple as basenamedtuple, Mapping
-from functools import lru_cache
-from copy import deepcopy
+from collections import namedtuple as basenamedtuple


 def namedtuple(*args, **kwargs):
@@ -13,14 +10,9 @@ def namedtuple(*args, **kwargs):
     :return: namedtuple class
     """
     if sys.version_info < (3, 7):
-        defaults = kwargs.pop('defaults', {})
+        defaults = kwargs.pop('defaults', ())
         TupleClass = basenamedtuple(*args, **kwargs)
-        TupleClass.__new__.__defaults__ = (None,) * len(TupleClass._fields)
-        if isinstance(defaults, Mapping):
-            prototype = TupleClass(**defaults)
-        else:
-            prototype = TupleClass(*defaults)
-        TupleClass.__new__.__defaults__ = tuple(prototype)
+        TupleClass.__new__.__defaults__ = (None,) * (len(TupleClass._fields) - len(defaults)) + tuple(defaults)
         return TupleClass
     else:
         return basenamedtuple(*args, **kwargs)
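What the backport now does for Python < 3.7, step by step: pad `__defaults__` with None for the required fields, then append the real defaults so they land on the rightmost fields, matching the Python 3.7 `defaults` keyword. A standalone sketch of the same mechanics:

    from collections import namedtuple as basenamedtuple

    TupleClass = basenamedtuple('Row', ('a', 'b', 'c'))
    defaults = (3,)  # value for the rightmost field only

    # (None, None) for a and b, then (3,) for c
    TupleClass.__new__.__defaults__ = \
        (None,) * (len(TupleClass._fields) - len(defaults)) + tuple(defaults)

    assert TupleClass(1, 2) == (1, 2, 3)
    # The BUG noted in the tests: a missing required argument silently becomes None
    assert TupleClass(1) == (1, None, 3)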

src/django_clickhouse/database.py
@@ -67,13 +67,14 @@ class Database(InfiDatabase):
             yield item

-    def insert_tuples(self, model_class, model_tuples, batch_size=None):
-        # type: (Type['ClickHouseModel'], Iterable[tuple], Optional[int]) -> None
+    def insert_tuples(self, model_class, model_tuples, batch_size=None, formatted=False):
+        # type: (Type['ClickHouseModel'], Iterable[tuple], Optional[int], bool) -> None
         """
         Inserts model_class namedtuples
-        :param model_class: Clickhouse model, namedtuples are made from
+        :param model_class: ClickHouse model the namedtuples are made from
         :param model_tuples: An iterable of tuples to insert
         :param batch_size: Size of batch
+        :param formatted: If flag is set, tuples are expected to be ready to insert without calling field.to_db_string
         :return: None
         """
         tuples_iterator = iter(model_tuples)
@@ -88,17 +89,23 @@
         fields_list = ','.join('`%s`' % name for name in first_tuple._fields)
         fields_dict = model_class.fields(writable=True)
-        fields = [fields_dict[name] for name in first_tuple._fields]
         statsd_key = "%s.inserted_tuples.%s" % (config.STATSD_PREFIX, model_class.__name__)

+        query = 'INSERT INTO `%s`.`%s` (%s) FORMAT TabSeparated\n' \
+                % (self.db_name, model_class.table_name(), fields_list)
+        query_enc = query.encode('utf-8')
+
         def tuple_to_csv(tup):
-            return '\t'.join(field.to_db_string(val, quote=False) for field, val in zip(fields, tup)) + '\n'
+            if formatted:
+                str_gen = (getattr(tup, field_name) for field_name in first_tuple._fields)
+            else:
+                str_gen = (fields_dict[field_name].to_db_string(getattr(tup, field_name), quote=False)
+                           for field_name in first_tuple._fields)
+            return '%s\n' % '\t'.join(str_gen)

         def gen():
             buf = BytesIO()
-            query = 'INSERT INTO `%s`.`%s` (%s) FORMAT TabSeparated\n' \
-                    % (self.db_name, model_class.table_name(), fields_list)
-            query_enc = query.encode('utf-8')
             buf.write(query_enc)
             buf.write(tuple_to_csv(first_tuple).encode('utf-8'))
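A usage sketch for the new `formatted` flag, assuming a `Database` instance `db` and the `ClickHouseTestModel` from this repository's test suite: with `formatted=True`, every value must already be a TabSeparated-ready string, since `field.to_db_string()` is skipped.

    from datetime import date

    tuple_class = ClickHouseTestModel.get_tuple_class()

    # Default path: values are converted by each field's to_db_string()
    db.insert_tuples(ClickHouseTestModel,
                     [tuple_class(id=1, created_date=date.today(), value=1, str_field='a')])

    # formatted=True: values are written as-is, so they must be pre-formatted strings
    db.insert_tuples(ClickHouseTestModel,
                     [tuple_class(id='1', created_date='2019-10-04', value='1', str_field='a')],
                     formatted=True)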

src/django_clickhouse/migrations.py
@@ -119,12 +119,11 @@ class MigrationHistory(ClickHouseModel):
         :return: None
         """
         # Ensure that table for migration storing is created
-        for db_alias in cls.migrate_non_replicated_db_aliases:
-            connections[db_alias].create_table(cls)
+        for db_name in cls.migrate_non_replicated_db_aliases:
+            connections[db_name].create_table(cls)

-        cls.objects.bulk_create([
-            cls(db_alias=db_alias, package_name=migrations_package, module_name=name, applied=datetime.date.today())
-        ])
+        cls.objects.create(db_alias=db_alias, package_name=migrations_package, module_name=name,
+                           applied=datetime.date.today())

     @classmethod
     def get_applied_migrations(cls, db_alias, migrations_package):  # type: (str, str) -> Set[str]
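The rename in the loop above fixes variable shadowing: the old loop rebound `db_alias`, so the history record was written with the alias of the last migrated connection rather than the one passed in. A stripped-down illustration of the bug (names here are illustrative only):

    def set_migration_applied(db_alias, non_replicated_aliases):
        # Before the fix, the loop variable was also called db_alias...
        for db_alias in non_replicated_aliases:
            pass  # create_table(...) per connection
        # ...so by this point the argument has been overwritten
        return db_alias

    assert set_migration_applied('secondary', ['default', 'other']) == 'other'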

src/django_clickhouse/models.py
@@ -7,9 +7,10 @@ from typing import Optional, Any, Type, Set

 import six
 from django.db import transaction
+from django.db.models.manager import BaseManager
 from django.db.models.signals import post_save, post_delete
 from django.dispatch import receiver
-from django.db.models import QuerySet as DjangoQuerySet, Manager as DjangoManager, Model as DjangoModel
+from django.db.models import QuerySet as DjangoQuerySet, Model as DjangoModel
 from statsd.defaults.django import statsd

 from .configuration import config
@@ -31,16 +32,18 @@ except ImportError:
     fake = True


-class ClickHouseSyncUpdateReturningQuerySetMixin(UpdateReturningMixin):
+class ClickHouseSyncRegisterMixin:
+    def _register_ops(self, operation, result):
+        pk_name = self.model._meta.pk.name
+        pk_list = [getattr(item, pk_name) for item in result]
+        self.model.register_clickhouse_operations(operation, *pk_list, using=self.db)
+
+
+class ClickHouseSyncUpdateReturningQuerySetMixin(ClickHouseSyncRegisterMixin, UpdateReturningMixin):
     """
     This mixin adopts methods of django-pg-returning library
     """

-    def _register_ops(self, operation, result):
-        pk_name = self.model._meta.pk.name
-        pk_list = result.values_list(pk_name, flat=True)
-        self.model.register_clickhouse_operations(operation, *pk_list, using=self.db)
-
     def update_returning(self, **updates):
         result = super().update_returning(**updates)
         self._register_ops('update', result)
@@ -52,7 +55,7 @@ class ClickHouseSyncUpdateReturningQuerySetMixin(UpdateReturningMixin):
         return result


-class ClickHouseSyncBulkUpdateQuerySetMixin(BulkUpdateManagerMixin):
+class ClickHouseSyncBulkUpdateQuerySetMixin(ClickHouseSyncRegisterMixin, BulkUpdateManagerMixin):
     """
     This mixin adopts methods of django-pg-bulk-update library
     """
@@ -68,44 +71,38 @@ class ClickHouseSyncBulkUpdateQuerySetMixin(BulkUpdateManagerMixin):
         return returning

-    def _register_ops(self, result):
-        pk_name = self.model._meta.pk.name
-        pk_list = [getattr(item, pk_name) for item in result]
-        self.model.register_clickhouse_operations('update', *pk_list, using=self.db)
-
     def bulk_update(self, *args, **kwargs):
         original_returning = kwargs.pop('returning', None)
         kwargs['returning'] = self._update_returning_param(original_returning)
         result = super().bulk_update(*args, **kwargs)
-        self._register_ops(result)
+        self._register_ops('update', result)
         return result.count() if original_returning is None else result

     def bulk_update_or_create(self, *args, **kwargs):
         original_returning = kwargs.pop('returning', None)
         kwargs['returning'] = self._update_returning_param(original_returning)
         result = super().bulk_update_or_create(*args, **kwargs)
-        self._register_ops(result)
+        self._register_ops('update', result)
         return result.count() if original_returning is None else result


-class ClickHouseSyncQuerySetMixin:
+class ClickHouseSyncQuerySetMixin(ClickHouseSyncRegisterMixin):
     def update(self, **kwargs):
         # BUG I use update_returning method here. But it is not suitable for databases other than PostgreSQL
         # and requires django-pg-update-returning installed
         pk_name = self.model._meta.pk.name
-        res = self.only(pk_name).update_returning(**kwargs).values_list(pk_name, flat=True)
-        self.model.register_clickhouse_operations('update', *res, using=self.db)
+        res = self.only(pk_name).update_returning(**kwargs)
+        self._register_ops('update', res)
         return len(res)

     def bulk_create(self, objs, batch_size=None):
         objs = super().bulk_create(objs, batch_size=batch_size)
-        self.model.register_clickhouse_operations('insert', *[obj.pk for obj in objs], using=self.db)
+        self._register_ops('insert', objs)
         return objs


 # I add library-dependent mixins to base classes only if the libraries are installed
-qs_bases = [ClickHouseSyncQuerySetMixin, DjangoQuerySet]
+qs_bases = [ClickHouseSyncQuerySetMixin]

 if not getattr(UpdateReturningMixin, 'fake', False):
     qs_bases.append(ClickHouseSyncUpdateReturningQuerySetMixin)
@@ -113,15 +110,13 @@ if not getattr(UpdateReturningMixin, 'fake', False):
 if not getattr(BulkUpdateManagerMixin, 'fake', False):
     qs_bases.append(ClickHouseSyncBulkUpdateQuerySetMixin)

+# QuerySet must be the last base, so the mixins can override its methods
+qs_bases.append(DjangoQuerySet)
+
 ClickHouseSyncQuerySet = type('ClickHouseSyncModelQuerySet', tuple(qs_bases), {})


-class ClickHouseSyncManagerMixin:
-    def get_queryset(self):
-        return ClickHouseSyncQuerySet(model=self.model, using=self._db)
-
-
-class ClickHouseSyncManager(ClickHouseSyncManagerMixin, DjangoManager):
+class ClickHouseSyncManager(BaseManager.from_queryset(ClickHouseSyncQuerySet)):
     pass
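The removed ClickHouseSyncManagerMixin is replaced by Django's stock `BaseManager.from_queryset()`, which copies the queryset's public methods onto the manager instead of relying on a hand-written `get_queryset()` override. The same pattern in plain Django, as a reference sketch (model and method names here are illustrative):

    from django.db import models
    from django.db.models.manager import BaseManager


    class BookQuerySet(models.QuerySet):
        def published(self):
            return self.filter(is_published=True)


    # Equivalent to writing a manager whose get_queryset() returns BookQuerySet,
    # plus proxies for its extra methods (Book.objects.published() works).
    class BookManager(BaseManager.from_queryset(BookQuerySet)):
        pass


    class Book(models.Model):
        title = models.CharField(max_length=100)
        is_published = models.BooleanField(default=False)

        objects = BookManager()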

src/django_clickhouse/storages.py
@@ -280,6 +280,16 @@ class RedisStorage(with_metaclass(SingletonMeta, Storage)):
         key = "%s.sync.%s.queue" % (config.STATSD_PREFIX, model.get_import_key())
         statsd.gauge(key, 0)

+    def flush_import_key(self, import_key):
+        keys = [
+            self.REDIS_KEY_RANK_TEMPLATE.format(import_key=import_key),
+            self.REDIS_KEY_OPS_TEMPLATE.format(import_key=import_key),
+            self.REDIS_KEY_LOCK.format(import_key=import_key),
+            self.REDIS_KEY_LAST_SYNC_TS.format(import_key=import_key)
+        ]
+        self._redis.delete(*keys)
+        statsd.gauge("%s.sync.%s.queue" % (config.STATSD_PREFIX, import_key), 0)
+
     def get_last_sync_time(self, import_key):
         sync_ts_key = self.REDIS_KEY_LAST_SYNC_TS.format(import_key=import_key)
         res = self._redis.get(sync_ts_key)
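A usage sketch for the new `flush_import_key()` (the import key value below is an assumption; in this library it normally comes from `model.get_import_key()`):

    from django_clickhouse.storages import RedisStorage

    storage = RedisStorage()

    # Removes the rank, operations, lock and last-sync-timestamp Redis keys
    # for a single model's import queue and zeroes its statsd gauge,
    # leaving other models' queues untouched, unlike a full flush().
    storage.flush_import_key('ClickHouseTestModel')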

src/django_clickhouse/utils.py
@@ -159,7 +159,7 @@ def int_ranges(items: Iterable[int]) -> Iterator[Tuple[int, int]]:
             yield interval

     if interval_start is None:
-        raise StopIteration()
+        return
     else:
         yield interval_start, prev_item
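Context for the one-line change above: PEP 479 (enforced since Python 3.7) turns a StopIteration raised inside a generator into a RuntimeError, so ending a generator early now requires a plain `return`. A self-contained demonstration:

    def broken():
        yield 1
        raise StopIteration()  # RuntimeError under PEP 479 (Python 3.7+)

    def fixed():
        yield 1
        return  # the supported way to finish a generator early

    assert list(fixed()) == [1]
    # list(broken()) raises: RuntimeError: generator raised StopIteration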

tests/clickhouse_models.py
@@ -12,7 +12,7 @@ class ClickHouseTestModel(ClickHouseModel):
     id = fields.Int32Field()
     created_date = fields.DateField()
-    value = fields.Int32Field()
+    value = fields.Int32Field(default=100500)
     str_field = fields.StringField()

     engine = ReplacingMergeTree('created_date', ('id',))

tests/fixtures/test_model.json
@@ -16,5 +16,32 @@
             "created_date": "2018-02-01",
             "created": "2018-02-01 00:00:00"
         }
+    },
+    {
+        "model": "tests.TestModel",
+        "pk": 3,
+        "fields": {
+            "value": 300,
+            "created_date": "2018-03-01",
+            "created": "2018-03-01 00:00:00"
+        }
+    },
+    {
+        "model": "tests.TestModel",
+        "pk": 4,
+        "fields": {
+            "value": 400,
+            "created_date": "2018-04-01",
+            "created": "2018-04-01 00:00:00"
+        }
+    },
+    {
+        "model": "tests.TestModel",
+        "pk": 5,
+        "fields": {
+            "value": 500,
+            "created_date": "2018-05-01",
+            "created": "2018-05-01 00:00:00"
+        }
     }
 ]

tests/models.py
@@ -2,17 +2,31 @@
 This file contains sample models to use in tests
 """
 from django.db import models
+from django.db.models.manager import BaseManager
+from django_pg_returning import UpdateReturningModel

-from django_clickhouse.models import ClickHouseSyncModel
+from django_clickhouse.models import ClickHouseSyncModel, ClickHouseSyncQuerySet


-class TestModel(ClickHouseSyncModel):
+class TestQuerySet(ClickHouseSyncQuerySet):
+    pass
+
+
+class TestManager(BaseManager.from_queryset(TestQuerySet)):
+    pass
+
+
+class TestModel(UpdateReturningModel, ClickHouseSyncModel):
+    objects = TestManager()
+
     value = models.IntegerField()
     created_date = models.DateField()
     created = models.DateTimeField()


-class SecondaryTestModel(ClickHouseSyncModel):
+class SecondaryTestModel(UpdateReturningModel, ClickHouseSyncModel):
+    objects = TestManager()
+
     value = models.IntegerField()
     created_date = models.DateField()
     created = models.DateTimeField()

tests/test_compatibility.py
@@ -1,17 +1,17 @@
-from unittest import TestCase
+from django.test import TestCase

 from django_clickhouse.compatibility import namedtuple


 class NamedTupleTest(TestCase):
     def test_defaults(self):
-        TestTuple = namedtuple('TestTuple', ('a', 'b', 'c'), defaults={'c': 3})
+        TestTuple = namedtuple('TestTuple', ('a', 'b', 'c'), defaults=[3])

         self.assertTupleEqual((1, 2, 3), tuple(TestTuple(1, b=2)))
         self.assertTupleEqual((1, 2, 4), tuple(TestTuple(1, 2, 4)))
         self.assertTupleEqual((1, 2, 4), tuple(TestTuple(a=1, b=2, c=4)))

     def test_exceptions(self):
-        TestTuple = namedtuple('TestTuple', ('a', 'b', 'c'), defaults={'c': 3})
+        TestTuple = namedtuple('TestTuple', ('a', 'b', 'c'), defaults=[3])

         # BUG On python < 3.7 this error is not raised, as not given defaults are filled by None
         # with self.assertRaises(TypeError):
@@ -22,8 +22,8 @@ class NamedTupleTest(TestCase):
     def test_different_defaults(self):
         # Test that 2 tuple type defaults don't affect each other
-        TestTuple = namedtuple('TestTuple', ('a', 'b', 'c'), defaults={'c': 3})
-        OtherTuple = namedtuple('TestTuple', ('a', 'b', 'c'), defaults={'c': 4})
+        TestTuple = namedtuple('TestTuple', ('a', 'b', 'c'), defaults=[3])
+        OtherTuple = namedtuple('TestTuple', ('a', 'b', 'c'), defaults=[4])
         t1 = TestTuple(a=1, b=2)
         t2 = OtherTuple(a=3, b=4)
         self.assertTupleEqual((1, 2, 3), tuple(t1))
@@ -31,7 +31,7 @@ class NamedTupleTest(TestCase):
     def test_defaults_cache(self):
         # Test that 2 tuple instances don't affect each other's defaults
-        TestTuple = namedtuple('TestTuple', ('a', 'b', 'c'), defaults={'c': 3})
+        TestTuple = namedtuple('TestTuple', ('a', 'b', 'c'), defaults=[3])
         self.assertTupleEqual((1, 2, 4), tuple(TestTuple(a=1, b=2, c=4)))
         self.assertTupleEqual((1, 2, 3), tuple(TestTuple(a=1, b=2)))

tests/test_database.py
@@ -31,6 +31,22 @@ class CollapsingMergeTreeTest(TestCase):
             'str_field': str(i)
         } for i in range(10)], [item.to_dict() for item in qs])

+    def test_insert_tuples_defaults(self):
+        tuple_class = ClickHouseTestModel.get_tuple_class(defaults={'created_date': date.today()})
+        data = [
+            tuple_class(id=i, str_field=str(i))
+            for i in range(10)
+        ]
+        self.db.insert_tuples(ClickHouseTestModel, data)
+
+        qs = ClickHouseTestModel.objects.order_by('id').all()
+        self.assertListEqual([{
+            'id': i,
+            'created_date': date.today(),
+            'value': 100500,
+            'str_field': str(i)
+        } for i in range(10)], [item.to_dict() for item in qs])
+
     def test_insert_tuples_batch_size(self):
         tuple_class = ClickHouseTestModel.get_tuple_class()
         data = [

tests/test_models.py
@@ -28,6 +28,7 @@ class TestOperations(TransactionTestCase):
     def setUp(self):
         self.storage = self.django_model.get_clickhouse_storage()
         self.storage.flush()
+        self.before_op_items = list(self.django_model.objects.all())

     def tearDown(self):
         self.storage.flush()
@@ -56,8 +57,8 @@
                  for i in range(5)]
         items = self.django_model.objects.bulk_create(items)
         self.assertEqual(5, len(items))
-        self.assertListEqual([('insert', "%s.%d" % (self.db_alias, instance.pk)) for instance in items],
-                             self.storage.get_operations(self.clickhouse_model.get_import_key(), 10))
+        self.assertSetEqual({('insert', "%s.%d" % (self.db_alias, instance.pk)) for instance in items},
+                            set(self.storage.get_operations(self.clickhouse_model.get_import_key(), 10)))

     def test_get_or_create(self):
         instance, created = self.django_model.objects. \
@@ -96,12 +97,21 @@
         self.assertListEqual([('update', "%s.1" % self.db_alias)],
                              self.storage.get_operations(self.clickhouse_model.get_import_key(), 10))

+        # Update, after which the updated elements no longer match the filter conditions
         self.django_model.objects.filter(created_date__lt=datetime.date.today()). \
             update(created_date=datetime.date.today())
-        self.assertListEqual([('update', "%s.1" % self.db_alias), ('update', "%s.2" % self.db_alias)],
+        self.assertListEqual([('update', "%s.%d" % (self.db_alias, item.id)) for item in self.before_op_items],
                              self.storage.get_operations(self.clickhouse_model.get_import_key(), 10))

+    def test_bulk_create_returning(self):
+        items = [
+            self.django_model(created_date=datetime.date.today(), created=datetime.datetime.now(), value=i)
+            for i in range(5)
+        ]
+        items = self.django_model.objects.bulk_create_returning(items)
+        self.assertEqual(5, len(items))
+        self.assertSetEqual({('insert', "%s.%d" % (self.db_alias, instance.pk)) for instance in items},
+                            set(self.storage.get_operations(self.clickhouse_model.get_import_key(), 10)))
+
     def test_qs_update_returning(self):
         self.django_model.objects.filter(pk=1).update_returning(created_date=datetime.date.today())
         self.assertListEqual([('update', "%s.1" % self.db_alias)],
@@ -110,7 +120,7 @@
         # Update, after which the updated elements no longer match the filter conditions
         self.django_model.objects.filter(created_date__lt=datetime.date.today()). \
             update_returning(created_date=datetime.date.today())
-        self.assertListEqual([('update', "%s.1" % self.db_alias), ('update', "%s.2" % self.db_alias)],
+        self.assertListEqual([('update', "%s.%d" % (self.db_alias, item.id)) for item in self.before_op_items],
                              self.storage.get_operations(self.clickhouse_model.get_import_key(), 10))

     def test_qs_delete_returning(self):
@@ -118,9 +128,22 @@
         self.assertListEqual([('delete', "%s.1" % self.db_alias)],
                              self.storage.get_operations(self.clickhouse_model.get_import_key(), 10))

-        # Update, after which the original filter no longer matches
+        # Delete, after which the deleted elements no longer match the filter conditions
         self.django_model.objects.filter(created_date__lt=datetime.date.today()).delete_returning()
-        self.assertListEqual([('delete', "%s.1" % self.db_alias), ('delete', "%s.2" % self.db_alias)],
+        self.assertListEqual([('delete', "%s.%d" % (self.db_alias, item.id)) for item in self.before_op_items],
                              self.storage.get_operations(self.clickhouse_model.get_import_key(), 10))

+    def test_save_returning(self):
+        # INSERT operation
+        instance = self.django_model(created_date=datetime.date.today(), created=datetime.datetime.now(), value=2)
+        instance.save_returning()
+        self.assertListEqual([('insert', "%s.%d" % (self.db_alias, instance.pk))],
+                             self.storage.get_operations(self.clickhouse_model.get_import_key(), 10))
+
+        # UPDATE operation
+        instance.save_returning()
+        self.assertListEqual([('insert', "%s.%d" % (self.db_alias, instance.pk)),
+                              ('update', "%s.%d" % (self.db_alias, instance.pk))],
+                             self.storage.get_operations(self.clickhouse_model.get_import_key(), 10))

     def test_delete(self):
def test_delete(self): def test_delete(self):

View File

@@ -1,7 +1,5 @@
 import datetime
-import time
 from queue import Queue
-from time import gmtime, localtime

 import pytz
 from django.test import TestCase
@@ -12,9 +10,13 @@ from django_clickhouse.utils import get_tz_offset, format_datetime, lazy_class_import, \
     SingletonMeta


-class GetTZOffsetTest(TestCase):
-    def test_func(self):
-        self.assertEqual(300, get_tz_offset())
+def local_dt_str(dt) -> str:
+    """
+    Returns string representation of a datetime object, localized by adding get_tz_offset()
+    :param dt: Datetime to change
+    :return: Formatted string
+    """
+    return (dt + datetime.timedelta(minutes=get_tz_offset())).strftime('%Y-%m-%d %H:%M:%S')


 class FormatDateTimeTest(TestCase):
@@ -25,32 +27,33 @@ class FormatDateTimeTest(TestCase):
         :param dt: datetime.datetime object
         :return: Expected result as a string
         """
-        moscow_minute_offset = dt.utcoffset().total_seconds() / 60
-        zone_h, zone_m = abs(int(moscow_minute_offset / 60)), int(moscow_minute_offset % 60)
-
-        # +5 accounts for the ClickHouse test server time zone
-        return (dt - datetime.timedelta(hours=zone_h - 5, minutes=zone_m)).strftime("%Y-%m-%d %H:%M:%S")
+        minute_offset = dt.utcoffset().total_seconds() / 60
+        zone_h, zone_m = abs(int(minute_offset / 60)), int(minute_offset % 60)
+        return local_dt_str(dt - datetime.timedelta(hours=zone_h, minutes=zone_m))

     def test_conversion(self):
         dt = datetime.datetime(2017, 1, 2, 3, 4, 5)
-        self.assertEqual(format_datetime(dt), '2017-01-02 08:04:05')
+        self.assertEqual(format_datetime(dt), local_dt_str(dt))
         dt = datetime.datetime(2017, 1, 2, 3, 4, 5, tzinfo=pytz.utc)
-        self.assertEqual(format_datetime(dt), '2017-01-02 08:04:05')
+        self.assertEqual(format_datetime(dt), local_dt_str(dt))
         dt = datetime.datetime(2017, 1, 2, 3, 4, 5, tzinfo=pytz.timezone('Europe/Moscow'))
         self.assertEqual(format_datetime(dt), self._get_zone_time(dt))
         dt = datetime.datetime(2017, 1, 2, 3, 4, 5, tzinfo=pytz.timezone('Europe/Moscow'))
         offset = int(pytz.timezone('Europe/Moscow').utcoffset(dt).total_seconds() / 60)
-        self.assertEqual(format_datetime(dt, timezone_offset=offset), '2017-01-02 03:04:05')
+        self.assertEqual(format_datetime(dt, timezone_offset=offset),
+                         local_dt_str(datetime.datetime(2017, 1, 2, 3, 4, 5) - datetime.timedelta(minutes=offset * 2)))

     def test_date_conversion(self):
         dt = datetime.date(2017, 1, 2)
-        self.assertEqual(format_datetime(dt), '2017-01-02 05:00:00')
+        self.assertEqual(format_datetime(dt), local_dt_str(datetime.datetime(2017, 1, 2, 0, 0, 0)))
         dt = datetime.date(2017, 1, 2)
-        self.assertEqual(format_datetime(dt, day_end=True), '2017-01-03 04:59:59')
+        self.assertEqual(format_datetime(dt, day_end=True), local_dt_str(datetime.datetime(2017, 1, 2, 23, 59, 59)))
         dt = datetime.date(2017, 1, 2)
-        self.assertEqual(format_datetime(dt, day_end=True, timezone_offset=60), '2017-01-03 03:59:59')
+        self.assertEqual(format_datetime(dt, day_end=True, timezone_offset=60),
+                         local_dt_str(datetime.datetime(2017, 1, 2, 22, 59, 59)))
         dt = datetime.date(2017, 1, 2)
-        self.assertEqual(format_datetime(dt, timezone_offset=60), '2017-01-02 04:00:00')
+        self.assertEqual(format_datetime(dt, timezone_offset=60), local_dt_str(datetime.datetime(2017, 1, 1, 23, 0, 0)))


 class TestLazyClassImport(TestCase):