Fixed some bugs

This commit is contained in:
M1ha 2018-11-26 17:37:31 +05:00
parent 0e4dd6b69f
commit 745b7b7788
5 changed files with 21 additions and 57 deletions

View File

@@ -174,7 +174,10 @@ class ClickHouseModel(with_metaclass(ClickHouseModelMeta, InfiModel)):
if operations:
with statsd.timer(statsd_key.format('get_sync_objects')):
import_objects = cls.get_sync_objects(operations)
else:
import_objects = []
if import_objects:
with statsd.timer(statsd_key.format('get_insert_batch')):
batch = cls.get_insert_batch(import_objects)
@@ -227,17 +230,21 @@ class ClickHouseMultiModel(ClickHouseModel):
with statsd.timer(statsd_key.format('get_operations')):
operations = storage.get_operations(import_key, cls.get_sync_batch_size())
with statsd.timer(statsd_key.format('get_sync_objects')):
import_objects = cls.get_sync_objects(operations)
if operations:
with statsd.timer(statsd_key.format('get_sync_objects')):
import_objects = cls.get_sync_objects(operations)
else:
import_objects = []
batches = {}
with statsd.timer(statsd_key.format('get_insert_batch')):
for model_cls in cls.sub_models:
batches[model_cls] = model_cls.get_insert_batch(import_objects)
if import_objects:
batches = {}
with statsd.timer(statsd_key.format('get_insert_batch')):
for model_cls in cls.sub_models:
batches[model_cls] = model_cls.get_insert_batch(import_objects)
with statsd.timer(statsd_key.format('insert')):
for model_cls, batch in batches.items():
model_cls.insert_batch(batch)
with statsd.timer(statsd_key.format('insert')):
for model_cls, batch in batches.items():
model_cls.insert_batch(batch)
with statsd.timer(statsd_key.format('post_sync')):
storage.post_sync(import_key)

View File

@@ -15,7 +15,8 @@ class Django2ClickHouseModelSerializer:
def serialize(self, obj): # type: (DjangoModel) -> 'ClickHouseModel'
# Standard model_to_dict ignores some fields if they have invalid naming
data = {}
for name in set(self.serialize_fields) - set(self.exclude_serialize_fields):
sync_fields = set(self.serialize_fields) - set(self.exclude_serialize_fields or ())
for name in sync_fields:
val = getattr(obj, name, None)
if val is not None:
data[name] = val

View File

@@ -61,27 +61,6 @@ class Storage:
key = "%s.sync.%s.queue" % (config.STATSD_PREFIX, import_key)
statsd.gauge(key, -batch_size, delta=True)
def get_import_batch(self, import_key, **kwargs):
# type: (str, **dict) -> Optional[Tuple[str]]
"""
Returns a saved batch for ClickHouse import or None, if it was not found
:param import_key: A key, returned by ClickHouseModel.get_import_key() method
:param kwargs: Storage dependant arguments
:return: None, if no batch has been formed. A tuple strings, saved in write_import_batch() method.
"""
raise NotImplemented()
def write_import_batch(self, import_key, batch, **kwargs):
# type: (str, Iterable[str], **dict) -> None
"""
Saves batch for ClickHouse import
:param import_key: A key, returned by ClickHouseModel.get_import_key() method
:param batch: An iterable of strings to save as a batch
:param kwargs: Storage dependant arguments
:return: None
"""
raise NotImplemented()
def get_operations(self, import_key, count, **kwargs):
# type: (str, int, **dict) -> List[Tuple[str, str]]
"""
@@ -153,7 +132,6 @@ class RedisStorage(Storage):
"""
REDIS_KEY_OPS_TEMPLATE = 'clickhouse_sync:operations:{import_key}'
REDIS_KEY_TS_TEMPLATE = 'clickhouse_sync:timstamp:{import_key}'
REDIS_KEY_BATCH_TEMPLATE = 'clickhouse_sync:batch:{import_key}'
REDIS_KEY_LOCK = 'clickhouse_sync:lock:{import_key}'
REDIS_KEY_LAST_SYNC_TS = 'clickhouse_sync:last_sync:{import_key}'
@@ -190,17 +168,6 @@ class RedisStorage(Storage):
else:
return []
def get_import_batch(self, import_key, **kwargs):
batch_key = self.REDIS_KEY_BATCH_TEMPLATE.format(import_key=import_key)
res = self._redis.lrange(batch_key, 0, -1)
return tuple(item.decode() for item in res) if res else None
def write_import_batch(self, import_key, batch, **kwargs):
# Elements are pushed to the head, so we need to invert batch in order to save correct order
if batch:
batch_key = self.REDIS_KEY_BATCH_TEMPLATE.format(import_key=import_key)
self._redis.lpush(batch_key, *reversed(batch))
def get_lock(self, import_key, **kwargs):
if self._lock is None:
from .redis import RedisLock
@@ -219,15 +186,11 @@ class RedisStorage(Storage):
def post_sync(self, import_key, **kwargs):
ts_key = self.REDIS_KEY_TS_TEMPLATE.format(import_key=import_key)
ops_key = self.REDIS_KEY_OPS_TEMPLATE.format(import_key=import_key)
batch_key = self.REDIS_KEY_BATCH_TEMPLATE.format(import_key=import_key)
score = self._redis.get(ts_key)
if score:
res = self._redis.pipeline() \
.zremrangebyscore(ops_key, '-inf', float(score)) \
.delete(batch_key) \
.execute()
batch_size = int(res[1])
res = self._redis.zremrangebyscore(ops_key, '-inf', float(score))
batch_size = int(res)
else:
batch_size = 0
@@ -240,7 +203,6 @@ class RedisStorage(Storage):
key_tpls = [
self.REDIS_KEY_TS_TEMPLATE.format(import_key='*'),
self.REDIS_KEY_OPS_TEMPLATE.format(import_key='*'),
self.REDIS_KEY_BATCH_TEMPLATE.format(import_key='*'),
self.REDIS_KEY_LOCK.format(import_key='*'),
self.REDIS_KEY_LAST_SYNC_TS.format(import_key='*')
]

View File

@@ -22,7 +22,7 @@ class Django2ClickHouseModelSerializerTest(TestCase):
self.assertEqual(self.obj.created_date, res.created_date)
def test_fields(self):
serializer = Django2ClickHouseModelSerializer(ClickHouseTestModel, fields=('value'))
serializer = Django2ClickHouseModelSerializer(ClickHouseTestModel, fields=('value',))
res = serializer.serialize(self.obj)
self.assertIsInstance(res, ClickHouseTestModel)
self.assertEqual(0, res.id)

View File

@@ -46,23 +46,17 @@ class StorageTest(TestCase):
('insert', '100501'),
], self.storage.get_operations('test2', 10))
def test_import_batch(self):
self.storage.write_import_batch('test', [str(i) for i in range(10)])
self.assertTupleEqual(tuple(str(i) for i in range(10)), self.storage.get_import_batch('test'))
def test_post_sync(self):
self.storage.pre_sync('test')
self.storage.register_operations_wrapped('test', 'insert', 100500)
self.storage.register_operations_wrapped('test', 'insert', 100501)
self.storage.get_operations('test', 10)
self.storage.write_import_batch('test', [str(i) for i in range(10)])
self.storage.register_operations_wrapped('test', 'insert', 100502)
self.storage.post_sync('test')
self.assertListEqual([
('insert', '100502')
], self.storage.get_operations('test', 10))
self.assertIsNone(self.storage.get_import_batch('test'))
def test_last_sync(self):
dt = datetime.datetime.now()