M1hacka 2019-09-16 21:25:32 +05:00
commit 335fcc634a
15 changed files with 225 additions and 87 deletions

.travis.yml (new file)

@@ -0,0 +1,84 @@
dist: xenial
sudo: required
language: python
cache:
pip: true
apt: true
services:
- postgresql
- redis-server
addons:
postgresql: "11"
apt:
sources:
- sourceline: "deb http://repo.yandex.ru/clickhouse/deb/stable/ main/"
- sourceline: "deb https://packages.erlang-solutions.com/ubuntu xenial contrib"
key_url: "https://packages.erlang-solutions.com/ubuntu/erlang_solutions.asc"
- sourceline: "deb https://dl.bintray.com/rabbitmq/debian xenial main"
key_url: "https://github.com/rabbitmq/signing-keys/releases/download/2.0/rabbitmq-release-signing-key.asc"
packages:
- dirmngr
- apt-transport-https
- postgresql-contrib-9.6
- postgresql-10
- postgresql-contrib-10
- postgresql-client-10
- postgresql-11
- postgresql-contrib-11
- postgresql-client-11
- unzip
- rabbitmq-server
python:
- 3.6
- 3.7
env:
- PG=9.6 DJANGO=2.1
- PG=10 DJANGO=2.1
- PG=11 DJANGO=2.1
- PG=9.6 DJANGO=2.2
- PG=10 DJANGO=2.2
- PG=11 DJANGO=2.2
before_install:
# Make PostgreSQL 11 listen on the default port 5432
- sudo sed -i 's/port = 5433/port = 5432/' /etc/postgresql/11/main/postgresql.conf
- sudo cp /etc/postgresql/{10,11}/main/pg_hba.conf
# Start the PostgreSQL version we need
- sudo systemctl stop postgresql && sudo systemctl start postgresql@$PG-main
# ClickHouse sources
- sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv E0C56BD4
- sudo apt-get update
install:
# Install ClickHouse
- sudo apt-get install clickhouse-client clickhouse-server clickhouse-common-static
- sudo service clickhouse-server restart
- pip install -r requirements.txt
- pip install -q Django==$DJANGO.*
- pip install redis
- python setup.py -q install
before_script:
# Output software versions
- erl -eval 'erlang:display(erlang:system_info(otp_release)), halt().' -noshell
- rabbitmqctl status | grep "RabbitMQ"
- clickhouse-client --query "SELECT version();"
- psql -tc 'SHOW server_version' -U postgres
- psql -c 'CREATE ROLE test;' -U postgres
- psql -c 'ALTER ROLE test WITH SUPERUSER;' -U postgres
- psql -c 'ALTER ROLE test WITH LOGIN;' -U postgres
- psql -c "ALTER ROLE test PASSWORD 'test';" -U postgres
- psql -c 'CREATE DATABASE test OWNER test;' -U postgres
- psql -c 'CREATE DATABASE test2 OWNER test;' -U postgres
script:
python runtests.py


@@ -23,5 +23,5 @@ setup(
description='Django extension to integrate with ClickHouse database',
long_description=long_description,
long_description_content_type="text/markdown",
# requires=requires
install_requires=requires
)


@@ -59,6 +59,9 @@ class ClickHouseModel(with_metaclass(ClickHouseModelMeta, InfiModel)):
sync_delay = None
sync_lock_timeout = None
# This flag makes it possible to skip to_db_string calls when inserting data that is already formatted
sync_formatted_tuples = False
# This attribute is initialized in the metaclass, as it requires the model class as a parameter
objects = None # type: QuerySet
@@ -66,16 +69,21 @@ class ClickHouseModel(with_metaclass(ClickHouseModelMeta, InfiModel)):
def get_tuple_class(cls, field_names=None, defaults=None):
field_names = field_names or cls.fields(writable=False).keys()
# Strange, but sometimes the columns are in different order...
field_names = tuple(sorted(field_names))
if defaults:
defaults_new = deepcopy(cls._defaults)
defaults_new.update(defaults)
else:
defaults_new = cls._defaults
return namedtuple("%sTuple" % cls.__name__, field_names, defaults=defaults_new)
# defaults should be rightmost arguments
required_field_names = tuple(name for name in field_names if name not in defaults_new)
default_field_names, default_values = zip(*sorted(defaults_new.items(), key=lambda t: t[0]))
# Strange, but sometimes the columns are in different order...
field_names = tuple(sorted(required_field_names)) + default_field_names
return namedtuple("%sTuple" % cls.__name__, field_names, defaults=default_values)
@classmethod
def objects_in(cls, database): # type: (Database) -> QuerySet
@@ -199,11 +207,11 @@ class ClickHouseModel(with_metaclass(ClickHouseModelMeta, InfiModel)):
def insert_batch(cls, batch):
"""
Inserts a batch into the database
:param batch:
:param batch: Batch of tuples to insert
:return:
"""
if batch:
cls.get_database(for_write=True).insert_tuples(cls, batch)
cls.get_database(for_write=True).insert_tuples(cls, batch, formatted=cls.sync_formatted_tuples)
@classmethod
def sync_batch_from_storage(cls):
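
For illustration, a minimal standalone sketch of the defaults handling above, using the stdlib namedtuple of Python 3.7+ (the compatibility shim below provides the same behaviour on 3.6); build_tuple_class and the field names are hypothetical:

    from collections import namedtuple

    def build_tuple_class(name, field_names, defaults):
        # Required fields come first (sorted for a stable column order);
        # defaulted fields are appended rightmost, as namedtuple requires
        required = tuple(sorted(n for n in field_names if n not in defaults))
        default_names, default_values = zip(*sorted(defaults.items()))
        return namedtuple(name, required + default_names, defaults=default_values)

    RowTuple = build_tuple_class('RowTuple', ('id', 'value', 'created_date'),
                                 {'value': 100500})
    assert RowTuple._fields == ('created_date', 'id', 'value')
    assert RowTuple(created_date='2018-01-01', id=1).value == 100500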


@@ -1,8 +1,5 @@
import sys
from collections import namedtuple as basenamedtuple, Mapping
from functools import lru_cache
from copy import deepcopy
from collections import namedtuple as basenamedtuple
def namedtuple(*args, **kwargs):
@@ -13,14 +10,9 @@ def namedtuple(*args, **kwargs):
:return: namedtuple class
"""
if sys.version_info < (3, 7):
defaults = kwargs.pop('defaults', {})
defaults = kwargs.pop('defaults', ())
TupleClass = basenamedtuple(*args, **kwargs)
TupleClass.__new__.__defaults__ = (None,) * len(TupleClass._fields)
if isinstance(defaults, Mapping):
prototype = TupleClass(**defaults)
else:
prototype = TupleClass(*defaults)
TupleClass.__new__.__defaults__ = tuple(prototype)
TupleClass.__new__.__defaults__ = (None,) * (len(TupleClass._fields) - len(defaults)) + tuple(defaults)
return TupleClass
else:
return basenamedtuple(*args, **kwargs)
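
A quick usage sketch of the shim above; the tuple and field names are hypothetical, and the caveat is the one recorded in the tests further below:

    from django_clickhouse.compatibility import namedtuple

    # `defaults` fill the rightmost fields, mirroring Python 3.7 semantics
    Point = namedtuple('Point', ('x', 'y', 'z'), defaults=(0,))
    assert tuple(Point(1, 2)) == (1, 2, 0)
    assert tuple(Point(x=1, y=2, z=5)) == (1, 2, 5)
    # On Python < 3.7 omitted non-default fields are silently filled
    # with None instead of raising TypeError (see the BUG note in tests)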


@@ -67,13 +67,14 @@ class Database(InfiDatabase):
yield item
def insert_tuples(self, model_class, model_tuples, batch_size=None):
# type: (Type['ClickHouseModel'], Iterable[tuple], Optional[int]) -> None
def insert_tuples(self, model_class, model_tuples, batch_size=None, formatted=False):
# type: (Type['ClickHouseModel'], Iterable[tuple], Optional[int], bool) -> None
"""
Inserts model_class namedtuples into the database
:param model_class: ClickHouse model the namedtuples are made from
:param model_tuples: An iterable of tuples to insert
:param batch_size: Size of insert batches
:param formatted: If set, tuple values are expected to be pre-formatted strings and are inserted without calling field.to_db_string
:return: None
"""
tuples_iterator = iter(model_tuples)
@@ -88,17 +89,23 @@ class Database(InfiDatabase):
fields_list = ','.join('`%s`' % name for name in first_tuple._fields)
fields_dict = model_class.fields(writable=True)
fields = [fields_dict[name] for name in first_tuple._fields]
statsd_key = "%s.inserted_tuples.%s" % (config.STATSD_PREFIX, model_class.__name__)
query = 'INSERT INTO `%s`.`%s` (%s) FORMAT TabSeparated\n' \
% (self.db_name, model_class.table_name(), fields_list)
query_enc = query.encode('utf-8')
def tuple_to_csv(tup):
return '\t'.join(field.to_db_string(val, quote=False) for field, val in zip(fields, tup)) + '\n'
if formatted:
str_gen = (getattr(tup, field_name) for field_name in first_tuple._fields)
else:
str_gen = (fields_dict[field_name].to_db_string(getattr(tup, field_name), quote=False)
for field_name in first_tuple._fields)
return '%s\n' % '\t'.join(str_gen)
def gen():
buf = BytesIO()
query = 'INSERT INTO `%s`.`%s` (%s) FORMAT TabSeparated\n' \
% (self.db_name, model_class.table_name(), fields_list)
query_enc = query.encode('utf-8')
buf.write(query_enc)
buf.write(tuple_to_csv(first_tuple).encode('utf-8'))
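
A hedged usage sketch of the new formatted path; the test model import path is assumed. When tuple values are already ClickHouse-ready strings, formatted=True skips the per-field to_db_string calls:

    from tests.clickhouse_models import ClickHouseTestModel  # path assumed

    tuple_class = ClickHouseTestModel.get_tuple_class()
    # Values below are already TabSeparated-ready strings
    rows = [tuple_class(id='1', created_date='2018-01-01',
                        value='100500', str_field='first')]
    db = ClickHouseTestModel.get_database(for_write=True)
    db.insert_tuples(ClickHouseTestModel, rows, formatted=True)
    # A model can opt in for all synced batches by setting the new
    # class attribute sync_formatted_tuples = True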


@@ -119,12 +119,11 @@ class MigrationHistory(ClickHouseModel):
:return: None
"""
# Ensure that the table for storing migrations is created
for db_alias in cls.migrate_non_replicated_db_aliases:
connections[db_alias].create_table(cls)
for db_name in cls.migrate_non_replicated_db_aliases:
connections[db_name].create_table(cls)
cls.objects.bulk_create([
cls(db_alias=db_alias, package_name=migrations_package, module_name=name, applied=datetime.date.today())
])
cls.objects.create(db_alias=db_alias, package_name=migrations_package, module_name=name,
applied=datetime.date.today())
@classmethod
def get_applied_migrations(cls, db_alias, migrations_package): # type: (str, str) -> Set[str]
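
For context, a short usage sketch (module path assumed): the switch from bulk_create to create records one row per applied migration, and get_applied_migrations reads the module names back as a set:

    import datetime
    from django_clickhouse.migrations import MigrationHistory  # path assumed

    # Record a migration as applied (mirrors the create() call above)
    MigrationHistory.objects.create(
        db_alias='default', package_name='my_app.clickhouse_migrations',
        module_name='0001_initial', applied=datetime.date.today())

    # Read back the applied module names for this alias and package
    applied = MigrationHistory.get_applied_migrations(
        'default', 'my_app.clickhouse_migrations')
    assert '0001_initial' in applied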


@@ -7,9 +7,10 @@ from typing import Optional, Any, Type, Set
import six
from django.db import transaction
from django.db.models.manager import BaseManager
from django.db.models.signals import post_save, post_delete
from django.dispatch import receiver
from django.db.models import QuerySet as DjangoQuerySet, Manager as DjangoManager, Model as DjangoModel
from django.db.models import QuerySet as DjangoQuerySet, Model as DjangoModel
from statsd.defaults.django import statsd
from .configuration import config
@@ -31,16 +32,18 @@ except ImportError:
fake = True
class ClickHouseSyncUpdateReturningQuerySetMixin(UpdateReturningMixin):
class ClickHouseSyncRegisterMixin:
def _register_ops(self, operation, result):
pk_name = self.model._meta.pk.name
pk_list = [getattr(item, pk_name) for item in result]
self.model.register_clickhouse_operations(operation, *pk_list, using=self.db)
class ClickHouseSyncUpdateReturningQuerySetMixin(ClickHouseSyncRegisterMixin, UpdateReturningMixin):
"""
This mixin adapts methods of the django-pg-returning library
"""
def _register_ops(self, operation, result):
pk_name = self.model._meta.pk.name
pk_list = result.values_list(pk_name, flat=True)
self.model.register_clickhouse_operations(operation, *pk_list, using=self.db)
def update_returning(self, **updates):
result = super().update_returning(**updates)
self._register_ops('update', result)
@@ -52,7 +55,7 @@ class ClickHouseSyncUpdateReturningQuerySetMixin(UpdateReturningMixin):
return result
class ClickHouseSyncBulkUpdateQuerySetMixin(BulkUpdateManagerMixin):
class ClickHouseSyncBulkUpdateQuerySetMixin(ClickHouseSyncRegisterMixin, BulkUpdateManagerMixin):
"""
This mixin adapts methods of the django-pg-bulk-update library
"""
@@ -68,39 +71,33 @@ class ClickHouseSyncBulkUpdateQuerySetMixin(BulkUpdateManagerMixin):
return returning
def _register_ops(self, result):
pk_name = self.model._meta.pk.name
pk_list = [getattr(item, pk_name) for item in result]
self.model.register_clickhouse_operations('update', *pk_list, using=self.db)
def bulk_update(self, *args, **kwargs):
original_returning = kwargs.pop('returning', None)
kwargs['returning'] = self._update_returning_param(original_returning)
result = super().bulk_update(*args, **kwargs)
self._register_ops(result)
self._register_ops('update', result)
return result.count() if original_returning is None else result
def bulk_update_or_create(self, *args, **kwargs):
original_returning = kwargs.pop('returning', None)
kwargs['returning'] = self._update_returning_param(original_returning)
result = super().bulk_update_or_create(*args, **kwargs)
self._register_ops(result)
self._register_ops('update', result)
return result.count() if original_returning is None else result
class ClickHouseSyncQuerySetMixin:
class ClickHouseSyncQuerySetMixin(ClickHouseSyncRegisterMixin):
def update(self, **kwargs):
# BUG update_returning is used here, but it is not suitable for databases other than PostgreSQL
# and requires the django-pg-returning library to be installed
pk_name = self.model._meta.pk.name
res = self.only(pk_name).update_returning(**kwargs).values_list(pk_name, flat=True)
self.model.register_clickhouse_operations('update', *res, using=self.db)
res = self.only(pk_name).update_returning(**kwargs)
self._register_ops('update', res)
return len(res)
def bulk_create(self, objs, batch_size=None):
objs = super().bulk_create(objs, batch_size=batch_size)
self.model.register_clickhouse_operations('insert', *[obj.pk for obj in objs], using=self.db)
self._register_ops('insert', objs)
return objs
@@ -116,12 +113,7 @@ if not getattr(BulkUpdateManagerMixin, 'fake', False):
ClickHouseSyncQuerySet = type('ClickHouseSyncModelQuerySet', tuple(qs_bases), {})
class ClickHouseSyncManagerMixin:
def get_queryset(self):
return ClickHouseSyncQuerySet(model=self.model, using=self._db)
class ClickHouseSyncManager(ClickHouseSyncManagerMixin, DjangoManager):
class ClickHouseSyncManager(BaseManager.from_queryset(ClickHouseSyncQuerySet)):
pass
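
The hand-written ClickHouseSyncManagerMixin could be dropped because Django's BaseManager.from_queryset builds an equivalent manager; a minimal illustration with hypothetical names:

    from django.db import models
    from django.db.models.manager import BaseManager

    class ArticleQuerySet(models.QuerySet):
        def published(self):
            return self.filter(published=True)

    # from_queryset copies the queryset's public methods onto the manager,
    # so Article.objects.published() works without a manual get_queryset()
    class ArticleManager(BaseManager.from_queryset(ArticleQuerySet)):
        pass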


@@ -280,6 +280,16 @@ class RedisStorage(with_metaclass(SingletonMeta, Storage)):
key = "%s.sync.%s.queue" % (config.STATSD_PREFIX, model.get_import_key())
statsd.gauge(key, 0)
def flush_import_key(self, import_key):
keys = [
self.REDIS_KEY_RANK_TEMPLATE.format(import_key=import_key),
self.REDIS_KEY_OPS_TEMPLATE.format(import_key=import_key),
self.REDIS_KEY_LOCK.format(import_key=import_key),
self.REDIS_KEY_LAST_SYNC_TS.format(import_key=import_key)
]
self._redis.delete(*keys)
statsd.gauge("%s.sync.%s.queue" % (config.STATSD_PREFIX, import_key), 0)
def get_last_sync_time(self, import_key):
sync_ts_key = self.REDIS_KEY_LAST_SYNC_TS.format(import_key=import_key)
res = self._redis.get(sync_ts_key)
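
A short usage sketch of the new method (storage module path and model name assumed): unlike a full flush, flush_import_key clears the rank, operations, lock, and last-sync keys of a single model:

    from django_clickhouse.storages import RedisStorage  # path assumed

    storage = RedisStorage()
    # Clears sync state for this model only; other import keys are untouched
    storage.flush_import_key(MyClickHouseModel.get_import_key())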


@@ -159,7 +159,7 @@ def int_ranges(items: Iterable[int]) -> Iterator[Tuple[int, int]]:
yield interval
if interval_start is None:
raise StopIteration()
return
else:
yield interval_start, prev_item
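
This replacement follows PEP 479: starting with Python 3.7, a StopIteration raised inside a generator is converted to RuntimeError, so a bare return is the correct way to finish early. A sketch of such a generator under that fix; the library's actual implementation may differ in detail:

    from typing import Iterable, Iterator, Tuple

    def int_ranges(items: Iterable[int]) -> Iterator[Tuple[int, int]]:
        # Merges sorted integers into inclusive (start, end) ranges
        interval_start = prev_item = None
        for item in sorted(items):
            if prev_item is not None and item > prev_item + 1:
                yield interval_start, prev_item
                interval_start = item
            elif prev_item is None:
                interval_start = item
            prev_item = item
        if interval_start is None:
            return  # empty input: just end the generator (PEP 479-safe)
        yield interval_start, prev_item

    assert list(int_ranges([1, 2, 3, 7, 8, 10])) == [(1, 3), (7, 8), (10, 10)]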


@@ -12,7 +12,7 @@ class ClickHouseTestModel(ClickHouseModel):
id = fields.Int32Field()
created_date = fields.DateField()
value = fields.Int32Field()
value = fields.Int32Field(default=100500)
str_field = fields.StringField()
engine = ReplacingMergeTree('created_date', ('id',))


@@ -16,5 +16,32 @@
"created_date": "2018-02-01",
"created": "2018-02-01 00:00:00"
}
},
{
"model": "tests.TestModel",
"pk": 3,
"fields": {
"value": 300,
"created_date": "2018-03-01",
"created": "2018-03-01 00:00:00"
}
},
{
"model": "tests.TestModel",
"pk": 4,
"fields": {
"value": 400,
"created_date": "2018-04-01",
"created": "2018-04-01 00:00:00"
}
},
{
"model": "tests.TestModel",
"pk": 5,
"fields": {
"value": 500,
"created_date": "2018-05-01",
"created": "2018-05-01 00:00:00"
}
}
]


@@ -1,17 +1,17 @@
from unittest import TestCase
from django.test import TestCase
from django_clickhouse.compatibility import namedtuple
class NamedTupleTest(TestCase):
def test_defaults(self):
TestTuple = namedtuple('TestTuple', ('a', 'b', 'c'), defaults={'c': 3})
TestTuple = namedtuple('TestTuple', ('a', 'b', 'c'), defaults=[3])
self.assertTupleEqual((1, 2, 3), tuple(TestTuple(1, b=2)))
self.assertTupleEqual((1, 2, 4), tuple(TestTuple(1, 2, 4)))
self.assertTupleEqual((1, 2, 4), tuple(TestTuple(a=1, b=2, c=4)))
def test_exceptions(self):
TestTuple = namedtuple('TestTuple', ('a', 'b', 'c'), defaults={'c': 3})
TestTuple = namedtuple('TestTuple', ('a', 'b', 'c'), defaults=[3])
# BUG On Python < 3.7 this error is not raised, as missing defaults are filled with None
# with self.assertRaises(TypeError):
@@ -22,8 +22,8 @@ class NamedTupleTest(TestCase):
def test_different_defaults(self):
# Test that the defaults of two tuple classes don't affect each other
TestTuple = namedtuple('TestTuple', ('a', 'b', 'c'), defaults={'c': 3})
OtherTuple = namedtuple('TestTuple', ('a', 'b', 'c'), defaults={'c': 4})
TestTuple = namedtuple('TestTuple', ('a', 'b', 'c'), defaults=[3])
OtherTuple = namedtuple('TestTuple', ('a', 'b', 'c'), defaults=[4])
t1 = TestTuple(a=1, b=2)
t2 = OtherTuple(a=3, b=4)
self.assertTupleEqual((1, 2, 3), tuple(t1))
@@ -31,7 +31,7 @@ class NamedTupleTest(TestCase):
def test_defaults_cache(self):
# Test that 2 tuple instances don't affect each other's defaults
TestTuple = namedtuple('TestTuple', ('a', 'b', 'c'), defaults={'c': 3})
TestTuple = namedtuple('TestTuple', ('a', 'b', 'c'), defaults=[3])
self.assertTupleEqual((1, 2, 4), tuple(TestTuple(a=1, b=2, c=4)))
self.assertTupleEqual((1, 2, 3), tuple(TestTuple(a=1, b=2)))


@@ -31,6 +31,22 @@ class CollapsingMergeTreeTest(TestCase):
'str_field': str(i)
} for i in range(10)], [item.to_dict() for item in qs])
def test_insert_tuples_defaults(self):
tuple_class = ClickHouseTestModel.get_tuple_class(defaults={'created_date': date.today()})
data = [
tuple_class(id=i, str_field=str(i))
for i in range(10)
]
self.db.insert_tuples(ClickHouseTestModel, data)
qs = ClickHouseTestModel.objects.order_by('id').all()
self.assertListEqual([{
'id': i,
'created_date': date.today(),
'value': 100500,
'str_field': str(i)
} for i in range(10)], [item.to_dict() for item in qs])
def test_insert_tuples_batch_size(self):
tuple_class = ClickHouseTestModel.get_tuple_class()
data = [


@@ -28,6 +28,7 @@ class TestOperations(TransactionTestCase):
def setUp(self):
self.storage = self.django_model.get_clickhouse_storage()
self.storage.flush()
self.before_op_items = list(self.django_model.objects.all())
def tearDown(self):
self.storage.flush()
@@ -56,8 +57,8 @@
for i in range(5)]
items = self.django_model.objects.bulk_create(items)
self.assertEqual(5, len(items))
self.assertListEqual([('insert', "%s.%d" % (self.db_alias, instance.pk)) for instance in items],
self.storage.get_operations(self.clickhouse_model.get_import_key(), 10))
self.assertSetEqual({('insert', "%s.%d" % (self.db_alias, instance.pk)) for instance in items},
set(self.storage.get_operations(self.clickhouse_model.get_import_key(), 10)))
def test_get_or_create(self):
instance, created = self.django_model.objects. \
@@ -96,10 +97,9 @@
self.assertListEqual([('update', "%s.1" % self.db_alias)],
self.storage.get_operations(self.clickhouse_model.get_import_key(), 10))
# Update after which the updated element no longer matches the filter conditions
self.django_model.objects.filter(created_date__lt=datetime.date.today()). \
update(created_date=datetime.date.today())
self.assertListEqual([('update', "%s.1" % self.db_alias), ('update', "%s.2" % self.db_alias)],
self.assertListEqual([('update', "%s.%d" % (self.db_alias, item.id)) for item in self.before_op_items],
self.storage.get_operations(self.clickhouse_model.get_import_key(), 10))
def test_qs_update_returning(self):
@@ -110,7 +110,7 @@
# Update after which the updated element no longer matches the filter conditions
self.django_model.objects.filter(created_date__lt=datetime.date.today()). \
update_returning(created_date=datetime.date.today())
self.assertListEqual([('update', "%s.1" % self.db_alias), ('update', "%s.2" % self.db_alias)],
self.assertListEqual([('update', "%s.%d" % (self.db_alias, item.id)) for item in self.before_op_items],
self.storage.get_operations(self.clickhouse_model.get_import_key(), 10))
def test_qs_delete_returning(self):
@@ -118,9 +118,9 @@
self.assertListEqual([('delete', "%s.1" % self.db_alias)],
self.storage.get_operations(self.clickhouse_model.get_import_key(), 10))
# Update after which the original filter no longer matches
# Delete after which the deleted element no longer matches the filter conditions
self.django_model.objects.filter(created_date__lt=datetime.date.today()).delete_returning()
self.assertListEqual([('delete', "%s.1" % self.db_alias), ('delete', "%s.2" % self.db_alias)],
self.assertListEqual([('delete', "%s.%d" % (self.db_alias, item.id)) for item in self.before_op_items],
self.storage.get_operations(self.clickhouse_model.get_import_key(), 10))
def test_delete(self):


@@ -1,7 +1,5 @@
import datetime
import time
from queue import Queue
from time import gmtime, localtime
import pytz
from django.test import TestCase
@@ -12,9 +10,13 @@ from django_clickhouse.utils import get_tz_offset, format_datetime, lazy_class_import, SingletonMeta
class GetTZOffsetTest(TestCase):
def test_func(self):
self.assertEqual(300, get_tz_offset())
def local_dt_str(dt) -> str:
"""
Returns a string representation of a datetime object, localized by adding get_tz_offset() minutes
:param dt: Datetime to convert
:return: Formatted string
"""
return (dt + datetime.timedelta(minutes=get_tz_offset())).strftime('%Y-%m-%d %H:%M:%S')
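
A worked example of the helper, using the 300-minute offset asserted by GetTZOffsetTest:

    import datetime

    # With get_tz_offset() == 300 (UTC+05:00), a naive datetime is shifted
    # to the test server's wall clock before comparison
    dt = datetime.datetime(2017, 1, 2, 3, 4, 5)
    assert local_dt_str(dt) == '2017-01-02 08:04:05'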
class FormatDateTimeTest(TestCase):
@@ -25,32 +27,33 @@ class FormatDateTimeTest(TestCase):
:param dt: datetime.datetime object
:return: Expected result string
"""
moscow_minute_offset = dt.utcoffset().total_seconds() / 60
zone_h, zone_m = abs(int(moscow_minute_offset / 60)), int(moscow_minute_offset % 60)
minute_offset = dt.utcoffset().total_seconds() / 60
zone_h, zone_m = abs(int(minute_offset / 60)), int(minute_offset % 60)
# +5 accounts for the ClickHouse test server time
return (dt - datetime.timedelta(hours=zone_h - 5, minutes=zone_m)).strftime("%Y-%m-%d %H:%M:%S")
return local_dt_str(dt - datetime.timedelta(hours=zone_h, minutes=zone_m))
def test_conversion(self):
dt = datetime.datetime(2017, 1, 2, 3, 4, 5)
self.assertEqual(format_datetime(dt), '2017-01-02 08:04:05')
self.assertEqual(format_datetime(dt), local_dt_str(dt))
dt = datetime.datetime(2017, 1, 2, 3, 4, 5, tzinfo=pytz.utc)
self.assertEqual(format_datetime(dt), '2017-01-02 08:04:05')
self.assertEqual(format_datetime(dt), local_dt_str(dt))
dt = datetime.datetime(2017, 1, 2, 3, 4, 5, tzinfo=pytz.timezone('Europe/Moscow'))
self.assertEqual(format_datetime(dt), self._get_zone_time(dt))
dt = datetime.datetime(2017, 1, 2, 3, 4, 5, tzinfo=pytz.timezone('Europe/Moscow'))
offset = int(pytz.timezone('Europe/Moscow').utcoffset(dt).total_seconds() / 60)
self.assertEqual(format_datetime(dt, timezone_offset=offset), '2017-01-02 03:04:05')
self.assertEqual(format_datetime(dt, timezone_offset=offset),
local_dt_str(datetime.datetime(2017, 1, 2, 3, 4, 5) - datetime.timedelta(minutes=offset*2)))
def test_date_conversion(self):
dt = datetime.date(2017, 1, 2)
self.assertEqual(format_datetime(dt), '2017-01-02 05:00:00')
self.assertEqual(format_datetime(dt), local_dt_str(datetime.datetime(2017, 1, 2, 0, 0, 0)))
dt = datetime.date(2017, 1, 2)
self.assertEqual(format_datetime(dt, day_end=True), '2017-01-03 04:59:59')
self.assertEqual(format_datetime(dt, day_end=True), local_dt_str(datetime.datetime(2017, 1, 2, 23, 59, 59)))
dt = datetime.date(2017, 1, 2)
self.assertEqual(format_datetime(dt, day_end=True, timezone_offset=60), '2017-01-03 03:59:59')
self.assertEqual(format_datetime(dt, day_end=True, timezone_offset=60),
local_dt_str(datetime.datetime(2017, 1, 2, 22, 59, 59)))
dt = datetime.date(2017, 1, 2)
self.assertEqual(format_datetime(dt, timezone_offset=60), '2017-01-02 04:00:00')
self.assertEqual(format_datetime(dt, timezone_offset=60), local_dt_str(datetime.datetime(2017, 1, 1, 23, 0, 0)))
class TestLazyClassImport(TestCase):