Finished Release v0.9.6

Itai Shirav 2017-08-23 08:15:18 +03:00
commit 4f2c4bae74
26 changed files with 81 additions and 81 deletions

View File

@@ -1,6 +1,12 @@
 Change Log
 ==========
+v0.9.6
+------
+- Fix python3 compatibility (TvoroG)
+- Nullable arrays not supported in latest ClickHouse version
+- system.parts table no longer includes "replicated" column in latest ClickHouse version
 v0.9.5
 ------
 - Added `QuerySet.paginate()`
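A hedged illustration of the "Nullable arrays" entry above, using the same field classes that appear in the test changes further down; the model itself is hypothetical:

    from infi.clickhouse_orm.models import Model
    from infi.clickhouse_orm.fields import DateField, Int32Field, NullableField, ArrayField
    from infi.clickhouse_orm.engines import MergeTree

    class Example(Model):
        date_field = DateField()
        null_int = NullableField(Int32Field())                 # still supported
        # null_arr = NullableField(ArrayField(Int32Field()))   # dropped: recent ClickHouse rejects Nullable(Array(...))
        engine = MergeTree('date_field', ('date_field',))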

View File

@@ -3,7 +3,7 @@ prefer-final = false
 newest = false
 download-cache = .cache
 develop = .
 parts =
 [project]
 name = infi.clickhouse_orm
@@ -29,7 +29,7 @@ homepage = https://github.com/Infinidat/infi.clickhouse_orm
 [isolated-python]
 recipe = infi.recipe.python
-version = v2.7.9.4
+version = v2.7.12.4
 [setup.py]
 recipe = infi.recipe.template.version

View File

@@ -1,3 +1,5 @@
+from __future__ import unicode_literals
 import requests
 from collections import namedtuple
 from .models import ModelBase
@@ -24,11 +26,11 @@ class DatabaseException(Exception):
 class Database(object):
 '''
 Database instances connect to a specific ClickHouse database for running queries,
 inserting data and other operations.
 '''
 def __init__(self, db_name, db_url='http://localhost:8123/',
 username=None, password=None, readonly=False, autocreate=True):
 '''
 Initializes a database instance. Unless it's readonly, the database will be
@@ -186,7 +188,7 @@ class Database(object):
 - `conditions`: optional SQL conditions (contents of the WHERE clause).
 - `settings`: query settings to send as HTTP GET parameters
 The result is a namedtuple containing `objects` (list), `number_of_objects`,
 `pages_total`, `number` (of the current page), and `page_size`.
 '''
 count = self.count(model_class, conditions)
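For context, a minimal usage sketch of the `paginate()` API documented above; the `Person` model and the database name are borrowed from the test suite, not from this hunk:

    from infi.clickhouse_orm.database import Database

    db = Database('test-db')                                      # assumes a local ClickHouse server
    page = db.paginate(Person, 'first_name, last_name', 1, 100)  # model, order_by, page number, page size
    print(page.number_of_objects, page.pages_total, page.number, page.page_size)
    for person in page.objects:                                   # model instances on the current page
        print(person.to_tsv())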
@@ -214,7 +216,7 @@ class Database(object):
 '''
 Executes schema migrations.
 - `migrations_package_name` - fully qualified name of the Python package
 containing the migrations.
 - `up_to` - number of the last migration to apply.
 '''
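A short sketch of running migrations as described above, reusing `db` from the previous sketch; the package name is taken from the migrations test later in this commit:

    db.migrate('tests.sample_migrations')      # apply all migrations in the package
    db.migrate('tests.sample_migrations', 6)   # or stop after migration number 6 (the `up_to` argument)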

View File

@@ -1,3 +1,4 @@
+from __future__ import unicode_literals
 from .utils import comma_join

View File

@@ -1,3 +1,4 @@
+from __future__ import unicode_literals
 from six import string_types, text_type, binary_type
 import datetime
 import pytz

View File

@@ -1,3 +1,4 @@
+from __future__ import unicode_literals
 from logging import getLogger
 from six import with_metaclass
@@ -18,7 +19,7 @@ class ModelBase(type):
 ad_hoc_model_cache = {}
 def __new__(cls, name, bases, attrs):
-new_cls = super(ModelBase, cls).__new__(cls, name, bases, attrs)
+new_cls = super(ModelBase, cls).__new__(cls, str(name), bases, attrs)
 # Collect fields from parent classes
 base_fields = []
 for base in bases:
@@ -76,7 +77,7 @@ class ModelBase(type):
 class Model(with_metaclass(ModelBase)):
 '''
 A base class for ORM models. Each model class represent a ClickHouse table. For example:
 class CPUStats(Model):
 timestamp = DateTimeField()
 cpu_id = UInt16Field()
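A hedged sketch of instantiating the docstring's example model; keyword arguments are assumed to be validated and converted by the corresponding fields:

    from infi.clickhouse_orm.models import Model
    from infi.clickhouse_orm.fields import DateTimeField, UInt16Field

    class CPUStats(Model):                 # same fields as the docstring example
        timestamp = DateTimeField()
        cpu_id = UInt16Field()

    stats = CPUStats(timestamp='2017-08-23 08:15:18', cpu_id=1)
    print(stats.timestamp, stats.cpu_id)   # field values are exposed as plain attributes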
@@ -123,7 +124,7 @@ class Model(with_metaclass(ModelBase)):
 def set_database(self, db):
 '''
 Sets the `Database` that this model instance belongs to.
 This is done automatically when the instance is read from the database or written to it.
 '''
 # This can not be imported globally due to circular import
@@ -133,7 +134,7 @@ class Model(with_metaclass(ModelBase)):
 def get_database(self):
 '''
 Gets the `Database` that this model instance belongs to.
 Returns `None` unless the instance was read from the database or written to it.
 '''
 return self._database
@@ -214,7 +215,7 @@ class Model(with_metaclass(ModelBase)):
 def to_dict(self, include_readonly=True, field_names=None):
 '''
 Returns the instance's column values as a dict.
 - `include_readonly`: if false, returns only fields that can be inserted into database.
 - `field_names`: an iterable of field names to return (optional)
 '''
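A brief hedged illustration of `to_dict()` as documented above, reusing the hypothetical `CPUStats` instance from the earlier sketch:

    stats.to_dict()                          # every column, readonly/materialized ones included
    stats.to_dict(include_readonly=False)    # only columns that can be inserted into the database
    stats.to_dict(field_names=('cpu_id',))   # restrict the output to selected fields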
@@ -233,7 +234,7 @@ class Model(with_metaclass(ModelBase)):
 '''
 return QuerySet(cls, database)
 class BufferModel(Model):
 @classmethod

View File

@@ -1,3 +1,4 @@
+from __future__ import unicode_literals
 import six
 import pytz
 from copy import copy
@@ -167,6 +168,7 @@ class Q(object):
 return q
+@six.python_2_unicode_compatible
 class QuerySet(object):
 """
 A queryset is an object that represents a database query using a specific `Model`.
@@ -190,7 +192,6 @@ class QuerySet(object):
 """
 Iterates over the model instances matching this queryset
 """
-print self.as_sql()
 return self._database.select(self.as_sql(), self._model_cls)
 def __bool__(self):
@@ -202,7 +203,7 @@ class QuerySet(object):
 def __nonzero__(self): # Python 2 compatibility
 return type(self).__bool__(self)
-def __unicode__(self):
+def __str__(self):
 return self.as_sql()
 def __getitem__(self, s):
@@ -211,7 +212,7 @@ class QuerySet(object):
 assert s >= 0, 'negative indexes are not supported'
 qs = copy(self)
 qs._limits = (s, 1)
-return iter(qs).next()
+return six.next(iter(qs))
 else:
 # Slice
 assert s.step in (None, 1), 'step is not supported in slices'
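Taken together, these changes make querysets behave the same on Python 2 and 3: `str(qs)` now returns the generated SQL (via `@six.python_2_unicode_compatible`), a stray debug `print` is gone, and single-item indexing no longer relies on the Python-2-only `.next()`. A hedged usage sketch; `Person` is a hypothetical model and the database name an assumption:

    from infi.clickhouse_orm.database import Database

    db = Database('test-db')
    qs = Person.objects_in(db).filter(first_name='Warren')
    print(str(qs))        # the SQL produced by as_sql(), identical on Python 2 and 3
    first = qs[0]         # a single model instance (six.next over the iterator)
    subset = qs[10:20]    # a new queryset limited to rows 10..19
    # qs[-1] or qs[::2] raise AssertionError: negative indexes and slice steps are unsupported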

View File

@@ -2,6 +2,7 @@
 This file contains system readonly models that can be got from database
 https://clickhouse.yandex/reference_en.html#System tables
 """
+from __future__ import unicode_literals
 from six import string_types
 from .database import Database
@@ -25,7 +26,6 @@ class SystemPart(Model):
 engine = StringField() # Name of the table engine, without parameters.
 partition = StringField() # Name of the partition, in the format YYYYMM.
 name = StringField() # Name of the part.
-replicated = UInt8Field() # Whether the part belongs to replicated data.
 # Whether the part is used in a table, or is no longer needed and will be deleted soon.
 # Inactive parts remain after merging.
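Because recent ClickHouse versions no longer expose a `replicated` column in `system.parts`, the field is dropped from the model, and any code that read `part.replicated` must be updated. A minimal hedged sketch using `SystemPart.get()` as in the tests below; the database name is an assumption:

    from infi.clickhouse_orm.database import Database
    from infi.clickhouse_orm.system_models import SystemPart

    db = Database('test-db')
    for part in SystemPart.get(db):
        print(part.partition, part.name, part.engine)   # fields still defined on the model
        # part.replicated would now raise AttributeError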

View File

@@ -1,3 +1,4 @@
+from __future__ import unicode_literals
 from six import string_types, binary_type, text_type, PY3
 import codecs
 import re
@@ -43,7 +44,7 @@ def parse_tsv(line):
 line = line.decode()
 if line and line[-1] == '\n':
 line = line[:-1]
-return [unescape(value) for value in line.split('\t')]
+return [unescape(value) for value in line.split(str('\t'))]
 def parse_array(array_string):
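With `unicode_literals` now in effect, `'\t'` is a unicode literal, so wrapping it in `str()` keeps the separator a native `str` on Python 2; callers should see no behavioural change. A rough sketch of what `parse_tsv` returns (values are also unescaped, as the array-fields test further down demonstrates):

    from infi.clickhouse_orm.utils import parse_tsv

    parse_tsv(b'hello\tworld\n')   # -> ['hello', 'world']
    parse_tsv('a\\nb\tc')          # escaped newline inside a value is unescaped -> ['a\nb', 'c']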

View File

@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
+from __future__ import unicode_literals
 import unittest
 from infi.clickhouse_orm.database import Database

View File

@@ -1,3 +1,4 @@
+from __future__ import unicode_literals
 import unittest
 from datetime import date
@@ -65,5 +66,3 @@ class ModelWithAliasFields(Model):
 alias_date = DateField(alias='date_field')
 engine = MergeTree('date_field', ('date_field',))

View File

@@ -1,3 +1,4 @@
+from __future__ import unicode_literals
 import unittest
 from datetime import date
@@ -18,8 +19,8 @@ class ArrayFieldsTest(unittest.TestCase):
 def test_insert_and_select(self):
 instance = ModelWithArrays(
 date_field='2016-08-30',
 arr_str=['goodbye,', 'cruel', 'world', 'special chars: ,"\\\'` \n\t\\[]'],
 arr_date=['2010-01-01']
 )
 self.database.insert([instance])
@@ -52,11 +53,11 @@ class ArrayFieldsTest(unittest.TestCase):
 self.assertEquals(parse_array("[1, 2, 395, -44]"), ["1", "2", "395", "-44"])
 self.assertEquals(parse_array("['big','mouse','','!']"), ["big", "mouse", "", "!"])
 self.assertEquals(parse_array(unescape("['\\r\\n\\0\\t\\b']")), ["\r\n\0\t\b"])
 for s in ("",
 "[",
 "]",
 "[1, 2",
 "3, 4]",
 "['aaa', 'aaa]"):
 with self.assertRaises(ValueError):
 parse_array(s)
@@ -70,4 +71,3 @@ class ModelWithArrays(Model):
 arr_date = ArrayField(DateField())
 engine = MergeTree('date_field', ('date_field',))

View File

@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
+from __future__ import unicode_literals
 import unittest
 from infi.clickhouse_orm.models import BufferModel
@@ -23,7 +23,5 @@ class BufferTestCase(TestCaseWithData):
 class PersonBuffer(BufferModel, Person):
 engine = Buffer(Person)

View File

@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
+from __future__ import unicode_literals
 import unittest
 from infi.clickhouse_orm.database import Database, DatabaseException
@@ -99,7 +99,7 @@ class DatabaseTestCase(TestCaseWithData):
 page_a = self.database.paginate(Person, 'first_name, last_name', -1, page_size)
 page_b = self.database.paginate(Person, 'first_name, last_name', page_a.pages_total, page_size)
 self.assertEquals(page_a[1:], page_b[1:])
 self.assertEquals([obj.to_tsv() for obj in page_a.objects],
 [obj.to_tsv() for obj in page_b.objects])
 def test_pagination_invalid_page(self):

View File

@@ -1,3 +1,4 @@
+from __future__ import unicode_literals
 import unittest
 from infi.clickhouse_orm.database import Database, DatabaseException

View File

@@ -1,3 +1,4 @@
+from __future__ import unicode_literals
 import unittest
 from infi.clickhouse_orm.database import Database

View File

@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
+from __future__ import unicode_literals
 import unittest
 from infi.clickhouse_orm.database import Database
@@ -37,7 +37,7 @@ class FixedStringFieldsTest(unittest.TestCase):
 query = 'SELECT * from $table ORDER BY date_field'
 results = list(self.database.select(query, FixedStringModel))
 self._assert_sample_data(results)
 def test_ad_hoc_model(self):
 self._insert_sample_data()
 query = 'SELECT * from $db.fixedstringmodel ORDER BY date_field'

View File

@@ -1,3 +1,4 @@
+from __future__ import unicode_literals
 import unittest
 import datetime
 import pytz
@@ -49,4 +50,3 @@ class Model1(ParentModel):
 class Model2(ParentModel):
 float_field = Float32Field()

View File

@@ -1,3 +1,4 @@
+from __future__ import unicode_literals
 import unittest
 from datetime import date
@@ -65,5 +66,3 @@ class ModelWithMaterializedFields(Model):
 mat_date = DateField(materialized=u'toDate(date_time_field)')
 engine = MergeTree('mat_date', ('mat_date',))

View File

@@ -1,3 +1,4 @@
+from __future__ import unicode_literals
 import unittest
 from infi.clickhouse_orm.database import Database
@@ -54,11 +55,11 @@ class MigrationsTestCase(unittest.TestCase):
 # Altering enum fields
 self.database.migrate('tests.sample_migrations', 6)
 self.assertTrue(self.tableExists(EnumModel1))
 self.assertEquals(self.getTableFields(EnumModel1),
 [('date', 'Date'), ('f1', "Enum8('dog' = 1, 'cat' = 2, 'cow' = 3)")])
 self.database.migrate('tests.sample_migrations', 7)
 self.assertTrue(self.tableExists(EnumModel1))
 self.assertEquals(self.getTableFields(EnumModel2),
 [('date', 'Date'), ('f1', "Enum16('dog' = 1, 'cat' = 2, 'horse' = 3, 'pig' = 4)")])
 self.database.migrate('tests.sample_migrations', 8)
 self.assertTrue(self.tableExists(MaterializedModel))
@@ -157,4 +158,4 @@ class AliasModel(Model):
 @classmethod
 def table_name(cls):
 return 'alias_date'

View File

@@ -1,3 +1,4 @@
+from __future__ import unicode_literals
 import unittest
 import datetime
 import pytz
@@ -89,4 +90,3 @@ class SimpleModel(Model):
 alias_field = Float32Field(alias='float_field')
 engine = MergeTree('date_field', ('int_field', 'date_field'))

View File

@@ -1,3 +1,4 @@
+from __future__ import unicode_literals
 import unittest
 import pytz
@@ -75,18 +76,10 @@ class NullableFieldsTest(unittest.TestCase):
 def _insert_sample_data(self):
 dt = date(1970, 1, 1)
 self.database.insert([
-ModelWithNullable(date_field='2016-08-30',
-null_str='', null_int=42, null_date=dt,
-null_array=None),
-ModelWithNullable(date_field='2016-08-30',
-null_str='nothing', null_int=None, null_date=None,
-null_array=[1, 2, 3]),
-ModelWithNullable(date_field='2016-08-31',
-null_str=None, null_int=42, null_date=dt,
-null_array=[]),
-ModelWithNullable(date_field='2016-08-31',
-null_str=None, null_int=None, null_date=None,
-null_array=[3, 2, 1])
+ModelWithNullable(date_field='2016-08-30', null_str='', null_int=42, null_date=dt),
+ModelWithNullable(date_field='2016-08-30', null_str='nothing', null_int=None, null_date=None),
+ModelWithNullable(date_field='2016-08-31', null_str=None, null_int=42, null_date=dt),
+ModelWithNullable(date_field='2016-08-31', null_str=None, null_int=None, null_date=None)
 ])
 def _assert_sample_data(self, results):
@@ -105,11 +98,6 @@ class NullableFieldsTest(unittest.TestCase):
 self.assertIsNone(results[3].null_str)
 self.assertIsNone(results[3].null_date)
-self.assertIsNone(results[0].null_array)
-self.assertEquals(results[1].null_array, [1, 2, 3])
-self.assertEquals(results[2].null_array, [])
-self.assertEquals(results[3].null_array, [3, 2, 1])
 def test_insert_and_select(self):
 self._insert_sample_data()
 query = 'SELECT * from $table ORDER BY date_field'
@@ -129,6 +117,5 @@ class ModelWithNullable(Model):
 null_str = NullableField(StringField(), extra_null_values={''})
 null_int = NullableField(Int32Field())
 null_date = NullableField(DateField())
-null_array = NullableField(ArrayField(Int32Field()))
 engine = MergeTree('date_field', ('date_field',))

View File

@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
+from __future__ import unicode_literals, print_function
 import unittest
 from infi.clickhouse_orm.database import Database
@@ -140,7 +140,7 @@ class QuerySetTestCase(TestCaseWithData):
 def test_slicing(self):
 db = Database('system')
-numbers = range(100)
+numbers = list(range(100))
 qs = Numbers.objects_in(db)
 self.assertEquals(qs[0].number, numbers[0])
 self.assertEquals(qs[5].number, numbers[5])
@@ -211,7 +211,7 @@ class AggregateTestCase(TestCaseWithData):
 def test_aggregate_no_grouping(self):
 qs = Person.objects_in(self.database).aggregate(average_height='avg(height)', count='count()')
-print qs.as_sql()
+print(qs.as_sql())
 self.assertEquals(qs.count(), 1)
 for row in qs:
 self.assertAlmostEqual(row.average_height, 1.6923, places=4)
@@ -220,14 +220,14 @@ class AggregateTestCase(TestCaseWithData):
 def test_aggregate_with_filter(self):
 # When filter comes before aggregate
 qs = Person.objects_in(self.database).filter(first_name='Warren').aggregate(average_height='avg(height)', count='count()')
-print qs.as_sql()
+print(qs.as_sql())
 self.assertEquals(qs.count(), 1)
 for row in qs:
 self.assertAlmostEqual(row.average_height, 1.675, places=4)
 self.assertEquals(row.count, 2)
 # When filter comes after aggregate
 qs = Person.objects_in(self.database).aggregate(average_height='avg(height)', count='count()').filter(first_name='Warren')
-print qs.as_sql()
+print(qs.as_sql())
 self.assertEquals(qs.count(), 1)
 for row in qs:
 self.assertAlmostEqual(row.average_height, 1.675, places=4)
@@ -235,7 +235,7 @@ class AggregateTestCase(TestCaseWithData):
 def test_aggregate_with_implicit_grouping(self):
 qs = Person.objects_in(self.database).aggregate('first_name', average_height='avg(height)', count='count()')
-print qs.as_sql()
+print(qs.as_sql())
 self.assertEquals(qs.count(), 94)
 total = 0
 for row in qs:
@@ -246,7 +246,7 @@ class AggregateTestCase(TestCaseWithData):
 def test_aggregate_with_explicit_grouping(self):
 qs = Person.objects_in(self.database).aggregate(weekday='toDayOfWeek(birthday)', count='count()').group_by('weekday')
-print qs.as_sql()
+print(qs.as_sql())
 self.assertEquals(qs.count(), 7)
 total = 0
 for row in qs:
@@ -256,7 +256,7 @@ class AggregateTestCase(TestCaseWithData):
 def test_aggregate_with_order_by(self):
 qs = Person.objects_in(self.database).aggregate(weekday='toDayOfWeek(birthday)', count='count()').group_by('weekday')
 days = [row.weekday for row in qs.order_by('weekday')]
-self.assertEquals(days, range(1, 8))
+self.assertEquals(days, list(range(1, 8)))
 def test_aggregate_with_indexing(self):
 qs = Person.objects_in(self.database).aggregate(weekday='toDayOfWeek(birthday)', count='count()').group_by('weekday')
@@ -326,4 +326,4 @@ class SampleModel(Model):
 class Numbers(Model):
 number = UInt64Field()

View File

@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
+from __future__ import unicode_literals
 import unittest
 import six

View File

@@ -1,3 +1,4 @@
+from __future__ import unicode_literals
 import unittest
 from infi.clickhouse_orm.fields import *
 from datetime import date, datetime
@@ -10,7 +11,7 @@ class SimpleFieldsTest(unittest.TestCase):
 f = DateTimeField()
 epoch = datetime(1970, 1, 1, tzinfo=pytz.utc)
 # Valid values
 for value in (date(1970, 1, 1), datetime(1970, 1, 1), epoch,
 epoch.astimezone(pytz.timezone('US/Eastern')), epoch.astimezone(pytz.timezone('Asia/Jerusalem')),
 '1970-01-01 00:00:00', '1970-01-17 00:00:17', '0000-00-00 00:00:00', 0):
 dt = f.to_python(value, pytz.utc)
@@ -60,4 +61,4 @@ class SimpleFieldsTest(unittest.TestCase):
 # Range check
 for value in (-1, 1000):
 with self.assertRaises(ValueError):
 f.validate(value)

View File

@@ -1,6 +1,6 @@
+from __future__ import unicode_literals
 import unittest
 from datetime import date
 import os
 import shutil
 from infi.clickhouse_orm.database import Database
@@ -11,7 +11,8 @@ from infi.clickhouse_orm.system_models import SystemPart
 class SystemPartTest(unittest.TestCase):
-BACKUP_DIR = '/opt/clickhouse/shadow/'
+BACKUP_DIRS = ['/var/lib/clickhouse/shadow', '/opt/clickhouse/shadow/']
 def setUp(self):
 self.database = Database('test-db')
@@ -22,10 +23,11 @@ class SystemPartTest(unittest.TestCase):
 self.database.drop_database()
 def _get_backups(self):
-if not os.path.exists(self.BACKUP_DIR):
-return []
-_, dirnames, _ = next(os.walk(self.BACKUP_DIR))
+for dir in self.BACKUP_DIRS:
+if os.path.exists(dir):
+_, dirnames, _ = next(os.walk(dir))
 return dirnames
+raise unittest.SkipTest('Cannot find backups dir')
 def test_get_all(self):
 parts = SystemPart.get(self.database)
@@ -63,8 +65,6 @@ class SystemPartTest(unittest.TestCase):
 parts[0].freeze()
 backups = set(self._get_backups())
 self.assertEqual(len(backups), len(prev_backups) + 1)
-# Clean created backup
-shutil.rmtree(self.BACKUP_DIR + '{0}'.format(list(backups - prev_backups)[0]))
 def test_fetch(self):
 # TODO Not tested, as I have no replication set