Compare commits


No commits in common. "develop" and "v2.1.0" have entirely different histories.

13 changed files with 39 additions and 100 deletions

View File

@@ -1,22 +1,6 @@
 Change Log
 ==========
-v2.1.3
-------
-- Fix pagination for models with alias columns
-v2.1.2
-------
-- Add `QuerySet.model` to support django-rest-framework 3
-v2.1.1
-------
-- Improve support of ClickHouse v21.9 (mangototango)
-- Ignore non-numeric parts in ClickHouse version (mangototango)
-- Fix precedence of ~ operator in Q objects (mangototango)
-- Support for adding a column to the beginning of a table (meanmail)
-- Add stddevPop and stddevSamp functions (k.peskov)
 v2.1.0
 ------
 - Support for model constraints
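
For context, a minimal sketch (not part of the diff) of how two of the develop-side entries above are used. `Person` here is a cut-down version of the model the tests further down rely on, and the expected SQL string is taken verbatim from the removed `test_precedence_of_negation` test:

```python
# Illustrative sketch only -- a trimmed Person model with the fields the
# tests below rely on (first_name, last_name, height).
from infi.clickhouse_orm.models import Model
from infi.clickhouse_orm.fields import StringField, Float32Field
from infi.clickhouse_orm.engines import MergeTree
from infi.clickhouse_orm.query import Q
from infi.clickhouse_orm.funcs import F

class Person(Model):
    first_name = StringField()
    last_name = StringField()
    height = Float32Field()
    engine = MergeTree(order_by=('first_name', 'last_name'))

# "Fix precedence of ~ operator in Q objects": on develop, a negated Q keeps
# its NOT when combined with & or |.
cond = Q(last_name='b') & ~Q(first_name='a')
print(cond.to_sql(Person))  # (last_name = 'b') AND (NOT (first_name = 'a'))

# "Add stddevPop and stddevSamp functions": new aggregate helpers on F.
stddev_expr = F.stddevPop(Person.height)  # usable in aggregate() / select expressions
```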

View File

@@ -30,7 +30,7 @@ homepage = https://github.com/Infinidat/infi.clickhouse_orm
 [isolated-python]
 recipe = infi.recipe.python
-version = v3.8.12
+version = v3.8.0.2
 [setup.py]
 recipe = infi.recipe.template.version

View File

@@ -6,10 +6,10 @@ idna==2.9
 infi.clickhouse-orm==2.0.1
 iso8601==0.1.12
 itsdangerous==1.1.0
-Jinja2==2.11.3
+Jinja2==2.11.2
 MarkupSafe==1.1.1
 pygal==2.4.0
 pytz==2020.1
 requests==2.23.0
-urllib3==1.26.5
+urllib3==1.25.9
 Werkzeug==1.0.1

View File

@@ -51,11 +51,6 @@ class ServerError(DatabaseException):
 Code:\ (?P<code>\d+),
 \ e\.displayText\(\)\ =\ (?P<type1>[^ \n]+):\ (?P<msg>.+)
 ''', re.VERBOSE | re.DOTALL),
-# ClickHouse v21+
-re.compile(r'''
-Code:\ (?P<code>\d+).
-\ (?P<type1>[^ \n]+):\ (?P<msg>.+)
-''', re.VERBOSE | re.DOTALL),
 )
 @classmethod
@@ -319,8 +314,7 @@ class Database(object):
 elif page_num < 1:
 raise ValueError('Invalid page number: %d' % page_num)
 offset = (page_num - 1) * page_size
-query = 'SELECT {} FROM $table'.format(", ".join(model_class.fields().keys()))
+query = 'SELECT * FROM $table'
 if conditions:
 if isinstance(conditions, Q):
 conditions = conditions.to_sql(model_class)
@@ -414,7 +408,7 @@ class Database(object):
 except ServerError as e:
 logger.exception('Cannot determine server version (%s), assuming 1.1.0', e)
 ver = '1.1.0'
-return tuple(int(n) for n in ver.split('.') if n.isdigit()) if as_tuple else ver
+return tuple(int(n) for n in ver.split('.')) if as_tuple else ver
 def _is_existing_database(self):
 r = self._send("SELECT count() FROM system.databases WHERE name = '%s'" % self.db_name)
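
A short sketch of the public API behind the pagination hunk above: `Database.paginate()` issues the SELECT shown there (the explicit column list on develop is the change-log fix for models with alias columns; v2.1.0 used `SELECT *`). The connection details are placeholder values, and `Person` is the model sketched after the change log:

```python
# Illustrative sketch only; reuses the Person model from the sketch after the
# change log. Database name and URL are placeholder values.
from infi.clickhouse_orm.database import Database

db = Database('demo_db', db_url='http://localhost:8123/')
db.create_table(Person)

# paginate() builds the query from the hunk above and returns a Page namedtuple.
page = db.paginate(Person, order_by='last_name, first_name', page_num=1, page_size=100)
print(page.number_of_objects, page.pages_total)
for person in page.objects:
    print(person.first_name, person.last_name)
```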

View File

@@ -109,7 +109,7 @@ class Field(FunctionOperatorsMixin):
 elif self.default:
 default = self.to_db_string(self.default)
 sql += ' DEFAULT %s' % default
-if self.codec and db and db.has_codec_support and not self.alias:
+if self.codec and db and db.has_codec_support:
 sql += ' CODEC(%s)' % self.codec
 return sql
@@ -468,16 +468,9 @@ class BaseEnumField(Field):
 return value
 try:
 if isinstance(value, str):
-try:
-return self.enum_cls[value]
-except Exception:
-return self.enum_cls(value)
+return self.enum_cls[value]
 if isinstance(value, bytes):
-decoded = value.decode('UTF-8')
-try:
-return self.enum_cls[decoded]
-except Exception:
-return self.enum_cls(decoded)
+return self.enum_cls[value.decode('UTF-8')]
 if isinstance(value, int):
 return self.enum_cls(value)
 except (KeyError, ValueError):
@@ -672,3 +665,4 @@ class LowCardinalityField(Field):
 # Expose only relevant classes in import *
 __all__ = get_subclass_names(locals(), Field)
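
The `and not self.alias` condition above is what decides whether a `CODEC` clause is emitted for alias columns. A tiny sketch of the field declarations involved, mirroring the `CompressedModel` test further down:

```python
# Illustrative sketch only, mirroring CompressedModel from the test below.
from infi.clickhouse_orm.fields import Float32Field

float_field = Float32Field(codec='NONE')
alias_field = Float32Field(alias='float_field', codec='ZSTD(4)')
# In generated DDL, develop omits the CODEC clause for alias fields (the test
# below expects ''), while v2.1.0 still emits CODEC(ZSTD(4)).
```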

View File

@@ -391,11 +391,11 @@ class F(Cond, FunctionOperatorsMixin, metaclass=FMeta):
 return F('toYear', d)
 @staticmethod
-def toISOYear(d, timezone=NO_VALUE):
+def toISOYear(d, timezone=''):
 return F('toISOYear', d, timezone)
 @staticmethod
-def toQuarter(d, timezone=NO_VALUE):
+def toQuarter(d, timezone=''):
 return F('toQuarter', d, timezone) if timezone else F('toQuarter', d)
 @staticmethod
@@ -403,11 +403,11 @@ class F(Cond, FunctionOperatorsMixin, metaclass=FMeta):
 return F('toMonth', d)
 @staticmethod
-def toWeek(d, mode=0, timezone=NO_VALUE):
+def toWeek(d, mode=0, timezone=''):
 return F('toWeek', d, mode, timezone)
 @staticmethod
-def toISOWeek(d, timezone=NO_VALUE):
+def toISOWeek(d, timezone=''):
 return F('toISOWeek', d, timezone) if timezone else F('toISOWeek', d)
 @staticmethod
@@ -483,7 +483,7 @@ class F(Cond, FunctionOperatorsMixin, metaclass=FMeta):
 return F('toStartOfDay', d)
 @staticmethod
-def toTime(d, timezone=NO_VALUE):
+def toTime(d, timezone=''):
 return F('toTime', d, timezone)
 @staticmethod
@@ -491,47 +491,47 @@ class F(Cond, FunctionOperatorsMixin, metaclass=FMeta):
 return F('toTimeZone', dt, timezone)
 @staticmethod
-def toUnixTimestamp(dt, timezone=NO_VALUE):
+def toUnixTimestamp(dt, timezone=''):
 return F('toUnixTimestamp', dt, timezone)
 @staticmethod
-def toYYYYMM(dt, timezone=NO_VALUE):
+def toYYYYMM(dt, timezone=''):
 return F('toYYYYMM', dt, timezone) if timezone else F('toYYYYMM', dt)
 @staticmethod
-def toYYYYMMDD(dt, timezone=NO_VALUE):
+def toYYYYMMDD(dt, timezone=''):
 return F('toYYYYMMDD', dt, timezone) if timezone else F('toYYYYMMDD', dt)
 @staticmethod
-def toYYYYMMDDhhmmss(dt, timezone=NO_VALUE):
+def toYYYYMMDDhhmmss(dt, timezone=''):
 return F('toYYYYMMDDhhmmss', dt, timezone) if timezone else F('toYYYYMMDDhhmmss', dt)
 @staticmethod
-def toRelativeYearNum(d, timezone=NO_VALUE):
+def toRelativeYearNum(d, timezone=''):
 return F('toRelativeYearNum', d, timezone)
 @staticmethod
-def toRelativeMonthNum(d, timezone=NO_VALUE):
+def toRelativeMonthNum(d, timezone=''):
 return F('toRelativeMonthNum', d, timezone)
 @staticmethod
-def toRelativeWeekNum(d, timezone=NO_VALUE):
+def toRelativeWeekNum(d, timezone=''):
 return F('toRelativeWeekNum', d, timezone)
 @staticmethod
-def toRelativeDayNum(d, timezone=NO_VALUE):
+def toRelativeDayNum(d, timezone=''):
 return F('toRelativeDayNum', d, timezone)
 @staticmethod
-def toRelativeHourNum(d, timezone=NO_VALUE):
+def toRelativeHourNum(d, timezone=''):
 return F('toRelativeHourNum', d, timezone)
 @staticmethod
-def toRelativeMinuteNum(d, timezone=NO_VALUE):
+def toRelativeMinuteNum(d, timezone=''):
 return F('toRelativeMinuteNum', d, timezone)
 @staticmethod
-def toRelativeSecondNum(d, timezone=NO_VALUE):
+def toRelativeSecondNum(d, timezone=''):
 return F('toRelativeSecondNum', d, timezone)
 @staticmethod
@@ -555,7 +555,7 @@ class F(Cond, FunctionOperatorsMixin, metaclass=FMeta):
 return F('timeSlots', start_time, F.toUInt32(duration))
 @staticmethod
-def formatDateTime(d, format, timezone=NO_VALUE):
+def formatDateTime(d, format, timezone=''):
 return F('formatDateTime', d, format, timezone)
 @staticmethod
@@ -1633,16 +1633,6 @@ class F(Cond, FunctionOperatorsMixin, metaclass=FMeta):
 def varSamp(x):
 return F('varSamp', x)
 @staticmethod
-@aggregate
-def stddevPop(expr):
-return F('stddevPop', expr)
-@staticmethod
-@aggregate
-def stddevSamp(expr):
-return F('stddevSamp', expr)
-@staticmethod
 @aggregate
 @parametric
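
The repeated change above only swaps the no-timezone default (a `NO_VALUE` sentinel on develop, an empty string on v2.1.0). A short sketch of the call pattern using `toYYYYMM`, whose omitted-timezone branch is visible in the hunk; the timezone string is an example value:

```python
# Illustrative sketch only.
from infi.clickhouse_orm.funcs import F

expr = F.toYYYYMM(F.now(), 'Europe/Berlin')
print(expr.to_sql())                 # e.g. toYYYYMM(now(), 'Europe/Berlin')
print(F.toYYYYMM(F.now()).to_sql())  # e.g. toYYYYMM(now()) -- timezone left out
```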

View File

@@ -83,12 +83,10 @@ class AlterTable(ModelOperation):
 is_regular_field = not (field.materialized or field.alias)
 if name not in table_fields:
 logger.info(' Add column %s', name)
+assert prev_name, 'Cannot add a column to the beginning of the table'
 cmd = 'ADD COLUMN %s %s' % (name, field.get_sql(db=database))
 if is_regular_field:
-if prev_name:
-cmd += ' AFTER %s' % prev_name
-else:
-cmd += ' FIRST'
+cmd += ' AFTER %s' % prev_name
 self._alter_table(database, cmd)
 if is_regular_field:
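
This hunk is where `AlterTable` decides how to place a newly detected column: develop can emit `ADD COLUMN ... FIRST` for a column with no predecessor (per the change log), while v2.1.0 asserts that a preceding column exists and always uses `AFTER`. A sketch of a migration that would exercise that path; the model and column names are illustrative:

```python
# Illustrative sketch only -- a migration module that applies AlterTable to a
# model whose newest column was added before all existing ones.
from infi.clickhouse_orm import migrations
from infi.clickhouse_orm.models import Model
from infi.clickhouse_orm.fields import StringField, Int32Field
from infi.clickhouse_orm.engines import MergeTree

class Event(Model):
    tag = StringField()    # new column, placed first in the model
    name = StringField()   # existing columns follow
    count = Int32Field()
    engine = MergeTree(order_by=('name',))

# develop: roughly ALTER TABLE ... ADD COLUMN tag String FIRST
# v2.1.0:  AssertionError ('Cannot add a column to the beginning of the table')
operations = [migrations.AlterTable(Event)]
```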

View File

@@ -209,10 +209,10 @@ class Q(object):
 @classmethod
 def _construct_from(cls, l_child, r_child, mode):
-if mode == l_child._mode and not l_child._negate:
+if mode == l_child._mode:
 q = deepcopy(l_child)
 q._children.append(deepcopy(r_child))
-elif mode == r_child._mode and not r_child._negate:
+elif mode == r_child._mode:
 q = deepcopy(r_child)
 q._children.append(deepcopy(l_child))
 else:
@@ -292,7 +292,6 @@ class QuerySet(object):
 Initializer. It is possible to create a queryset like this, but the standard
 way is to use `MyModel.objects_in(database)`.
 """
-self.model = model_cls
 self._model_cls = model_cls
 self._database = database
 self._order_by = []
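
Per the docstring above, querysets are normally obtained through `objects_in()`. The `self.model` line that v2.1.0 drops is the attribute django-rest-framework 3 looks for on queryset-like objects (see the change log). A short sketch, continuing the `Person`/`db` examples from the earlier sketches:

```python
# Illustrative sketch only; Person and db come from the earlier sketches.
qs = Person.objects_in(db).filter(last_name='Smith')
for person in qs:
    print(person.first_name, person.height)

# develop only: the queryset exposes its model class, as DRF 3 expects.
assert qs.model is Person
```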

View File

@@ -106,7 +106,7 @@ class CompressedFieldsTestCase(unittest.TestCase):
 ('nullable_field', 'CODEC(ZSTD(1))'),
 ('array_field', 'CODEC(Delta(2), LZ4HC(0))'),
 ('float_field', 'CODEC(NONE)'),
-('alias_field', '')])
+('alias_field', 'CODEC(ZSTD(4))')])
@@ -120,4 +120,4 @@ class CompressedModel(Model):
 float_field = Float32Field(codec='NONE')
 alias_field = Float32Field(alias='float_field', codec='ZSTD(4)')
-engine = MergeTree('datetime_field', ('uint64_field', 'datetime_field'))
+engine = MergeTree('datetime_field', ('uint64_field', 'datetime_field'))

View File

@@ -181,13 +181,12 @@ class DatabaseTestCase(TestCaseWithData):
 Database(self.database.db_name, username='default', password='wrong')
 exc = cm.exception
 print(exc.code, exc.message)
 if exc.code == 193: # ClickHouse version < 20.3
 self.assertTrue(exc.message.startswith('Wrong password for user default'))
 elif exc.code == 516: # ClickHouse version >= 20.3
 self.assertTrue(exc.message.startswith('default: Authentication failed'))
 else:
-raise Exception('Unexpected error code - %s %s' % (exc.code, exc.message))
+raise Exception('Unexpected error code - %s' % exc.code)
 def test_nonexisting_db(self):
 db = Database('db_not_here', autocreate=False)
@@ -252,8 +251,6 @@
 from infi.clickhouse_orm.models import ModelBase
 query = "SELECT DISTINCT type FROM system.columns"
 for row in self.database.select(query):
-if row.type.startswith('Map'):
-continue # Not supported yet
 ModelBase.create_ad_hoc_field(row.type)
@@ -274,12 +271,7 @@
 query = "SELECT name FROM system.tables WHERE database='system'"
 for row in self.database.select(query):
 print(row.name)
-if row.name in ('distributed_ddl_queue',):
-continue # Not supported
-try:
-model = self.database.get_model_for_table(row.name, system_table=True)
-except NotImplementedError:
-continue # Table contains an unsupported field type
+model = self.database.get_model_for_table(row.name, system_table=True)
 self.assertTrue(model.is_system_model())
 self.assertTrue(model.is_read_only())
 self.assertEqual(model.table_name(), row.name)

View File

@@ -105,7 +105,7 @@ class HierarchicalDictionaryTest(DictionaryTestMixin, unittest.TestCase):
 def test_dictgethierarchy(self):
 self._test_func(F.dictGetHierarchy(self.dict_name, F.toUInt64(3)), [3, 2, 1])
-self._test_func(F.dictGetHierarchy(self.dict_name, F.toUInt64(99)), [])
+self._test_func(F.dictGetHierarchy(self.dict_name, F.toUInt64(99)), [99])
 def test_dictisin(self):
 self._test_func(F.dictIsIn(self.dict_name, F.toUInt64(3), F.toUInt64(1)), 1)

View File

@@ -17,10 +17,10 @@ class _EnginesHelperTestCase(unittest.TestCase):
 class EnginesTestCase(_EnginesHelperTestCase):
-def _create_and_insert(self, model_class, **kwargs):
+def _create_and_insert(self, model_class):
 self.database.create_table(model_class)
 self.database.insert([
-model_class(date='2017-01-01', event_id=23423, event_group=13, event_count=7, event_version=1, **kwargs)
+model_class(date='2017-01-01', event_id=23423, event_group=13, event_count=7, event_version=1)
 ])
 def test_merge_tree(self):
@@ -155,7 +155,7 @@ class EnginesTestCase(_EnginesHelperTestCase):
 )
 self._create_and_insert(TestModel)
-self._create_and_insert(TestCollapseModel, sign=1)
+self._create_and_insert(TestCollapseModel)
 # Result order may be different, lets sort manually
 parts = sorted(list(SystemPart.get(self.database)), key=lambda x: x.table)
@@ -188,7 +188,7 @@
 )
 self._create_and_insert(TestModel)
-self._create_and_insert(TestCollapseModel, sign=1)
+self._create_and_insert(TestCollapseModel)
 self.assertEqual(2, len(list(SystemPart.get(self.database))))

View File

@@ -302,18 +302,6 @@ class QuerySetTestCase(TestCaseWithData):
 self.assertEqual(qs.conditions_as_sql(),
 "(first_name = 'a') AND (greater(`height`, 1.7)) AND (last_name = 'b')")
-def test_precedence_of_negation(self):
-p = ~Q(first_name='a')
-q = Q(last_name='b')
-r = p & q
-self.assertEqual(r.to_sql(Person), "(last_name = 'b') AND (NOT (first_name = 'a'))")
-r = q & p
-self.assertEqual(r.to_sql(Person), "(last_name = 'b') AND (NOT (first_name = 'a'))")
-r = q | p
-self.assertEqual(r.to_sql(Person), "(last_name = 'b') OR (NOT (first_name = 'a'))")
-r = ~q & p
-self.assertEqual(r.to_sql(Person), "(NOT (last_name = 'b')) AND (NOT (first_name = 'a'))")
 def test_invalid_filter(self):
 qs = Person.objects_in(self.database)
 with self.assertRaises(TypeError):