Mirror of https://github.com/Infinidat/infi.clickhouse_orm.git (synced 2024-11-21 16:46:33 +03:00)
Finished Release v2.1.1
* develop:
  Releasing v2.1.1
  fix precedence of ~ operator in Q objects
  ignore non-numeric parts of version string
  Fixes to make the tests pass on ClickHouse v21.9
  Bump urllib3 from 1.25.9 to 1.26.5 in /examples/db_explorer
  Bump jinja2 from 2.11.2 to 2.11.3 in /examples/db_explorer
  Simplified
  Support for adding a column to the beginning of a table
  1. add stddevPop func 2. add stddevSamp func
  changes reverted after rebase
  initializing changes related to string enums for pull request
This commit is contained in: commit 232a8d29ad
@@ -1,6 +1,14 @@
 Change Log
 ==========
 
+v2.1.1
+------
+- Improve support of ClickHouse v21.9 (mangototango)
+- Ignore non-numeric parts in ClickHouse version (mangototango)
+- Fix precedence of ~ operator in Q objects (mangototango)
+- Support for adding a column to the beginning of a table (meanmail)
+- Add stddevPop and stddevSamp functions (k.peskov)
+
 v2.1.0
 ------
 - Support for model constraints
@@ -6,10 +6,10 @@ idna==2.9
 infi.clickhouse-orm==2.0.1
 iso8601==0.1.12
 itsdangerous==1.1.0
-Jinja2==2.11.2
+Jinja2==2.11.3
 MarkupSafe==1.1.1
 pygal==2.4.0
 pytz==2020.1
 requests==2.23.0
-urllib3==1.25.9
+urllib3==1.26.5
 Werkzeug==1.0.1
@@ -51,6 +51,11 @@ class ServerError(DatabaseException):
             Code:\ (?P<code>\d+),
             \ e\.displayText\(\)\ =\ (?P<type1>[^ \n]+):\ (?P<msg>.+)
             ''', re.VERBOSE | re.DOTALL),
+        # ClickHouse v21+
+        re.compile(r'''
+            Code:\ (?P<code>\d+).
+            \ (?P<type1>[^ \n]+):\ (?P<msg>.+)
+            ''', re.VERBOSE | re.DOTALL),
     )
 
     @classmethod
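The second pattern was added because ClickHouse v21+ drops the `e.displayText() =` wrapper from its error messages. A minimal sketch of how the two patterns behave; the error strings below are illustrative, not captured from a real server:

```python
import re

# Copies of the two ERROR_PATTERNS entries shown above
old_style = re.compile(r'''
    Code:\ (?P<code>\d+),
    \ e\.displayText\(\)\ =\ (?P<type1>[^ \n]+):\ (?P<msg>.+)
    ''', re.VERBOSE | re.DOTALL)
v21_style = re.compile(r'''
    Code:\ (?P<code>\d+).
    \ (?P<type1>[^ \n]+):\ (?P<msg>.+)
    ''', re.VERBOSE | re.DOTALL)

# Hypothetical error texts in the pre-v21 and v21+ formats
pre_v21 = "Code: 81, e.displayText() = DB::Exception: Database demo does not exist"
post_v21 = "Code: 81. DB::Exception: Database demo does not exist"

assert old_style.match(pre_v21).group('type1') == 'DB::Exception'
assert v21_style.match(post_v21).group('code') == '81'
```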
@@ -408,7 +413,7 @@ class Database(object):
         except ServerError as e:
             logger.exception('Cannot determine server version (%s), assuming 1.1.0', e)
             ver = '1.1.0'
-        return tuple(int(n) for n in ver.split('.')) if as_tuple else ver
+        return tuple(int(n) for n in ver.split('.') if n.isdigit()) if as_tuple else ver
 
     def _is_existing_database(self):
         r = self._send("SELECT count() FROM system.databases WHERE name = '%s'" % self.db_name)
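The added `isdigit()` filter matters because some ClickHouse builds report version strings with non-numeric components, which previously crashed `int()`. A small illustration (the version string is hypothetical):

```python
ver = '21.9.1.testing'   # hypothetical version with a non-numeric part

# Old behavior: int('testing') raises ValueError
# New behavior: non-numeric parts are simply ignored
assert tuple(int(n) for n in ver.split('.') if n.isdigit()) == (21, 9, 1)
```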
@@ -109,7 +109,7 @@ class Field(FunctionOperatorsMixin):
         elif self.default:
             default = self.to_db_string(self.default)
             sql += ' DEFAULT %s' % default
-        if self.codec and db and db.has_codec_support:
+        if self.codec and db and db.has_codec_support and not self.alias:
             sql += ' CODEC(%s)' % self.codec
         return sql
 
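The extra `not self.alias` check keeps the generated DDL valid, since ClickHouse does not accept a CODEC clause on ALIAS columns. A minimal sketch using the same field layout as the CompressedModel test further down:

```python
from infi.clickhouse_orm.fields import Float32Field

float_field = Float32Field(codec='NONE')
alias_field = Float32Field(alias='float_field', codec='ZSTD(4)')

# With this change, the SQL generated for alias_field still carries the
# ALIAS expression but drops the CODEC clause, roughly:
#   float_field Float32 CODEC(NONE)
#   alias_field Float32 ALIAS float_field
```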
@@ -468,9 +468,16 @@ class BaseEnumField(Field):
             return value
         try:
             if isinstance(value, str):
-                return self.enum_cls[value]
+                try:
+                    return self.enum_cls[value]
+                except Exception:
+                    return self.enum_cls(value)
             if isinstance(value, bytes):
-                return self.enum_cls[value.decode('UTF-8')]
+                decoded = value.decode('UTF-8')
+                try:
+                    return self.enum_cls[decoded]
+                except Exception:
+                    return self.enum_cls(decoded)
             if isinstance(value, int):
                 return self.enum_cls(value)
         except (KeyError, ValueError):
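The nested try/except lets an Enum value arrive either as the member name or as the member's (string) value, which is what the "string enums" commit in this release is about. A standalone illustration of the fallback logic, using a hypothetical enum:

```python
import enum

class Suit(str, enum.Enum):   # hypothetical string-valued enum
    hearts = 'H'
    spades = 'S'

def to_member(value):
    # Mirrors the fallback above: try lookup by name, then by value
    try:
        return Suit[value]
    except Exception:
        return Suit(value)

assert to_member('hearts') is Suit.hearts   # by member name
assert to_member('S') is Suit.spades        # by member value
```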
@@ -665,4 +672,3 @@ class LowCardinalityField(Field):
 
 # Expose only relevant classes in import *
 __all__ = get_subclass_names(locals(), Field)
-
@@ -391,11 +391,11 @@ class F(Cond, FunctionOperatorsMixin, metaclass=FMeta):
         return F('toYear', d)
 
     @staticmethod
-    def toISOYear(d, timezone=''):
+    def toISOYear(d, timezone=NO_VALUE):
         return F('toISOYear', d, timezone)
 
     @staticmethod
-    def toQuarter(d, timezone=''):
+    def toQuarter(d, timezone=NO_VALUE):
         return F('toQuarter', d, timezone) if timezone else F('toQuarter', d)
 
     @staticmethod
@@ -403,11 +403,11 @@ class F(Cond, FunctionOperatorsMixin, metaclass=FMeta):
         return F('toMonth', d)
 
     @staticmethod
-    def toWeek(d, mode=0, timezone=''):
+    def toWeek(d, mode=0, timezone=NO_VALUE):
         return F('toWeek', d, mode, timezone)
 
     @staticmethod
-    def toISOWeek(d, timezone=''):
+    def toISOWeek(d, timezone=NO_VALUE):
         return F('toISOWeek', d, timezone) if timezone else F('toISOWeek', d)
 
     @staticmethod
@@ -483,7 +483,7 @@ class F(Cond, FunctionOperatorsMixin, metaclass=FMeta):
         return F('toStartOfDay', d)
 
     @staticmethod
-    def toTime(d, timezone=''):
+    def toTime(d, timezone=NO_VALUE):
         return F('toTime', d, timezone)
 
     @staticmethod
@@ -491,47 +491,47 @@ class F(Cond, FunctionOperatorsMixin, metaclass=FMeta):
         return F('toTimeZone', dt, timezone)
 
     @staticmethod
-    def toUnixTimestamp(dt, timezone=''):
+    def toUnixTimestamp(dt, timezone=NO_VALUE):
         return F('toUnixTimestamp', dt, timezone)
 
     @staticmethod
-    def toYYYYMM(dt, timezone=''):
+    def toYYYYMM(dt, timezone=NO_VALUE):
         return F('toYYYYMM', dt, timezone) if timezone else F('toYYYYMM', dt)
 
     @staticmethod
-    def toYYYYMMDD(dt, timezone=''):
+    def toYYYYMMDD(dt, timezone=NO_VALUE):
         return F('toYYYYMMDD', dt, timezone) if timezone else F('toYYYYMMDD', dt)
 
     @staticmethod
-    def toYYYYMMDDhhmmss(dt, timezone=''):
+    def toYYYYMMDDhhmmss(dt, timezone=NO_VALUE):
         return F('toYYYYMMDDhhmmss', dt, timezone) if timezone else F('toYYYYMMDDhhmmss', dt)
 
     @staticmethod
-    def toRelativeYearNum(d, timezone=''):
+    def toRelativeYearNum(d, timezone=NO_VALUE):
         return F('toRelativeYearNum', d, timezone)
 
     @staticmethod
-    def toRelativeMonthNum(d, timezone=''):
+    def toRelativeMonthNum(d, timezone=NO_VALUE):
         return F('toRelativeMonthNum', d, timezone)
 
     @staticmethod
-    def toRelativeWeekNum(d, timezone=''):
+    def toRelativeWeekNum(d, timezone=NO_VALUE):
         return F('toRelativeWeekNum', d, timezone)
 
     @staticmethod
-    def toRelativeDayNum(d, timezone=''):
+    def toRelativeDayNum(d, timezone=NO_VALUE):
         return F('toRelativeDayNum', d, timezone)
 
     @staticmethod
-    def toRelativeHourNum(d, timezone=''):
+    def toRelativeHourNum(d, timezone=NO_VALUE):
         return F('toRelativeHourNum', d, timezone)
 
     @staticmethod
-    def toRelativeMinuteNum(d, timezone=''):
+    def toRelativeMinuteNum(d, timezone=NO_VALUE):
         return F('toRelativeMinuteNum', d, timezone)
 
     @staticmethod
-    def toRelativeSecondNum(d, timezone=''):
+    def toRelativeSecondNum(d, timezone=NO_VALUE):
         return F('toRelativeSecondNum', d, timezone)
 
     @staticmethod
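NO_VALUE is a sentinel (defined in the ORM's utils module) that marks an argument as omitted, so these functions no longer render a literal empty string when no timezone is given. A hedged sketch of the effect; the exact rendered SQL may differ slightly:

```python
from infi.clickhouse_orm.funcs import F

# Without a timezone the argument is now dropped entirely
print(F.toUnixTimestamp(F.now()).to_sql())
# expected roughly: toUnixTimestamp(now())   (previously: toUnixTimestamp(now(), ''))

# An explicit timezone is passed through unchanged
print(F.toUnixTimestamp(F.now(), 'UTC').to_sql())
# expected roughly: toUnixTimestamp(now(), 'UTC')
```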
@@ -555,7 +555,7 @@ class F(Cond, FunctionOperatorsMixin, metaclass=FMeta):
         return F('timeSlots', start_time, F.toUInt32(duration))
 
     @staticmethod
-    def formatDateTime(d, format, timezone=''):
+    def formatDateTime(d, format, timezone=NO_VALUE):
         return F('formatDateTime', d, format, timezone)
 
     @staticmethod
@@ -1633,6 +1633,16 @@ class F(Cond, FunctionOperatorsMixin, metaclass=FMeta):
     def varSamp(x):
         return F('varSamp', x)
 
     @staticmethod
     @aggregate
+    def stddevPop(expr):
+        return F('stddevPop', expr)
+
+    @staticmethod
+    @aggregate
+    def stddevSamp(expr):
+        return F('stddevSamp', expr)
+
+    @staticmethod
+    @aggregate
     @parametric
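The new aggregates are used like any other F aggregate function. A minimal, hypothetical model just to show them rendering in an expression (the test suite's Person model is similar):

```python
from infi.clickhouse_orm.models import Model
from infi.clickhouse_orm.fields import Float32Field
from infi.clickhouse_orm.engines import Memory
from infi.clickhouse_orm.funcs import F

class Person(Model):          # hypothetical stand-in model
    height = Float32Field()
    engine = Memory()

# Population and sample standard deviation expressions
print(F.stddevPop(Person.height).to_sql())    # e.g. stddevPop(height)
print(F.stddevSamp(Person.height).to_sql())   # e.g. stddevSamp(height)
```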
@@ -83,10 +83,12 @@ class AlterTable(ModelOperation):
             is_regular_field = not (field.materialized or field.alias)
             if name not in table_fields:
                 logger.info(' Add column %s', name)
-                assert prev_name, 'Cannot add a column to the beginning of the table'
                 cmd = 'ADD COLUMN %s %s' % (name, field.get_sql(db=database))
                 if is_regular_field:
-                    cmd += ' AFTER %s' % prev_name
+                    if prev_name:
+                        cmd += ' AFTER %s' % prev_name
+                    else:
+                        cmd += ' FIRST'
                 self._alter_table(database, cmd)
 
             if is_regular_field:
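Removing the assertion means a migration can now add a column before all existing ones; when there is no preceding field, the generated command uses FIRST instead of AFTER. A hedged sketch of what this enables (model and column names are illustrative):

```python
from infi.clickhouse_orm.models import Model
from infi.clickhouse_orm.fields import DateField, StringField
from infi.clickhouse_orm.engines import MergeTree

class Event(Model):
    # new_col did not exist in the deployed table and is declared first, so
    # applying migrations.AlterTable(Event) would now issue roughly:
    #   ALTER TABLE event ADD COLUMN new_col String FIRST
    # instead of failing the old "Cannot add a column to the beginning" assertion.
    new_col = StringField()
    date = DateField()
    name = StringField()
    engine = MergeTree('date', ('date',))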
@@ -209,10 +209,10 @@ class Q(object):
 
     @classmethod
     def _construct_from(cls, l_child, r_child, mode):
-        if mode == l_child._mode:
+        if mode == l_child._mode and not l_child._negate:
             q = deepcopy(l_child)
             q._children.append(deepcopy(r_child))
-        elif mode == r_child._mode:
+        elif mode == r_child._mode and not r_child._negate:
             q = deepcopy(r_child)
             q._children.append(deepcopy(l_child))
         else:
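Before this check, merging a negated Q node into its parent dropped the NOT, which is the bug the new test below covers. A self-contained sketch with a minimal stand-in model (the real test uses the suite's Person model):

```python
from infi.clickhouse_orm.models import Model
from infi.clickhouse_orm.fields import StringField
from infi.clickhouse_orm.engines import Memory
from infi.clickhouse_orm.query import Q

class Person(Model):          # minimal stand-in model
    first_name = StringField()
    last_name = StringField()
    engine = Memory()

expr = ~Q(first_name='a') & Q(last_name='b')
# With the fix the negation survives the merge:
print(expr.to_sql(Person))    # (last_name = 'b') AND (NOT (first_name = 'a'))
```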
@@ -106,7 +106,7 @@ class CompressedFieldsTestCase(unittest.TestCase):
             ('nullable_field', 'CODEC(ZSTD(1))'),
             ('array_field', 'CODEC(Delta(2), LZ4HC(0))'),
             ('float_field', 'CODEC(NONE)'),
-            ('alias_field', 'CODEC(ZSTD(4))')])
+            ('alias_field', '')])
 
 
 class CompressedModel(Model):
@@ -120,4 +120,4 @@ class CompressedModel(Model):
     float_field = Float32Field(codec='NONE')
     alias_field = Float32Field(alias='float_field', codec='ZSTD(4)')
 
-    engine = MergeTree('datetime_field', ('uint64_field', 'datetime_field'))
+    engine = MergeTree('datetime_field', ('uint64_field', 'datetime_field'))
@@ -181,12 +181,13 @@ class DatabaseTestCase(TestCaseWithData):
             Database(self.database.db_name, username='default', password='wrong')
 
         exc = cm.exception
+        print(exc.code, exc.message)
         if exc.code == 193:  # ClickHouse version < 20.3
             self.assertTrue(exc.message.startswith('Wrong password for user default'))
         elif exc.code == 516:  # ClickHouse version >= 20.3
             self.assertTrue(exc.message.startswith('default: Authentication failed'))
         else:
-            raise Exception('Unexpected error code - %s' % exc.code)
+            raise Exception('Unexpected error code - %s %s' % (exc.code, exc.message))
 
     def test_nonexisting_db(self):
         db = Database('db_not_here', autocreate=False)
@@ -251,6 +252,8 @@ class DatabaseTestCase(TestCaseWithData):
         from infi.clickhouse_orm.models import ModelBase
         query = "SELECT DISTINCT type FROM system.columns"
         for row in self.database.select(query):
+            if row.type.startswith('Map'):
+                continue  # Not supported yet
             ModelBase.create_ad_hoc_field(row.type)
 
     def test_get_model_for_table(self):
@@ -271,7 +274,12 @@ class DatabaseTestCase(TestCaseWithData):
         query = "SELECT name FROM system.tables WHERE database='system'"
         for row in self.database.select(query):
             print(row.name)
-            model = self.database.get_model_for_table(row.name, system_table=True)
+            if row.name in ('distributed_ddl_queue',):
+                continue  # Not supported
+            try:
+                model = self.database.get_model_for_table(row.name, system_table=True)
+            except NotImplementedError:
+                continue  # Table contains an unsupported field type
             self.assertTrue(model.is_system_model())
             self.assertTrue(model.is_read_only())
             self.assertEqual(model.table_name(), row.name)
@@ -105,7 +105,7 @@ class HierarchicalDictionaryTest(DictionaryTestMixin, unittest.TestCase):
 
     def test_dictgethierarchy(self):
         self._test_func(F.dictGetHierarchy(self.dict_name, F.toUInt64(3)), [3, 2, 1])
-        self._test_func(F.dictGetHierarchy(self.dict_name, F.toUInt64(99)), [99])
+        self._test_func(F.dictGetHierarchy(self.dict_name, F.toUInt64(99)), [])
 
     def test_dictisin(self):
         self._test_func(F.dictIsIn(self.dict_name, F.toUInt64(3), F.toUInt64(1)), 1)
@@ -17,10 +17,10 @@ class _EnginesHelperTestCase(unittest.TestCase):
 
 
 class EnginesTestCase(_EnginesHelperTestCase):
-    def _create_and_insert(self, model_class):
+    def _create_and_insert(self, model_class, **kwargs):
         self.database.create_table(model_class)
         self.database.insert([
-            model_class(date='2017-01-01', event_id=23423, event_group=13, event_count=7, event_version=1)
+            model_class(date='2017-01-01', event_id=23423, event_group=13, event_count=7, event_version=1, **kwargs)
         ])
 
     def test_merge_tree(self):
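The **kwargs pass-through exists so the CollapsingMergeTree tests below can supply the mandatory sign column when inserting. Roughly, the updated call sites do the equivalent of the following (TestCollapseModel is the collapsing model defined in those tests; this is a sketch, not the exact helper code):

```python
# Equivalent of self._create_and_insert(TestCollapseModel, sign=1): the extra
# keyword ends up on the inserted row, satisfying the sign column that
# CollapsingMergeTree requires.
row = TestCollapseModel(
    date='2017-01-01', event_id=23423, event_group=13,
    event_count=7, event_version=1, sign=1,
)
```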
@@ -155,7 +155,7 @@ class EnginesTestCase(_EnginesHelperTestCase):
         )
 
         self._create_and_insert(TestModel)
-        self._create_and_insert(TestCollapseModel)
+        self._create_and_insert(TestCollapseModel, sign=1)
 
         # Result order may be different, lets sort manually
         parts = sorted(list(SystemPart.get(self.database)), key=lambda x: x.table)
@@ -188,7 +188,7 @@ class EnginesTestCase(_EnginesHelperTestCase):
         )
 
         self._create_and_insert(TestModel)
-        self._create_and_insert(TestCollapseModel)
+        self._create_and_insert(TestCollapseModel, sign=1)
 
         self.assertEqual(2, len(list(SystemPart.get(self.database))))
 
@@ -302,6 +302,18 @@ class QuerySetTestCase(TestCaseWithData):
         self.assertEqual(qs.conditions_as_sql(),
                          "(first_name = 'a') AND (greater(`height`, 1.7)) AND (last_name = 'b')")
 
+    def test_precedence_of_negation(self):
+        p = ~Q(first_name='a')
+        q = Q(last_name='b')
+        r = p & q
+        self.assertEqual(r.to_sql(Person), "(last_name = 'b') AND (NOT (first_name = 'a'))")
+        r = q & p
+        self.assertEqual(r.to_sql(Person), "(last_name = 'b') AND (NOT (first_name = 'a'))")
+        r = q | p
+        self.assertEqual(r.to_sql(Person), "(last_name = 'b') OR (NOT (first_name = 'a'))")
+        r = ~q & p
+        self.assertEqual(r.to_sql(Person), "(NOT (last_name = 'b')) AND (NOT (first_name = 'a'))")
+
     def test_invalid_filter(self):
         qs = Person.objects_in(self.database)
         with self.assertRaises(TypeError):