Mirror of https://github.com/Infinidat/infi.clickhouse_orm.git (synced 2025-08-02 19:20:14 +03:00)

Commit 817825e878 (parent c5d8d356fe)

Chore: blacken / pep8ify tests
@@ -1,3 +1,12 @@
+[tool.black]
+line-length = 120
+extend-exclude = '''
+/(
+  | examples
+  | scripts
+)/
+'''
+
 [tool.isort]
 multi_line_output = 3
 include_trailing_comma = true

@@ -15,4 +15,7 @@ ignore =
 E203 # Whitespace after ':'
 W503 # Operator after new line
 B950 # We use E501
-B008 # Using callable in function defintion, required for FastAPI
+exclude =
+tests/sample_migrations
+examples
+scripts
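The [tool.black] section above sets line-length = 120 and excludes the examples and scripts directories. For reference, the effect of this setting can be reproduced through black's public Python API — a minimal sketch, assuming black is installed; the input string is purely illustrative and not taken from this repository:

    import black

    # Illustrative, badly formatted snippet - not from this repository.
    src = "x = {  'a':37,'b':42,\n'c':927}\n"

    # line_length mirrors the `line-length = 120` value added in [tool.black] above.
    mode = black.FileMode(line_length=120)
    print(black.format_str(src, mode=mode))

When black is run from the command line it reads the same [tool.black] section from pyproject.toml, so the CLI and the sketch above apply identical settings.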
@@ -2,8 +2,8 @@ import unittest
 from datetime import date

 from clickhouse_orm.database import Database
-from clickhouse_orm.engines import *
-from clickhouse_orm.fields import *
+from clickhouse_orm.engines import MergeTree
+from clickhouse_orm.fields import ArrayField, DateField, Int32Field, StringField
 from clickhouse_orm.models import Model


@@ -4,8 +4,8 @@ import unittest
 import pytz

 from clickhouse_orm.database import Database
-from clickhouse_orm.engines import *
-from clickhouse_orm.fields import *
+from clickhouse_orm.engines import MergeTree
+from clickhouse_orm.fields import DateField, DateTime64Field, DateTimeField
 from clickhouse_orm.models import Model


@ -42,19 +42,39 @@ class DateFieldsTest(unittest.TestCase):
|
||||||
results = list(self.database.select(query))
|
results = list(self.database.select(query))
|
||||||
self.assertEqual(len(results), 2)
|
self.assertEqual(len(results), 2)
|
||||||
self.assertEqual(results[0].date_field, datetime.date(2016, 8, 30))
|
self.assertEqual(results[0].date_field, datetime.date(2016, 8, 30))
|
||||||
self.assertEqual(results[0].datetime_field, datetime.datetime(2016, 8, 30, 3, 50, 0, tzinfo=pytz.UTC))
|
|
||||||
self.assertEqual(results[0].hour_start, datetime.datetime(2016, 8, 30, 3, 0, 0, tzinfo=pytz.UTC))
|
|
||||||
self.assertEqual(results[1].date_field, datetime.date(2016, 8, 31))
|
|
||||||
self.assertEqual(results[1].datetime_field, datetime.datetime(2016, 8, 31, 1, 30, 0, tzinfo=pytz.UTC))
|
|
||||||
self.assertEqual(results[1].hour_start, datetime.datetime(2016, 8, 31, 1, 0, 0, tzinfo=pytz.UTC))
|
|
||||||
|
|
||||||
self.assertEqual(results[0].datetime64_field, datetime.datetime(2016, 8, 30, 3, 50, 0, 123456, tzinfo=pytz.UTC))
|
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
results[0].datetime64_3_field, datetime.datetime(2016, 8, 30, 3, 50, 0, 123000, tzinfo=pytz.UTC)
|
results[0].datetime_field,
|
||||||
|
datetime.datetime(2016, 8, 30, 3, 50, 0, tzinfo=pytz.UTC),
|
||||||
)
|
)
|
||||||
self.assertEqual(results[1].datetime64_field, datetime.datetime(2016, 8, 31, 1, 30, 0, 123456, tzinfo=pytz.UTC))
|
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
results[1].datetime64_3_field, datetime.datetime(2016, 8, 31, 1, 30, 0, 123000, tzinfo=pytz.UTC)
|
results[0].hour_start,
|
||||||
|
datetime.datetime(2016, 8, 30, 3, 0, 0, tzinfo=pytz.UTC),
|
||||||
|
)
|
||||||
|
self.assertEqual(results[1].date_field, datetime.date(2016, 8, 31))
|
||||||
|
self.assertEqual(
|
||||||
|
results[1].datetime_field,
|
||||||
|
datetime.datetime(2016, 8, 31, 1, 30, 0, tzinfo=pytz.UTC),
|
||||||
|
)
|
||||||
|
self.assertEqual(
|
||||||
|
results[1].hour_start,
|
||||||
|
datetime.datetime(2016, 8, 31, 1, 0, 0, tzinfo=pytz.UTC),
|
||||||
|
)
|
||||||
|
|
||||||
|
self.assertEqual(
|
||||||
|
results[0].datetime64_field,
|
||||||
|
datetime.datetime(2016, 8, 30, 3, 50, 0, 123456, tzinfo=pytz.UTC),
|
||||||
|
)
|
||||||
|
self.assertEqual(
|
||||||
|
results[0].datetime64_3_field,
|
||||||
|
datetime.datetime(2016, 8, 30, 3, 50, 0, 123000, tzinfo=pytz.UTC),
|
||||||
|
)
|
||||||
|
self.assertEqual(
|
||||||
|
results[1].datetime64_field,
|
||||||
|
datetime.datetime(2016, 8, 31, 1, 30, 0, 123456, tzinfo=pytz.UTC),
|
||||||
|
)
|
||||||
|
self.assertEqual(
|
||||||
|
results[1].datetime64_3_field,
|
||||||
|
datetime.datetime(2016, 8, 31, 1, 30, 0, 123000, tzinfo=pytz.UTC),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@ -106,20 +126,62 @@ class DateTimeFieldWithTzTest(unittest.TestCase):
|
||||||
query = "SELECT * from $db.modelwithtz ORDER BY datetime_no_tz_field"
|
query = "SELECT * from $db.modelwithtz ORDER BY datetime_no_tz_field"
|
||||||
results = list(self.database.select(query))
|
results = list(self.database.select(query))
|
||||||
|
|
||||||
self.assertEqual(results[0].datetime_no_tz_field, datetime.datetime(2020, 6, 11, 4, 0, 0, tzinfo=pytz.UTC))
|
self.assertEqual(
|
||||||
self.assertEqual(results[0].datetime_tz_field, datetime.datetime(2020, 6, 11, 4, 0, 0, tzinfo=pytz.UTC))
|
results[0].datetime_no_tz_field,
|
||||||
self.assertEqual(results[0].datetime64_tz_field, datetime.datetime(2020, 6, 11, 4, 0, 0, tzinfo=pytz.UTC))
|
datetime.datetime(2020, 6, 11, 4, 0, 0, tzinfo=pytz.UTC),
|
||||||
self.assertEqual(results[0].datetime_utc_field, datetime.datetime(2020, 6, 11, 4, 0, 0, tzinfo=pytz.UTC))
|
)
|
||||||
self.assertEqual(results[1].datetime_no_tz_field, datetime.datetime(2020, 6, 11, 4, 0, 0, tzinfo=pytz.UTC))
|
self.assertEqual(
|
||||||
self.assertEqual(results[1].datetime_tz_field, datetime.datetime(2020, 6, 11, 4, 0, 0, tzinfo=pytz.UTC))
|
results[0].datetime_tz_field,
|
||||||
self.assertEqual(results[1].datetime64_tz_field, datetime.datetime(2020, 6, 11, 4, 0, 0, tzinfo=pytz.UTC))
|
datetime.datetime(2020, 6, 11, 4, 0, 0, tzinfo=pytz.UTC),
|
||||||
self.assertEqual(results[1].datetime_utc_field, datetime.datetime(2020, 6, 11, 4, 0, 0, tzinfo=pytz.UTC))
|
)
|
||||||
|
self.assertEqual(
|
||||||
|
results[0].datetime64_tz_field,
|
||||||
|
datetime.datetime(2020, 6, 11, 4, 0, 0, tzinfo=pytz.UTC),
|
||||||
|
)
|
||||||
|
self.assertEqual(
|
||||||
|
results[0].datetime_utc_field,
|
||||||
|
datetime.datetime(2020, 6, 11, 4, 0, 0, tzinfo=pytz.UTC),
|
||||||
|
)
|
||||||
|
self.assertEqual(
|
||||||
|
results[1].datetime_no_tz_field,
|
||||||
|
datetime.datetime(2020, 6, 11, 4, 0, 0, tzinfo=pytz.UTC),
|
||||||
|
)
|
||||||
|
self.assertEqual(
|
||||||
|
results[1].datetime_tz_field,
|
||||||
|
datetime.datetime(2020, 6, 11, 4, 0, 0, tzinfo=pytz.UTC),
|
||||||
|
)
|
||||||
|
self.assertEqual(
|
||||||
|
results[1].datetime64_tz_field,
|
||||||
|
datetime.datetime(2020, 6, 11, 4, 0, 0, tzinfo=pytz.UTC),
|
||||||
|
)
|
||||||
|
self.assertEqual(
|
||||||
|
results[1].datetime_utc_field,
|
||||||
|
datetime.datetime(2020, 6, 11, 4, 0, 0, tzinfo=pytz.UTC),
|
||||||
|
)
|
||||||
|
|
||||||
self.assertEqual(results[0].datetime_no_tz_field.tzinfo.zone, self.database.server_timezone.zone)
|
self.assertEqual(
|
||||||
self.assertEqual(results[0].datetime_tz_field.tzinfo.zone, pytz.timezone("Europe/Madrid").zone)
|
results[0].datetime_no_tz_field.tzinfo.zone,
|
||||||
self.assertEqual(results[0].datetime64_tz_field.tzinfo.zone, pytz.timezone("Europe/Madrid").zone)
|
self.database.server_timezone.zone,
|
||||||
|
)
|
||||||
|
self.assertEqual(
|
||||||
|
results[0].datetime_tz_field.tzinfo.zone,
|
||||||
|
pytz.timezone("Europe/Madrid").zone,
|
||||||
|
)
|
||||||
|
self.assertEqual(
|
||||||
|
results[0].datetime64_tz_field.tzinfo.zone,
|
||||||
|
pytz.timezone("Europe/Madrid").zone,
|
||||||
|
)
|
||||||
self.assertEqual(results[0].datetime_utc_field.tzinfo.zone, pytz.timezone("UTC").zone)
|
self.assertEqual(results[0].datetime_utc_field.tzinfo.zone, pytz.timezone("UTC").zone)
|
||||||
self.assertEqual(results[1].datetime_no_tz_field.tzinfo.zone, self.database.server_timezone.zone)
|
self.assertEqual(
|
||||||
self.assertEqual(results[1].datetime_tz_field.tzinfo.zone, pytz.timezone("Europe/Madrid").zone)
|
results[1].datetime_no_tz_field.tzinfo.zone,
|
||||||
self.assertEqual(results[1].datetime64_tz_field.tzinfo.zone, pytz.timezone("Europe/Madrid").zone)
|
self.database.server_timezone.zone,
|
||||||
|
)
|
||||||
|
self.assertEqual(
|
||||||
|
results[1].datetime_tz_field.tzinfo.zone,
|
||||||
|
pytz.timezone("Europe/Madrid").zone,
|
||||||
|
)
|
||||||
|
self.assertEqual(
|
||||||
|
results[1].datetime64_tz_field.tzinfo.zone,
|
||||||
|
pytz.timezone("Europe/Madrid").zone,
|
||||||
|
)
|
||||||
self.assertEqual(results[1].datetime_utc_field.tzinfo.zone, pytz.timezone("UTC").zone)
|
self.assertEqual(results[1].datetime_utc_field.tzinfo.zone, pytz.timezone("UTC").zone)
|
||||||
|
|
|
@@ -3,8 +3,8 @@ import unittest
 from decimal import Decimal

 from clickhouse_orm.database import Database, ServerError
-from clickhouse_orm.engines import *
-from clickhouse_orm.fields import *
+from clickhouse_orm.engines import Memory
+from clickhouse_orm.fields import DateField, Decimal32Field, Decimal64Field, Decimal128Field, DecimalField
 from clickhouse_orm.models import Model


@@ -2,8 +2,8 @@ import unittest
 from enum import Enum

 from clickhouse_orm.database import Database
-from clickhouse_orm.engines import *
-from clickhouse_orm.fields import *
+from clickhouse_orm.engines import MergeTree
+from clickhouse_orm.fields import ArrayField, DateField, Enum8Field, Enum16Field
 from clickhouse_orm.models import Model


@@ -2,8 +2,8 @@
 import unittest

 from clickhouse_orm.database import Database
-from clickhouse_orm.engines import *
-from clickhouse_orm.fields import *
+from clickhouse_orm.engines import MergeTree
+from clickhouse_orm.fields import DateField, FixedStringField
 from clickhouse_orm.models import Model


@@ -2,8 +2,8 @@ import unittest
 from datetime import date

 from clickhouse_orm.database import Database
-from clickhouse_orm.engines import *
-from clickhouse_orm.fields import *
+from clickhouse_orm.engines import MergeTree
+from clickhouse_orm.fields import DateField, DateTimeField, Int32Field, StringField
 from clickhouse_orm.funcs import F
 from clickhouse_orm.models import NO_VALUE, Model

@@ -4,8 +4,25 @@ from datetime import date, datetime
 import pytz

 from clickhouse_orm.database import Database
-from clickhouse_orm.engines import *
-from clickhouse_orm.fields import *
+from clickhouse_orm.engines import MergeTree
+from clickhouse_orm.fields import (
+    BaseFloatField,
+    BaseIntField,
+    DateField,
+    DateTimeField,
+    Float32Field,
+    Float64Field,
+    Int8Field,
+    Int16Field,
+    Int32Field,
+    Int64Field,
+    NullableField,
+    StringField,
+    UInt8Field,
+    UInt16Field,
+    UInt32Field,
+    UInt64Field,
+)
 from clickhouse_orm.models import Model
 from clickhouse_orm.utils import comma_join

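The parenthesized, one-name-per-line import blocks introduced in this commit (such as the clickhouse_orm.fields block just above) are what isort produces with multi_line_output = 3 (vertical hanging indent) and include_trailing_comma = true, the options added to [tool.isort] here. A minimal sketch, assuming isort 5+ is installed; the import line and the low line_length are illustrative only:

    import isort

    # Illustrative import line - not taken from this repository.
    src = "from clickhouse_orm.fields import StringField, DateField, Int32Field, ArrayField\n"

    # The "black" profile implies multi_line_output=3 and include_trailing_comma=True,
    # i.e. the same options added to [tool.isort] in this commit; line_length is forced
    # low here purely so that this short example wraps.
    print(isort.code(src, config=isort.Config(profile="black", line_length=40)))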
@ -79,7 +96,16 @@ class NullableFieldsTest(unittest.TestCase):
|
||||||
f = NullableField(field())
|
f = NullableField(field())
|
||||||
self.assertTrue(f.isinstance(field))
|
self.assertTrue(f.isinstance(field))
|
||||||
self.assertTrue(f.isinstance(NullableField))
|
self.assertTrue(f.isinstance(NullableField))
|
||||||
for field in (Int8Field, Int16Field, Int32Field, Int64Field, UInt8Field, UInt16Field, UInt32Field, UInt64Field):
|
for field in (
|
||||||
|
Int8Field,
|
||||||
|
Int16Field,
|
||||||
|
Int32Field,
|
||||||
|
Int64Field,
|
||||||
|
UInt8Field,
|
||||||
|
UInt16Field,
|
||||||
|
UInt32Field,
|
||||||
|
UInt64Field,
|
||||||
|
):
|
||||||
f = NullableField(field())
|
f = NullableField(field())
|
||||||
self.assertTrue(f.isinstance(BaseIntField))
|
self.assertTrue(f.isinstance(BaseIntField))
|
||||||
for field in (Float32Field, Float64Field):
|
for field in (Float32Field, Float64Field):
|
||||||
|
@ -95,10 +121,19 @@ class NullableFieldsTest(unittest.TestCase):
|
||||||
self.database.insert(
|
self.database.insert(
|
||||||
[
|
[
|
||||||
ModelWithNullable(date_field="2016-08-30", null_str="", null_int=42, null_date=dt),
|
ModelWithNullable(date_field="2016-08-30", null_str="", null_int=42, null_date=dt),
|
||||||
ModelWithNullable(date_field="2016-08-30", null_str="nothing", null_int=None, null_date=None),
|
ModelWithNullable(
|
||||||
|
date_field="2016-08-30",
|
||||||
|
null_str="nothing",
|
||||||
|
null_int=None,
|
||||||
|
null_date=None,
|
||||||
|
),
|
||||||
ModelWithNullable(date_field="2016-08-31", null_str=None, null_int=42, null_date=dt),
|
ModelWithNullable(date_field="2016-08-31", null_str=None, null_int=42, null_date=dt),
|
||||||
ModelWithNullable(
|
ModelWithNullable(
|
||||||
date_field="2016-08-31", null_str=None, null_int=None, null_date=None, null_default=None
|
date_field="2016-08-31",
|
||||||
|
null_str=None,
|
||||||
|
null_int=None,
|
||||||
|
null_date=None,
|
||||||
|
null_default=None,
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
)
|
)
|
||||||
|
|
|
@@ -3,7 +3,7 @@ from datetime import date, datetime

 import pytz

-from clickhouse_orm.fields import *
+from clickhouse_orm.fields import DateField, DateTime64Field, DateTimeField, UInt8Field


 class SimpleFieldsTest(unittest.TestCase):
@ -36,7 +36,14 @@ class SimpleFieldsTest(unittest.TestCase):
|
||||||
dt2 = f.to_python(f.to_db_string(dt, quote=False), pytz.utc)
|
dt2 = f.to_python(f.to_db_string(dt, quote=False), pytz.utc)
|
||||||
self.assertEqual(dt, dt2)
|
self.assertEqual(dt, dt2)
|
||||||
# Invalid values
|
# Invalid values
|
||||||
for value in ("nope", "21/7/1999", 0.5, "2017-01 15:06:00", "2017-01-01X15:06:00", "2017-13-01T15:06:00"):
|
for value in (
|
||||||
|
"nope",
|
||||||
|
"21/7/1999",
|
||||||
|
0.5,
|
||||||
|
"2017-01 15:06:00",
|
||||||
|
"2017-01-01X15:06:00",
|
||||||
|
"2017-13-01T15:06:00",
|
||||||
|
):
|
||||||
with self.assertRaises(ValueError):
|
with self.assertRaises(ValueError):
|
||||||
f.to_python(value, pytz.utc)
|
f.to_python(value, pytz.utc)
|
||||||
|
|
||||||
|
@ -62,7 +69,13 @@ class SimpleFieldsTest(unittest.TestCase):
|
||||||
dt2 = f.to_python(f.to_db_string(dt, quote=False), pytz.utc)
|
dt2 = f.to_python(f.to_db_string(dt, quote=False), pytz.utc)
|
||||||
self.assertEqual(dt, dt2)
|
self.assertEqual(dt, dt2)
|
||||||
# Invalid values
|
# Invalid values
|
||||||
for value in ("nope", "21/7/1999", "2017-01 15:06:00", "2017-01-01X15:06:00", "2017-13-01T15:06:00"):
|
for value in (
|
||||||
|
"nope",
|
||||||
|
"21/7/1999",
|
||||||
|
"2017-01 15:06:00",
|
||||||
|
"2017-01-01X15:06:00",
|
||||||
|
"2017-13-01T15:06:00",
|
||||||
|
):
|
||||||
with self.assertRaises(ValueError):
|
with self.assertRaises(ValueError):
|
||||||
f.to_python(value, pytz.utc)
|
f.to_python(value, pytz.utc)
|
||||||
|
|
||||||
|
|
|
@@ -1,19 +0,0 @@
-[flake8]
-max-line-length = 120
-select =
-# pycodestyle
-E, W
-# pyflakes
-F
-# flake8-bugbear
-B, B9
-# pydocstyle
-D
-# isort
-I
-ignore =
-E203 # Whitespace after ':'
-W503 # Operator after new line
-B950 # We use E501
-B008 # Using callable in function defintion, required for FastAPI
-F405, F403 # Since * imports cause havok
@@ -1,8 +1,8 @@
 # -*- coding: utf-8 -*-
-from clickhouse_orm.engines import *
+from clickhouse_orm.engines import Buffer
 from clickhouse_orm.models import BufferModel

-from .base_test_with_data import *
+from .base_test_with_data import Person, TestCaseWithData, data


 class BufferTestCase(TestCaseWithData):
@@ -1,6 +1,6 @@
 import unittest

-from clickhouse_orm import *
+from clickhouse_orm import Constraint, Database, F, ServerError

 from .base_test_with_data import Person

@ -17,20 +17,41 @@ class ConstraintsTest(unittest.TestCase):
|
||||||
|
|
||||||
def test_insert_valid_values(self):
|
def test_insert_valid_values(self):
|
||||||
self.database.insert(
|
self.database.insert(
|
||||||
[PersonWithConstraints(first_name="Mike", last_name="Caruzo", birthday="2000-01-01", height=1.66)]
|
[
|
||||||
|
PersonWithConstraints(
|
||||||
|
first_name="Mike",
|
||||||
|
last_name="Caruzo",
|
||||||
|
birthday="2000-01-01",
|
||||||
|
height=1.66,
|
||||||
|
)
|
||||||
|
]
|
||||||
)
|
)
|
||||||
|
|
||||||
def test_insert_invalid_values(self):
|
def test_insert_invalid_values(self):
|
||||||
with self.assertRaises(ServerError) as e:
|
with self.assertRaises(ServerError) as e:
|
||||||
self.database.insert(
|
self.database.insert(
|
||||||
[PersonWithConstraints(first_name="Mike", last_name="Caruzo", birthday="2100-01-01", height=1.66)]
|
[
|
||||||
|
PersonWithConstraints(
|
||||||
|
first_name="Mike",
|
||||||
|
last_name="Caruzo",
|
||||||
|
birthday="2100-01-01",
|
||||||
|
height=1.66,
|
||||||
|
)
|
||||||
|
]
|
||||||
)
|
)
|
||||||
self.assertEqual(e.code, 469)
|
self.assertEqual(e.code, 469)
|
||||||
self.assertTrue("Constraint `birthday_in_the_past`" in str(e))
|
self.assertTrue("Constraint `birthday_in_the_past`" in str(e))
|
||||||
|
|
||||||
with self.assertRaises(ServerError) as e:
|
with self.assertRaises(ServerError) as e:
|
||||||
self.database.insert(
|
self.database.insert(
|
||||||
[PersonWithConstraints(first_name="Mike", last_name="Caruzo", birthday="1970-01-01", height=3)]
|
[
|
||||||
|
PersonWithConstraints(
|
||||||
|
first_name="Mike",
|
||||||
|
last_name="Caruzo",
|
||||||
|
birthday="1970-01-01",
|
||||||
|
height=3,
|
||||||
|
)
|
||||||
|
]
|
||||||
)
|
)
|
||||||
self.assertEqual(e.code, 469)
|
self.assertEqual(e.code, 469)
|
||||||
self.assertTrue("Constraint `max_height`" in str(e))
|
self.assertTrue("Constraint `max_height`" in str(e))
|
||||||
|
|
|
@@ -2,14 +2,14 @@
 import datetime
 import unittest

-from clickhouse_orm.database import DatabaseException, ServerError
+from clickhouse_orm.database import Database, DatabaseException, ServerError
 from clickhouse_orm.engines import Memory
-from clickhouse_orm.fields import *
+from clickhouse_orm.fields import DateField, DateTimeField, Float32Field, Int32Field, StringField
 from clickhouse_orm.funcs import F
 from clickhouse_orm.models import Model
 from clickhouse_orm.query import Q

-from .base_test_with_data import *
+from .base_test_with_data import Person, TestCaseWithData, data


 class DatabaseTestCase(TestCaseWithData):
@ -136,12 +136,19 @@ class DatabaseTestCase(TestCaseWithData):
|
||||||
page_a = self.database.paginate(Person, "first_name, last_name", -1, page_size)
|
page_a = self.database.paginate(Person, "first_name, last_name", -1, page_size)
|
||||||
page_b = self.database.paginate(Person, "first_name, last_name", page_a.pages_total, page_size)
|
page_b = self.database.paginate(Person, "first_name, last_name", page_a.pages_total, page_size)
|
||||||
self.assertEqual(page_a[1:], page_b[1:])
|
self.assertEqual(page_a[1:], page_b[1:])
|
||||||
self.assertEqual([obj.to_tsv() for obj in page_a.objects], [obj.to_tsv() for obj in page_b.objects])
|
self.assertEqual(
|
||||||
|
[obj.to_tsv() for obj in page_a.objects],
|
||||||
|
[obj.to_tsv() for obj in page_b.objects],
|
||||||
|
)
|
||||||
|
|
||||||
def test_pagination_empty_page(self):
|
def test_pagination_empty_page(self):
|
||||||
for page_num in (-1, 1, 2):
|
for page_num in (-1, 1, 2):
|
||||||
page = self.database.paginate(
|
page = self.database.paginate(
|
||||||
Person, "first_name, last_name", page_num, 10, conditions="first_name = 'Ziggy'"
|
Person,
|
||||||
|
"first_name, last_name",
|
||||||
|
page_num,
|
||||||
|
10,
|
||||||
|
conditions="first_name = 'Ziggy'",
|
||||||
)
|
)
|
||||||
self.assertEqual(page.number_of_objects, 0)
|
self.assertEqual(page.number_of_objects, 0)
|
||||||
self.assertEqual(page.objects, [])
|
self.assertEqual(page.objects, [])
|
||||||
|
@ -160,7 +167,13 @@ class DatabaseTestCase(TestCaseWithData):
|
||||||
page = self.database.paginate(Person, "first_name, last_name", 1, 100, conditions="first_name < 'Ava'")
|
page = self.database.paginate(Person, "first_name, last_name", 1, 100, conditions="first_name < 'Ava'")
|
||||||
self.assertEqual(page.number_of_objects, 10)
|
self.assertEqual(page.number_of_objects, 10)
|
||||||
# Conditions as expression
|
# Conditions as expression
|
||||||
page = self.database.paginate(Person, "first_name, last_name", 1, 100, conditions=Person.first_name < "Ava")
|
page = self.database.paginate(
|
||||||
|
Person,
|
||||||
|
"first_name, last_name",
|
||||||
|
1,
|
||||||
|
100,
|
||||||
|
conditions=Person.first_name < "Ava",
|
||||||
|
)
|
||||||
self.assertEqual(page.number_of_objects, 10)
|
self.assertEqual(page.number_of_objects, 10)
|
||||||
# Conditions as Q object
|
# Conditions as Q object
|
||||||
page = self.database.paginate(Person, "first_name, last_name", 1, 100, conditions=Q(first_name__lt="Ava"))
|
page = self.database.paginate(Person, "first_name, last_name", 1, 100, conditions=Q(first_name__lt="Ava"))
|
||||||
|
@ -177,7 +190,10 @@ class DatabaseTestCase(TestCaseWithData):
|
||||||
self._insert_and_check(self._sample_data(), len(data))
|
self._insert_and_check(self._sample_data(), len(data))
|
||||||
query = "SELECT * FROM `test-db`.person WHERE first_name = 'Whitney' ORDER BY last_name"
|
query = "SELECT * FROM `test-db`.person WHERE first_name = 'Whitney' ORDER BY last_name"
|
||||||
results = self.database.raw(query)
|
results = self.database.raw(query)
|
||||||
self.assertEqual(results, "Whitney\tDurham\t1977-09-15\t1.72\t\\N\nWhitney\tScott\t1971-07-04\t1.7\t\\N\n")
|
self.assertEqual(
|
||||||
|
results,
|
||||||
|
"Whitney\tDurham\t1977-09-15\t1.72\t\\N\nWhitney\tScott\t1971-07-04\t1.7\t\\N\n",
|
||||||
|
)
|
||||||
|
|
||||||
def test_invalid_user(self):
|
def test_invalid_user(self):
|
||||||
with self.assertRaises(ServerError) as cm:
|
with self.assertRaises(ServerError) as cm:
|
||||||
|
|
|
@@ -1,7 +1,7 @@
 import logging
 import unittest

-from clickhouse_orm import *
+from clickhouse_orm import Database, F, Memory, Model, StringField, UInt64Field


 class DictionaryTestMixin:
@ -99,8 +99,14 @@ class HierarchicalDictionaryTest(DictionaryTestMixin, unittest.TestCase):
|
||||||
self._test_func(F.dictGet(self.dict_name, "region_name", F.toUInt64(99)), "?")
|
self._test_func(F.dictGet(self.dict_name, "region_name", F.toUInt64(99)), "?")
|
||||||
|
|
||||||
def test_dictgetordefault(self):
|
def test_dictgetordefault(self):
|
||||||
self._test_func(F.dictGetOrDefault(self.dict_name, "region_name", F.toUInt64(3), "n/a"), "Center")
|
self._test_func(
|
||||||
self._test_func(F.dictGetOrDefault(self.dict_name, "region_name", F.toUInt64(99), "n/a"), "n/a")
|
F.dictGetOrDefault(self.dict_name, "region_name", F.toUInt64(3), "n/a"),
|
||||||
|
"Center",
|
||||||
|
)
|
||||||
|
self._test_func(
|
||||||
|
F.dictGetOrDefault(self.dict_name, "region_name", F.toUInt64(99), "n/a"),
|
||||||
|
"n/a",
|
||||||
|
)
|
||||||
|
|
||||||
def test_dicthas(self):
|
def test_dicthas(self):
|
||||||
self._test_func(F.dictHas(self.dict_name, F.toUInt64(3)), 1)
|
self._test_func(F.dictHas(self.dict_name, F.toUInt64(3)), 1)
|
||||||
|
|
|
@@ -2,7 +2,21 @@ import datetime
 import logging
 import unittest

-from clickhouse_orm import *
+from clickhouse_orm.database import Database, DatabaseException, ServerError
+from clickhouse_orm.engines import (
+    CollapsingMergeTree,
+    Log,
+    Memory,
+    Merge,
+    MergeTree,
+    ReplacingMergeTree,
+    SummingMergeTree,
+    TinyLog,
+)
+from clickhouse_orm.fields import DateField, Int8Field, UInt8Field, UInt16Field, UInt32Field
+from clickhouse_orm.funcs import F
+from clickhouse_orm.models import Distributed, DistributedModel, MergeModel, Model
+from clickhouse_orm.system_models import SystemPart

 logging.getLogger("requests").setLevel(logging.WARNING)

@ -19,7 +33,15 @@ class EnginesTestCase(_EnginesHelperTestCase):
|
||||||
def _create_and_insert(self, model_class):
|
def _create_and_insert(self, model_class):
|
||||||
self.database.create_table(model_class)
|
self.database.create_table(model_class)
|
||||||
self.database.insert(
|
self.database.insert(
|
||||||
[model_class(date="2017-01-01", event_id=23423, event_group=13, event_count=7, event_version=1)]
|
[
|
||||||
|
model_class(
|
||||||
|
date="2017-01-01",
|
||||||
|
event_id=23423,
|
||||||
|
event_group=13,
|
||||||
|
event_count=7,
|
||||||
|
event_version=1,
|
||||||
|
)
|
||||||
|
]
|
||||||
)
|
)
|
||||||
|
|
||||||
def test_merge_tree(self):
|
def test_merge_tree(self):
|
||||||
|
@ -31,7 +53,9 @@ class EnginesTestCase(_EnginesHelperTestCase):
|
||||||
def test_merge_tree_with_sampling(self):
|
def test_merge_tree_with_sampling(self):
|
||||||
class TestModel(SampleModel):
|
class TestModel(SampleModel):
|
||||||
engine = MergeTree(
|
engine = MergeTree(
|
||||||
"date", ("date", "event_id", "event_group", "intHash32(event_id)"), sampling_expr="intHash32(event_id)"
|
"date",
|
||||||
|
("date", "event_id", "event_group", "intHash32(event_id)"),
|
||||||
|
sampling_expr="intHash32(event_id)",
|
||||||
)
|
)
|
||||||
|
|
||||||
self._create_and_insert(TestModel)
|
self._create_and_insert(TestModel)
|
||||||
|
@ -129,15 +153,44 @@ class EnginesTestCase(_EnginesHelperTestCase):
|
||||||
# Insert operations are restricted for this model type
|
# Insert operations are restricted for this model type
|
||||||
with self.assertRaises(DatabaseException):
|
with self.assertRaises(DatabaseException):
|
||||||
self.database.insert(
|
self.database.insert(
|
||||||
[TestMergeModel(date="2017-01-01", event_id=23423, event_group=13, event_count=7, event_version=1)]
|
[
|
||||||
|
TestMergeModel(
|
||||||
|
date="2017-01-01",
|
||||||
|
event_id=23423,
|
||||||
|
event_group=13,
|
||||||
|
event_count=7,
|
||||||
|
event_version=1,
|
||||||
|
)
|
||||||
|
]
|
||||||
)
|
)
|
||||||
|
|
||||||
# Testing select
|
# Testing select
|
||||||
self.database.insert([TestModel1(date="2017-01-01", event_id=1, event_group=1, event_count=1, event_version=1)])
|
self.database.insert(
|
||||||
self.database.insert([TestModel2(date="2017-01-02", event_id=2, event_group=2, event_count=2, event_version=2)])
|
[
|
||||||
|
TestModel1(
|
||||||
|
date="2017-01-01",
|
||||||
|
event_id=1,
|
||||||
|
event_group=1,
|
||||||
|
event_count=1,
|
||||||
|
event_version=1,
|
||||||
|
)
|
||||||
|
]
|
||||||
|
)
|
||||||
|
self.database.insert(
|
||||||
|
[
|
||||||
|
TestModel2(
|
||||||
|
date="2017-01-02",
|
||||||
|
event_id=2,
|
||||||
|
event_group=2,
|
||||||
|
event_count=2,
|
||||||
|
event_version=2,
|
||||||
|
)
|
||||||
|
]
|
||||||
|
)
|
||||||
# event_uversion is materialized field. So * won't select it and it will be zero
|
# event_uversion is materialized field. So * won't select it and it will be zero
|
||||||
res = self.database.select(
|
res = self.database.select(
|
||||||
"SELECT *, _table, event_uversion FROM $table ORDER BY event_id", model_class=TestMergeModel
|
"SELECT *, _table, event_uversion FROM $table ORDER BY event_id",
|
||||||
|
model_class=TestMergeModel,
|
||||||
)
|
)
|
||||||
res = list(res)
|
res = list(res)
|
||||||
self.assertEqual(2, len(res))
|
self.assertEqual(2, len(res))
|
||||||
|
@ -169,7 +222,8 @@ class EnginesTestCase(_EnginesHelperTestCase):
|
||||||
def test_custom_partitioning(self):
|
def test_custom_partitioning(self):
|
||||||
class TestModel(SampleModel):
|
class TestModel(SampleModel):
|
||||||
engine = MergeTree(
|
engine = MergeTree(
|
||||||
order_by=("date", "event_id", "event_group"), partition_key=("toYYYYMM(date)", "event_group")
|
order_by=("date", "event_id", "event_group"),
|
||||||
|
partition_key=("toYYYYMM(date)", "event_group"),
|
||||||
)
|
)
|
||||||
|
|
||||||
class TestCollapseModel(SampleModel):
|
class TestCollapseModel(SampleModel):
|
||||||
|
@ -327,12 +381,27 @@ class DistributedTestCase(_EnginesHelperTestCase):
|
||||||
|
|
||||||
self.database.insert(
|
self.database.insert(
|
||||||
[
|
[
|
||||||
to_insert(date="2017-01-01", event_id=1, event_group=1, event_count=1, event_version=1),
|
to_insert(
|
||||||
to_insert(date="2017-01-02", event_id=2, event_group=2, event_count=2, event_version=2),
|
date="2017-01-01",
|
||||||
|
event_id=1,
|
||||||
|
event_group=1,
|
||||||
|
event_count=1,
|
||||||
|
event_version=1,
|
||||||
|
),
|
||||||
|
to_insert(
|
||||||
|
date="2017-01-02",
|
||||||
|
event_id=2,
|
||||||
|
event_group=2,
|
||||||
|
event_count=2,
|
||||||
|
event_version=2,
|
||||||
|
),
|
||||||
]
|
]
|
||||||
)
|
)
|
||||||
# event_uversion is materialized field. So * won't select it and it will be zero
|
# event_uversion is materialized field. So * won't select it and it will be zero
|
||||||
res = self.database.select("SELECT *, event_uversion FROM $table ORDER BY event_id", model_class=to_select)
|
res = self.database.select(
|
||||||
|
"SELECT *, event_uversion FROM $table ORDER BY event_id",
|
||||||
|
model_class=to_select,
|
||||||
|
)
|
||||||
res = [row for row in res]
|
res = [row for row in res]
|
||||||
self.assertEqual(2, len(res))
|
self.assertEqual(2, len(res))
|
||||||
self.assertDictEqual(
|
self.assertDictEqual(
|
||||||
|
|
|
@ -246,11 +246,20 @@ class FuncsTestCase(TestCaseWithData):
|
||||||
self._test_func(F.toStartOfYear(d), date(2018, 1, 1))
|
self._test_func(F.toStartOfYear(d), date(2018, 1, 1))
|
||||||
self._test_func(F.toStartOfYear(dt), date(2018, 1, 1))
|
self._test_func(F.toStartOfYear(dt), date(2018, 1, 1))
|
||||||
self._test_func(F.toStartOfMinute(dt), datetime(2018, 12, 31, 11, 22, 0, tzinfo=pytz.utc))
|
self._test_func(F.toStartOfMinute(dt), datetime(2018, 12, 31, 11, 22, 0, tzinfo=pytz.utc))
|
||||||
self._test_func(F.toStartOfFiveMinute(dt), datetime(2018, 12, 31, 11, 20, 0, tzinfo=pytz.utc))
|
self._test_func(
|
||||||
self._test_func(F.toStartOfFifteenMinutes(dt), datetime(2018, 12, 31, 11, 15, 0, tzinfo=pytz.utc))
|
F.toStartOfFiveMinute(dt),
|
||||||
|
datetime(2018, 12, 31, 11, 20, 0, tzinfo=pytz.utc),
|
||||||
|
)
|
||||||
|
self._test_func(
|
||||||
|
F.toStartOfFifteenMinutes(dt),
|
||||||
|
datetime(2018, 12, 31, 11, 15, 0, tzinfo=pytz.utc),
|
||||||
|
)
|
||||||
self._test_func(F.toStartOfHour(dt), datetime(2018, 12, 31, 11, 0, 0, tzinfo=pytz.utc))
|
self._test_func(F.toStartOfHour(dt), datetime(2018, 12, 31, 11, 0, 0, tzinfo=pytz.utc))
|
||||||
self._test_func(F.toStartOfISOYear(dt), date(2018, 12, 31))
|
self._test_func(F.toStartOfISOYear(dt), date(2018, 12, 31))
|
||||||
self._test_func(F.toStartOfTenMinutes(dt), datetime(2018, 12, 31, 11, 20, 0, tzinfo=pytz.utc))
|
self._test_func(
|
||||||
|
F.toStartOfTenMinutes(dt),
|
||||||
|
datetime(2018, 12, 31, 11, 20, 0, tzinfo=pytz.utc),
|
||||||
|
)
|
||||||
self._test_func(F.toStartOfWeek(dt), date(2018, 12, 30))
|
self._test_func(F.toStartOfWeek(dt), date(2018, 12, 30))
|
||||||
self._test_func(F.toTime(dt), datetime(1970, 1, 2, 11, 22, 33, tzinfo=pytz.utc))
|
self._test_func(F.toTime(dt), datetime(1970, 1, 2, 11, 22, 33, tzinfo=pytz.utc))
|
||||||
self._test_func(F.toUnixTimestamp(dt, "UTC"), int(dt.replace(tzinfo=pytz.utc).timestamp()))
|
self._test_func(F.toUnixTimestamp(dt, "UTC"), int(dt.replace(tzinfo=pytz.utc).timestamp()))
|
||||||
|
@ -328,9 +337,18 @@ class FuncsTestCase(TestCaseWithData):
|
||||||
self._test_func(F.toHour(dt), 11)
|
self._test_func(F.toHour(dt), 11)
|
||||||
self._test_func(F.toStartOfDay(dt), datetime(2018, 12, 31, 0, 0, 0, tzinfo=pytz.utc))
|
self._test_func(F.toStartOfDay(dt), datetime(2018, 12, 31, 0, 0, 0, tzinfo=pytz.utc))
|
||||||
self._test_func(F.toTime(dt, pytz.utc), datetime(1970, 1, 2, 11, 22, 33, tzinfo=pytz.utc))
|
self._test_func(F.toTime(dt, pytz.utc), datetime(1970, 1, 2, 11, 22, 33, tzinfo=pytz.utc))
|
||||||
self._test_func(F.toTime(dt, "Europe/Athens"), athens_tz.localize(datetime(1970, 1, 2, 13, 22, 33)))
|
self._test_func(
|
||||||
self._test_func(F.toTime(dt, athens_tz), athens_tz.localize(datetime(1970, 1, 2, 13, 22, 33)))
|
F.toTime(dt, "Europe/Athens"),
|
||||||
self._test_func(F.toTimeZone(dt, "Europe/Athens"), athens_tz.localize(datetime(2018, 12, 31, 13, 22, 33)))
|
athens_tz.localize(datetime(1970, 1, 2, 13, 22, 33)),
|
||||||
|
)
|
||||||
|
self._test_func(
|
||||||
|
F.toTime(dt, athens_tz),
|
||||||
|
athens_tz.localize(datetime(1970, 1, 2, 13, 22, 33)),
|
||||||
|
)
|
||||||
|
self._test_func(
|
||||||
|
F.toTimeZone(dt, "Europe/Athens"),
|
||||||
|
athens_tz.localize(datetime(2018, 12, 31, 13, 22, 33)),
|
||||||
|
)
|
||||||
self._test_func(F.today(), datetime.utcnow().date())
|
self._test_func(F.today(), datetime.utcnow().date())
|
||||||
self._test_func(F.yesterday(), datetime.utcnow().date() - timedelta(days=1))
|
self._test_func(F.yesterday(), datetime.utcnow().date() - timedelta(days=1))
|
||||||
self._test_func(F.toYYYYMMDDhhmmss(dt), 20181231112233)
|
self._test_func(F.toYYYYMMDDhhmmss(dt), 20181231112233)
|
||||||
|
@ -390,16 +408,25 @@ class FuncsTestCase(TestCaseWithData):
|
||||||
def test_type_conversion_functions__utc_only(self):
|
def test_type_conversion_functions__utc_only(self):
|
||||||
if self.database.server_timezone != pytz.utc:
|
if self.database.server_timezone != pytz.utc:
|
||||||
raise unittest.SkipTest("This test must run with UTC as the server timezone")
|
raise unittest.SkipTest("This test must run with UTC as the server timezone")
|
||||||
self._test_func(F.toDateTime("2018-12-31 11:22:33"), datetime(2018, 12, 31, 11, 22, 33, tzinfo=pytz.utc))
|
|
||||||
self._test_func(
|
self._test_func(
|
||||||
F.toDateTime64("2018-12-31 11:22:33.001", 6), datetime(2018, 12, 31, 11, 22, 33, 1000, tzinfo=pytz.utc)
|
F.toDateTime("2018-12-31 11:22:33"),
|
||||||
)
|
datetime(2018, 12, 31, 11, 22, 33, tzinfo=pytz.utc),
|
||||||
self._test_func(F.parseDateTimeBestEffort("31/12/2019 10:05AM"), datetime(2019, 12, 31, 10, 5, tzinfo=pytz.utc))
|
|
||||||
self._test_func(
|
|
||||||
F.parseDateTimeBestEffortOrNull("31/12/2019 10:05AM"), datetime(2019, 12, 31, 10, 5, tzinfo=pytz.utc)
|
|
||||||
)
|
)
|
||||||
self._test_func(
|
self._test_func(
|
||||||
F.parseDateTimeBestEffortOrZero("31/12/2019 10:05AM"), datetime(2019, 12, 31, 10, 5, tzinfo=pytz.utc)
|
F.toDateTime64("2018-12-31 11:22:33.001", 6),
|
||||||
|
datetime(2018, 12, 31, 11, 22, 33, 1000, tzinfo=pytz.utc),
|
||||||
|
)
|
||||||
|
self._test_func(
|
||||||
|
F.parseDateTimeBestEffort("31/12/2019 10:05AM"),
|
||||||
|
datetime(2019, 12, 31, 10, 5, tzinfo=pytz.utc),
|
||||||
|
)
|
||||||
|
self._test_func(
|
||||||
|
F.parseDateTimeBestEffortOrNull("31/12/2019 10:05AM"),
|
||||||
|
datetime(2019, 12, 31, 10, 5, tzinfo=pytz.utc),
|
||||||
|
)
|
||||||
|
self._test_func(
|
||||||
|
F.parseDateTimeBestEffortOrZero("31/12/2019 10:05AM"),
|
||||||
|
datetime(2019, 12, 31, 10, 5, tzinfo=pytz.utc),
|
||||||
)
|
)
|
||||||
|
|
||||||
def test_string_functions(self):
|
def test_string_functions(self):
|
||||||
|
@ -420,7 +447,10 @@ class FuncsTestCase(TestCaseWithData):
|
||||||
self._test_func(F.substringUTF8("123456", 3, 2), "34")
|
self._test_func(F.substringUTF8("123456", 3, 2), "34")
|
||||||
self._test_func(F.appendTrailingCharIfAbsent("Hello", "!"), "Hello!")
|
self._test_func(F.appendTrailingCharIfAbsent("Hello", "!"), "Hello!")
|
||||||
self._test_func(F.appendTrailingCharIfAbsent("Hello!", "!"), "Hello!")
|
self._test_func(F.appendTrailingCharIfAbsent("Hello!", "!"), "Hello!")
|
||||||
self._test_func(F.convertCharset(F.convertCharset("Hello", "latin1", "utf16"), "utf16", "latin1"), "Hello")
|
self._test_func(
|
||||||
|
F.convertCharset(F.convertCharset("Hello", "latin1", "utf16"), "utf16", "latin1"),
|
||||||
|
"Hello",
|
||||||
|
)
|
||||||
self._test_func(F.startsWith("aaa", "aa"), True)
|
self._test_func(F.startsWith("aaa", "aa"), True)
|
||||||
self._test_func(F.startsWith("aaa", "bb"), False)
|
self._test_func(F.startsWith("aaa", "bb"), False)
|
||||||
self._test_func(F.endsWith("aaa", "aa"), True)
|
self._test_func(F.endsWith("aaa", "aa"), True)
|
||||||
|
@ -592,17 +622,36 @@ class FuncsTestCase(TestCaseWithData):
|
||||||
self._test_func(F.bitmapContains(F.bitmapBuild([1, 5, 7, 9]), F.toUInt32(9)), 1)
|
self._test_func(F.bitmapContains(F.bitmapBuild([1, 5, 7, 9]), F.toUInt32(9)), 1)
|
||||||
self._test_func(F.bitmapHasAny(F.bitmapBuild([1, 2, 3]), F.bitmapBuild([3, 4, 5])), 1)
|
self._test_func(F.bitmapHasAny(F.bitmapBuild([1, 2, 3]), F.bitmapBuild([3, 4, 5])), 1)
|
||||||
self._test_func(F.bitmapHasAll(F.bitmapBuild([1, 2, 3]), F.bitmapBuild([3, 4, 5])), 0)
|
self._test_func(F.bitmapHasAll(F.bitmapBuild([1, 2, 3]), F.bitmapBuild([3, 4, 5])), 0)
|
||||||
self._test_func(F.bitmapToArray(F.bitmapAnd(F.bitmapBuild([1, 2, 3]), F.bitmapBuild([3, 4, 5]))), [3])
|
|
||||||
self._test_func(
|
self._test_func(
|
||||||
F.bitmapToArray(F.bitmapOr(F.bitmapBuild([1, 2, 3]), F.bitmapBuild([3, 4, 5]))), [1, 2, 3, 4, 5]
|
F.bitmapToArray(F.bitmapAnd(F.bitmapBuild([1, 2, 3]), F.bitmapBuild([3, 4, 5]))),
|
||||||
|
[3],
|
||||||
|
)
|
||||||
|
self._test_func(
|
||||||
|
F.bitmapToArray(F.bitmapOr(F.bitmapBuild([1, 2, 3]), F.bitmapBuild([3, 4, 5]))),
|
||||||
|
[1, 2, 3, 4, 5],
|
||||||
|
)
|
||||||
|
self._test_func(
|
||||||
|
F.bitmapToArray(F.bitmapXor(F.bitmapBuild([1, 2, 3]), F.bitmapBuild([3, 4, 5]))),
|
||||||
|
[1, 2, 4, 5],
|
||||||
|
)
|
||||||
|
self._test_func(
|
||||||
|
F.bitmapToArray(F.bitmapAndnot(F.bitmapBuild([1, 2, 3]), F.bitmapBuild([3, 4, 5]))),
|
||||||
|
[1, 2],
|
||||||
)
|
)
|
||||||
self._test_func(F.bitmapToArray(F.bitmapXor(F.bitmapBuild([1, 2, 3]), F.bitmapBuild([3, 4, 5]))), [1, 2, 4, 5])
|
|
||||||
self._test_func(F.bitmapToArray(F.bitmapAndnot(F.bitmapBuild([1, 2, 3]), F.bitmapBuild([3, 4, 5]))), [1, 2])
|
|
||||||
self._test_func(F.bitmapCardinality(F.bitmapBuild([1, 2, 3, 4, 5])), 5)
|
self._test_func(F.bitmapCardinality(F.bitmapBuild([1, 2, 3, 4, 5])), 5)
|
||||||
self._test_func(F.bitmapAndCardinality(F.bitmapBuild([1, 2, 3]), F.bitmapBuild([3, 4, 5])), 1)
|
self._test_func(
|
||||||
|
F.bitmapAndCardinality(F.bitmapBuild([1, 2, 3]), F.bitmapBuild([3, 4, 5])),
|
||||||
|
1,
|
||||||
|
)
|
||||||
self._test_func(F.bitmapOrCardinality(F.bitmapBuild([1, 2, 3]), F.bitmapBuild([3, 4, 5])), 5)
|
self._test_func(F.bitmapOrCardinality(F.bitmapBuild([1, 2, 3]), F.bitmapBuild([3, 4, 5])), 5)
|
||||||
self._test_func(F.bitmapXorCardinality(F.bitmapBuild([1, 2, 3]), F.bitmapBuild([3, 4, 5])), 4)
|
self._test_func(
|
||||||
self._test_func(F.bitmapAndnotCardinality(F.bitmapBuild([1, 2, 3]), F.bitmapBuild([3, 4, 5])), 2)
|
F.bitmapXorCardinality(F.bitmapBuild([1, 2, 3]), F.bitmapBuild([3, 4, 5])),
|
||||||
|
4,
|
||||||
|
)
|
||||||
|
self._test_func(
|
||||||
|
F.bitmapAndnotCardinality(F.bitmapBuild([1, 2, 3]), F.bitmapBuild([3, 4, 5])),
|
||||||
|
2,
|
||||||
|
)
|
||||||
|
|
||||||
def test_hash_functions(self):
|
def test_hash_functions(self):
|
||||||
args = ["x", "y", "z"]
|
args = ["x", "y", "z"]
|
||||||
|
@ -662,16 +711,26 @@ class FuncsTestCase(TestCaseWithData):
|
||||||
self._test_func(F.IPv4NumToString(F.toUInt32(1)), "0.0.0.1")
|
self._test_func(F.IPv4NumToString(F.toUInt32(1)), "0.0.0.1")
|
||||||
self._test_func(F.IPv4NumToStringClassC(F.toUInt32(1)), "0.0.0.xxx")
|
self._test_func(F.IPv4NumToStringClassC(F.toUInt32(1)), "0.0.0.xxx")
|
||||||
self._test_func(F.IPv4StringToNum("0.0.0.17"), 17)
|
self._test_func(F.IPv4StringToNum("0.0.0.17"), 17)
|
||||||
self._test_func(F.IPv6NumToString(F.IPv4ToIPv6(F.IPv4StringToNum("192.168.0.1"))), "::ffff:192.168.0.1")
|
self._test_func(
|
||||||
|
F.IPv6NumToString(F.IPv4ToIPv6(F.IPv4StringToNum("192.168.0.1"))),
|
||||||
|
"::ffff:192.168.0.1",
|
||||||
|
)
|
||||||
self._test_func(F.IPv6NumToString(F.IPv6StringToNum("2a02:6b8::11")), "2a02:6b8::11")
|
self._test_func(F.IPv6NumToString(F.IPv6StringToNum("2a02:6b8::11")), "2a02:6b8::11")
|
||||||
self._test_func(F.toIPv4("10.20.30.40"), IPv4Address("10.20.30.40"))
|
self._test_func(F.toIPv4("10.20.30.40"), IPv4Address("10.20.30.40"))
|
||||||
self._test_func(F.toIPv6("2001:438:ffff::407d:1bc1"), IPv6Address("2001:438:ffff::407d:1bc1"))
|
|
||||||
self._test_func(
|
self._test_func(
|
||||||
F.IPv4CIDRToRange(F.toIPv4("192.168.5.2"), 16), [IPv4Address("192.168.0.0"), IPv4Address("192.168.255.255")]
|
F.toIPv6("2001:438:ffff::407d:1bc1"),
|
||||||
|
IPv6Address("2001:438:ffff::407d:1bc1"),
|
||||||
|
)
|
||||||
|
self._test_func(
|
||||||
|
F.IPv4CIDRToRange(F.toIPv4("192.168.5.2"), 16),
|
||||||
|
[IPv4Address("192.168.0.0"), IPv4Address("192.168.255.255")],
|
||||||
)
|
)
|
||||||
self._test_func(
|
self._test_func(
|
||||||
F.IPv6CIDRToRange(F.toIPv6("2001:0db8:0000:85a3:0000:0000:ac1f:8001"), 32),
|
F.IPv6CIDRToRange(F.toIPv6("2001:0db8:0000:85a3:0000:0000:ac1f:8001"), 32),
|
||||||
[IPv6Address("2001:db8::"), IPv6Address("2001:db8:ffff:ffff:ffff:ffff:ffff:ffff")],
|
[
|
||||||
|
IPv6Address("2001:db8::"),
|
||||||
|
IPv6Address("2001:db8:ffff:ffff:ffff:ffff:ffff:ffff"),
|
||||||
|
],
|
||||||
)
|
)
|
||||||
|
|
||||||
def test_aggregate_funcs(self):
|
def test_aggregate_funcs(self):
|
||||||
|
@ -680,7 +739,10 @@ class FuncsTestCase(TestCaseWithData):
|
||||||
self._test_aggr(F.anyLast(Person.first_name))
|
self._test_aggr(F.anyLast(Person.first_name))
|
||||||
self._test_aggr(F.argMin(Person.first_name, Person.height))
|
self._test_aggr(F.argMin(Person.first_name, Person.height))
|
||||||
self._test_aggr(F.argMax(Person.first_name, Person.height))
|
self._test_aggr(F.argMax(Person.first_name, Person.height))
|
||||||
self._test_aggr(F.round(F.avg(Person.height), 4), sum(p.height for p in self._sample_data()) / 100)
|
self._test_aggr(
|
||||||
|
F.round(F.avg(Person.height), 4),
|
||||||
|
sum(p.height for p in self._sample_data()) / 100,
|
||||||
|
)
|
||||||
self._test_aggr(F.corr(Person.height, Person.height), 1)
|
self._test_aggr(F.corr(Person.height, Person.height), 1)
|
||||||
self._test_aggr(F.count(), 100)
|
self._test_aggr(F.count(), 100)
|
||||||
self._test_aggr(F.round(F.covarPop(Person.height, Person.height), 2), 0)
|
self._test_aggr(F.round(F.covarPop(Person.height, Person.height), 2), 0)
|
||||||
|
|
|
@@ -1,6 +1,7 @@
 import unittest

-from clickhouse_orm import *
+from clickhouse_orm import Database, F, Index, MergeTree, Model
+from clickhouse_orm.fields import DateField, Int32Field, StringField


 class IndexesTest(unittest.TestCase):
@@ -1,8 +1,8 @@
 import unittest

 from clickhouse_orm.database import Database
-from clickhouse_orm.engines import *
-from clickhouse_orm.fields import *
+from clickhouse_orm.engines import MergeTree
+from clickhouse_orm.fields import DateField, Float32Field, Int32Field, StringField
 from clickhouse_orm.models import Model


@@ -6,8 +6,23 @@ import unittest
 from enum import Enum

 from clickhouse_orm.database import Database, ServerError
-from clickhouse_orm.engines import *
-from clickhouse_orm.fields import *
+from clickhouse_orm.engines import Buffer, MergeTree
+from clickhouse_orm.fields import (
+    ArrayField,
+    DateField,
+    DateTimeField,
+    Enum8Field,
+    Enum16Field,
+    Float32Field,
+    Float64Field,
+    Int8Field,
+    Int32Field,
+    Int64Field,
+    LowCardinalityField,
+    NullableField,
+    StringField,
+    UInt64Field,
+)
 from clickhouse_orm.migrations import MigrationHistory
 from clickhouse_orm.models import BufferModel, Constraint, Index, Model

@ -45,7 +60,10 @@ class MigrationsTestCase(unittest.TestCase):
|
||||||
self.database.migrate("tests.sample_migrations", 3)
|
self.database.migrate("tests.sample_migrations", 3)
|
||||||
self.assertTrue(self.table_exists(Model1))
|
self.assertTrue(self.table_exists(Model1))
|
||||||
# Adding, removing and altering simple fields
|
# Adding, removing and altering simple fields
|
||||||
self.assertEqual(self.get_table_fields(Model1), [("date", "Date"), ("f1", "Int32"), ("f2", "String")])
|
self.assertEqual(
|
||||||
|
self.get_table_fields(Model1),
|
||||||
|
[("date", "Date"), ("f1", "Int32"), ("f2", "String")],
|
||||||
|
)
|
||||||
self.database.migrate("tests.sample_migrations", 4)
|
self.database.migrate("tests.sample_migrations", 4)
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
self.get_table_fields(Model2),
|
self.get_table_fields(Model2),
|
||||||
|
@ -60,36 +78,59 @@ class MigrationsTestCase(unittest.TestCase):
|
||||||
)
|
)
|
||||||
self.database.migrate("tests.sample_migrations", 5)
|
self.database.migrate("tests.sample_migrations", 5)
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
self.get_table_fields(Model3), [("date", "Date"), ("f1", "Int64"), ("f3", "Float64"), ("f4", "String")]
|
self.get_table_fields(Model3),
|
||||||
|
[("date", "Date"), ("f1", "Int64"), ("f3", "Float64"), ("f4", "String")],
|
||||||
)
|
)
|
||||||
# Altering enum fields
|
# Altering enum fields
|
||||||
self.database.migrate("tests.sample_migrations", 6)
|
self.database.migrate("tests.sample_migrations", 6)
|
||||||
self.assertTrue(self.table_exists(EnumModel1))
|
self.assertTrue(self.table_exists(EnumModel1))
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
self.get_table_fields(EnumModel1), [("date", "Date"), ("f1", "Enum8('dog' = 1, 'cat' = 2, 'cow' = 3)")]
|
self.get_table_fields(EnumModel1),
|
||||||
|
[("date", "Date"), ("f1", "Enum8('dog' = 1, 'cat' = 2, 'cow' = 3)")],
|
||||||
)
|
)
|
||||||
self.database.migrate("tests.sample_migrations", 7)
|
self.database.migrate("tests.sample_migrations", 7)
|
||||||
self.assertTrue(self.table_exists(EnumModel1))
|
self.assertTrue(self.table_exists(EnumModel1))
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
self.get_table_fields(EnumModel2),
|
self.get_table_fields(EnumModel2),
|
||||||
[("date", "Date"), ("f1", "Enum16('dog' = 1, 'cat' = 2, 'horse' = 3, 'pig' = 4)")],
|
[
|
||||||
|
("date", "Date"),
|
||||||
|
("f1", "Enum16('dog' = 1, 'cat' = 2, 'horse' = 3, 'pig' = 4)"),
|
||||||
|
],
|
||||||
)
|
)
|
||||||
# Materialized fields and alias fields
|
# Materialized fields and alias fields
|
||||||
self.database.migrate("tests.sample_migrations", 8)
|
self.database.migrate("tests.sample_migrations", 8)
|
||||||
self.assertTrue(self.table_exists(MaterializedModel))
|
self.assertTrue(self.table_exists(MaterializedModel))
|
||||||
self.assertEqual(self.get_table_fields(MaterializedModel), [("date_time", "DateTime"), ("date", "Date")])
|
self.assertEqual(
|
||||||
|
self.get_table_fields(MaterializedModel),
|
||||||
|
[("date_time", "DateTime"), ("date", "Date")],
|
||||||
|
)
|
||||||
self.database.migrate("tests.sample_migrations", 9)
|
self.database.migrate("tests.sample_migrations", 9)
|
||||||
self.assertTrue(self.table_exists(AliasModel))
|
self.assertTrue(self.table_exists(AliasModel))
|
||||||
self.assertEqual(self.get_table_fields(AliasModel), [("date", "Date"), ("date_alias", "Date")])
|
self.assertEqual(
|
||||||
|
self.get_table_fields(AliasModel),
|
||||||
|
[("date", "Date"), ("date_alias", "Date")],
|
||||||
|
)
|
||||||
# Buffer models creation and alteration
|
# Buffer models creation and alteration
|
||||||
self.database.migrate("tests.sample_migrations", 10)
|
self.database.migrate("tests.sample_migrations", 10)
|
||||||
self.assertTrue(self.table_exists(Model4))
|
self.assertTrue(self.table_exists(Model4))
|
||||||
self.assertTrue(self.table_exists(Model4Buffer))
|
self.assertTrue(self.table_exists(Model4Buffer))
|
||||||
self.assertEqual(self.get_table_fields(Model4), [("date", "Date"), ("f1", "Int32"), ("f2", "String")])
|
self.assertEqual(
|
||||||
self.assertEqual(self.get_table_fields(Model4Buffer), [("date", "Date"), ("f1", "Int32"), ("f2", "String")])
|
self.get_table_fields(Model4),
|
||||||
|
[("date", "Date"), ("f1", "Int32"), ("f2", "String")],
|
||||||
|
)
|
||||||
|
self.assertEqual(
|
||||||
|
self.get_table_fields(Model4Buffer),
|
||||||
|
[("date", "Date"), ("f1", "Int32"), ("f2", "String")],
|
||||||
|
)
|
||||||
self.database.migrate("tests.sample_migrations", 11)
|
self.database.migrate("tests.sample_migrations", 11)
|
||||||
self.assertEqual(self.get_table_fields(Model4), [("date", "Date"), ("f3", "DateTime"), ("f2", "String")])
|
self.assertEqual(
|
||||||
self.assertEqual(self.get_table_fields(Model4Buffer), [("date", "Date"), ("f3", "DateTime"), ("f2", "String")])
|
self.get_table_fields(Model4),
|
||||||
|
[("date", "Date"), ("f3", "DateTime"), ("f2", "String")],
|
||||||
|
)
|
||||||
|
self.assertEqual(
|
||||||
|
self.get_table_fields(Model4Buffer),
|
||||||
|
[("date", "Date"), ("f3", "DateTime"), ("f2", "String")],
|
||||||
|
)
|
||||||
|
|
||||||
self.database.migrate("tests.sample_migrations", 12)
|
self.database.migrate("tests.sample_migrations", 12)
|
||||||
self.assertEqual(self.database.count(Model3), 3)
|
self.assertEqual(self.database.count(Model3), 3)
|
||||||
|
@ -105,12 +146,22 @@ class MigrationsTestCase(unittest.TestCase):
|
||||||
self.assertTrue(self.table_exists(MaterializedModel1))
|
self.assertTrue(self.table_exists(MaterializedModel1))
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
self.get_table_fields(MaterializedModel1),
|
self.get_table_fields(MaterializedModel1),
|
||||||
[("date_time", "DateTime"), ("int_field", "Int8"), ("date", "Date"), ("int_field_plus_one", "Int8")],
|
[
|
||||||
|
("date_time", "DateTime"),
|
||||||
|
("int_field", "Int8"),
|
||||||
|
("date", "Date"),
|
||||||
|
("int_field_plus_one", "Int8"),
|
||||||
|
],
|
||||||
)
|
)
|
||||||
self.assertTrue(self.table_exists(AliasModel1))
|
self.assertTrue(self.table_exists(AliasModel1))
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
self.get_table_fields(AliasModel1),
|
self.get_table_fields(AliasModel1),
|
||||||
[("date", "Date"), ("int_field", "Int8"), ("date_alias", "Date"), ("int_field_plus_one", "Int8")],
|
[
|
||||||
|
("date", "Date"),
|
||||||
|
("int_field", "Int8"),
|
||||||
|
("date_alias", "Date"),
|
||||||
|
("int_field_plus_one", "Int8"),
|
||||||
|
],
|
||||||
)
|
)
|
||||||
# Codecs and low cardinality
|
# Codecs and low cardinality
|
||||||
self.database.migrate("tests.sample_migrations", 15)
|
self.database.migrate("tests.sample_migrations", 15)
|
||||||
|
|
|
@@ -3,8 +3,8 @@ import unittest

 import pytz

-from clickhouse_orm.engines import *
-from clickhouse_orm.fields import *
+from clickhouse_orm.engines import MergeTree
+from clickhouse_orm.fields import DateField, DateTimeField, Float32Field, Int32Field, StringField
 from clickhouse_orm.funcs import F
 from clickhouse_orm.models import NO_VALUE, Model

@ -83,8 +83,14 @@ class ModelTestCase(unittest.TestCase):
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
self.assertDictEqual(
|
self.assertDictEqual(
|
||||||
instance.to_dict(include_readonly=False, field_names=("int_field", "alias_field", "datetime_field")),
|
instance.to_dict(
|
||||||
{"int_field": 100, "datetime_field": datetime.datetime(1970, 1, 1, 0, 0, 0, tzinfo=pytz.utc)},
|
include_readonly=False,
|
||||||
|
field_names=("int_field", "alias_field", "datetime_field"),
|
||||||
|
),
|
||||||
|
{
|
||||||
|
"int_field": 100,
|
||||||
|
"datetime_field": datetime.datetime(1970, 1, 1, 0, 0, 0, tzinfo=pytz.utc),
|
||||||
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
def test_field_name_in_error_message_for_invalid_value_in_constructor(self):
|
def test_field_name_in_error_message_for_invalid_value_in_constructor(self):
|
||||||
|
@ -93,7 +99,8 @@ class ModelTestCase(unittest.TestCase):
|
||||||
SimpleModel(str_field=bad_value)
|
SimpleModel(str_field=bad_value)
|
||||||
|
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
"Invalid value for StringField: {} (field 'str_field')".format(repr(bad_value)), str(cm.exception)
|
"Invalid value for StringField: {} (field 'str_field')".format(repr(bad_value)),
|
||||||
|
str(cm.exception),
|
||||||
)
|
)
|
||||||
|
|
||||||
def test_field_name_in_error_message_for_invalid_value_in_assignment(self):
|
def test_field_name_in_error_message_for_invalid_value_in_assignment(self):
|
||||||
|
@ -103,7 +110,8 @@ class ModelTestCase(unittest.TestCase):
|
||||||
instance.float_field = bad_value
|
instance.float_field = bad_value
|
||||||
|
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
"Invalid value for Float32Field - {} (field 'float_field')".format(repr(bad_value)), str(cm.exception)
|
"Invalid value for Float32Field - {} (field 'float_field')".format(repr(bad_value)),
|
||||||
|
str(cm.exception),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@@ -3,7 +3,7 @@ from time import sleep

 from clickhouse_orm import F

-from .base_test_with_data import *
+from .base_test_with_data import Person, TestCaseWithData


 class MutationsTestCase(TestCaseWithData):
@ -79,7 +79,10 @@ class QuerySetTestCase(TestCaseWithData):
|
||||||
qs = Person.objects_in(self.database)
|
qs = Person.objects_in(self.database)
|
||||||
self._test_qs(qs.filter(Q(first_name="Ciaran")), 2)
|
self._test_qs(qs.filter(Q(first_name="Ciaran")), 2)
|
||||||
self._test_qs(qs.filter(Q(first_name="Ciaran") | Q(first_name="Chelsea")), 3)
|
self._test_qs(qs.filter(Q(first_name="Ciaran") | Q(first_name="Chelsea")), 3)
|
||||||
self._test_qs(qs.filter(Q(first_name__in=["Warren", "Whilemina", "Whitney"]) & Q(height__gte=1.7)), 3)
|
self._test_qs(
|
||||||
|
qs.filter(Q(first_name__in=["Warren", "Whilemina", "Whitney"]) & Q(height__gte=1.7)),
|
||||||
|
3,
|
||||||
|
)
|
||||||
self._test_qs(
|
self._test_qs(
|
||||||
qs.filter(
|
qs.filter(
|
||||||
(
|
(
|
||||||
|
@ -103,7 +106,10 @@ class QuerySetTestCase(TestCaseWithData):
|
||||||
),
|
),
|
||||||
2,
|
2,
|
||||||
)
|
)
|
||||||
self._test_qs(qs.filter(Q(first_name="Courtney") | Q(first_name="Cassady") & Q(last_name="Knapp")), 3)
|
self._test_qs(
|
||||||
|
qs.filter(Q(first_name="Courtney") | Q(first_name="Cassady") & Q(last_name="Knapp")),
|
||||||
|
3,
|
||||||
|
)
|
||||||
|
|
||||||
def test_filter_unicode_string(self):
|
def test_filter_unicode_string(self):
|
||||||
self.database.insert([Person(first_name=u"דונלד", last_name=u"דאק")])
|
self.database.insert([Person(first_name=u"דונלד", last_name=u"דאק")])
|
||||||
|
@ -269,7 +275,10 @@ class QuerySetTestCase(TestCaseWithData):
|
||||||
page_a = qs.paginate(-1, page_size)
|
page_a = qs.paginate(-1, page_size)
|
||||||
page_b = qs.paginate(page_a.pages_total, page_size)
|
page_b = qs.paginate(page_a.pages_total, page_size)
|
||||||
self.assertEqual(page_a[1:], page_b[1:])
|
self.assertEqual(page_a[1:], page_b[1:])
|
||||||
self.assertEqual([obj.to_tsv() for obj in page_a.objects], [obj.to_tsv() for obj in page_b.objects])
|
self.assertEqual(
|
||||||
|
[obj.to_tsv() for obj in page_a.objects],
|
||||||
|
[obj.to_tsv() for obj in page_b.objects],
|
||||||
|
)
|
||||||
|
|
||||||
def test_pagination_invalid_page(self):
|
def test_pagination_invalid_page(self):
|
||||||
qs = Person.objects_in(self.database).order_by("first_name", "last_name")
|
qs = Person.objects_in(self.database).order_by("first_name", "last_name")
|
||||||
|
@ -320,7 +329,8 @@ class QuerySetTestCase(TestCaseWithData):
|
||||||
qs = Person.objects_in(self.database)
|
qs = Person.objects_in(self.database)
|
||||||
qs = qs.filter(Q(first_name="a"), F("greater", Person.height, 1.7), last_name="b")
|
qs = qs.filter(Q(first_name="a"), F("greater", Person.height, 1.7), last_name="b")
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
qs.conditions_as_sql(), "(first_name = 'a') AND (greater(`height`, 1.7)) AND (last_name = 'b')"
|
qs.conditions_as_sql(),
|
||||||
|
"(first_name = 'a') AND (greater(`height`, 1.7)) AND (last_name = 'b')",
|
||||||
)
|
)
|
||||||
|
|
||||||
def test_invalid_filter(self):
|
def test_invalid_filter(self):
|
||||||
|
|
|
@@ -1,8 +1,12 @@
 # -*- coding: utf-8 -*-
+import unittest

-from clickhouse_orm.database import DatabaseException, ServerError
+from clickhouse_orm.database import Database, DatabaseException, ServerError
+from clickhouse_orm.engines import MergeTree
+from clickhouse_orm.fields import DateField, StringField
+from clickhouse_orm.models import Model

-from .base_test_with_data import *
+from .base_test_with_data import Person, TestCaseWithData, data


 class ReadonlyTestCase(TestCaseWithData):
@ -65,7 +69,10 @@ class ReadonlyTestCase(TestCaseWithData):
|
||||||
def test_nonexisting_readonly_database(self):
|
def test_nonexisting_readonly_database(self):
|
||||||
with self.assertRaises(DatabaseException) as cm:
|
with self.assertRaises(DatabaseException) as cm:
|
||||||
Database("dummy", readonly=True)
|
Database("dummy", readonly=True)
|
||||||
self.assertEqual(str(cm.exception), "Database does not exist, and cannot be created under readonly connection")
|
self.assertEqual(
|
||||||
|
str(cm.exception),
|
||||||
|
"Database does not exist, and cannot be created under readonly connection",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class ReadOnlyModel(Model):
|
class ReadOnlyModel(Model):
|
||||||
|
|
|
@ -16,7 +16,10 @@ class ServerErrorTest(unittest.TestCase):
|
||||||
"Code: 161, e.displayText() = DB::Exception: Limit for number of columns to read exceeded. Requested: 11, maximum: 1, e.what() = DB::Exception\n"
|
"Code: 161, e.displayText() = DB::Exception: Limit for number of columns to read exceeded. Requested: 11, maximum: 1, e.what() = DB::Exception\n"
|
||||||
)
|
)
|
||||||
self.assertEqual(code, 161)
|
self.assertEqual(code, 161)
|
||||||
self.assertEqual(msg, "Limit for number of columns to read exceeded. Requested: 11, maximum: 1")
|
self.assertEqual(
|
||||||
|
msg,
|
||||||
|
"Limit for number of columns to read exceeded. Requested: 11, maximum: 1",
|
||||||
|
)
|
||||||
|
|
||||||
def test_new_format(self):
|
def test_new_format(self):
|
||||||
|
|
||||||
|
|
|
@ -120,7 +120,10 @@ class CustomPartitionedTable(Model):
|
||||||
date_field = DateField()
|
date_field = DateField()
|
||||||
group_field = UInt32Field()
|
group_field = UInt32Field()
|
||||||
|
|
||||||
engine = MergeTree(order_by=("date_field", "group_field"), partition_key=("toYYYYMM(date_field)", "group_field"))
|
engine = MergeTree(
|
||||||
|
order_by=("date_field", "group_field"),
|
||||||
|
partition_key=("toYYYYMM(date_field)", "group_field"),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class SystemTestModel(Model):
|
class SystemTestModel(Model):
|
||||||
|
|