Mirror of https://github.com/Infinidat/infi.clickhouse_orm.git (synced 2024-11-22 00:56:34 +03:00)

Finished Release v1.1.0
This commit is contained in commit 40725372fe
CHANGELOG.md (+14)

@@ -1,6 +1,20 @@
 Change Log
 ==========
 
+v1.1.0
+------
+- Add PREWHERE support to querysets (M1hacka)
+- Add WITH TOTALS support to querysets (M1hacka)
+- Extend date field range (trthhrtz)
+- Fix parsing of server errors in ClickHouse v19.3.3+
+- Fix pagination when asking for the last page on a query that matches no records
+- Use HTTP Basic Authentication instead of passing the credentials in the URL
+- Support default/alias/materialized for nullable fields
+- Add UUIDField (kpotehin)
+- Add `log_statements` parameter to database initializer
+- Fix test_merge which fails on ClickHouse v19.8.3
+- Fix querysets using the SystemPart model
+
 v1.0.4
 ------
 - Added `timeout` parameter to database initializer (SUHAR1K)
@@ -49,7 +49,7 @@ eggs = ${project:name}
 ipython<6
 nose
 coverage
-enum34
+enum-compat
 infi.unittest
 infi.traceback
 memory_profiler
@@ -701,6 +701,13 @@ Extends BaseIntField
 #### UInt8Field(default=None, alias=None, materialized=None, readonly=None)
 
 
+### UUIDField
+
+Extends Field
+
+#### UUIDField(default=None, alias=None, materialized=None, readonly=None)
+
+
 infi.clickhouse_orm.engines
 ---------------------------

@@ -835,10 +842,10 @@ is equivalent to:
 Returns the whole query as a SQL string.
 
 
-#### conditions_as_sql()
+#### conditions_as_sql(prewhere=False)
 
 
-Returns the contents of the query's `WHERE` clause as a string.
+Returns the contents of the query's `WHERE` or `PREWHERE` clause as a string.
 
 
 #### count()

@@ -854,17 +861,18 @@ Adds a DISTINCT clause to the query, meaning that any duplicate rows
 in the results will be omitted.
 
 
-#### exclude(**filter_fields)
+#### exclude(*q, **kwargs)
 
 
 Returns a copy of this queryset that excludes all rows matching the conditions.
+Pass `prewhere=True` to apply the conditions as PREWHERE instead of WHERE.
 
 
-#### filter(*q, **filter_fields)
+#### filter(*q, **kwargs)
 
 
 Returns a copy of this queryset that includes only rows matching the conditions.
-Add q object to query if it specified.
+Pass `prewhere=True` to apply the conditions as PREWHERE instead of WHERE.
 
 
 #### final()

@@ -908,6 +916,12 @@ The result is a namedtuple containing `objects` (list), `number_of_objects`,
 `pages_total`, `number` (of the current page), and `page_size`.
 
 
+#### select_fields_as_sql()
+
+
+Returns the selected fields or expressions as a SQL string.
+
+
 ### AggregateQuerySet
 
 Extends QuerySet

@@ -943,10 +957,10 @@ This method is not supported on `AggregateQuerySet`.
 Returns the whole query as a SQL string.
 
 
-#### conditions_as_sql()
+#### conditions_as_sql(prewhere=False)
 
 
-Returns the contents of the query's `WHERE` clause as a string.
+Returns the contents of the query's `WHERE` or `PREWHERE` clause as a string.
 
 
 #### count()

@@ -962,17 +976,18 @@ Adds a DISTINCT clause to the query, meaning that any duplicate rows
 in the results will be omitted.
 
 
-#### exclude(**filter_fields)
+#### exclude(*q, **kwargs)
 
 
 Returns a copy of this queryset that excludes all rows matching the conditions.
+Pass `prewhere=True` to apply the conditions as PREWHERE instead of WHERE.
 
 
-#### filter(*q, **filter_fields)
+#### filter(*q, **kwargs)
 
 
 Returns a copy of this queryset that includes only rows matching the conditions.
-Add q object to query if it specified.
+Pass `prewhere=True` to apply the conditions as PREWHERE instead of WHERE.
 
 
 #### final()

@@ -1022,3 +1037,17 @@ The result is a namedtuple containing `objects` (list), `number_of_objects`,
 `pages_total`, `number` (of the current page), and `page_size`.
 
 
+#### select_fields_as_sql()
+
+
+Returns the selected fields or expressions as a SQL string.
+
+
+#### with_totals()
+
+
+Adds a WITH TOTALS modifier to GROUP BY, making the query return an extra row
+with aggregate functions calculated across all the rows. More information:
+https://clickhouse.yandex/docs/en/query_language/select/#with-totals-modifier
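
For context, a minimal usage sketch of the new `with_totals()` queryset method (assuming the `Person` model and a live `database` connection from the project's docs and tests):

```python
qs = Person.objects_in(database) \
        .aggregate('first_name', count='count()') \
        .with_totals() \
        .order_by('-count')[:3]
for row in qs:
    # The final row is the WITH TOTALS row: its grouping fields are empty
    # and its aggregates are calculated across all rows matching the filters.
    print(row.first_name, row.count)
```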
@@ -9,7 +9,7 @@ Currently the following field types are supported:
 | ------------------ | ---------- | ------------------- | -----------------------------------------------------
 | StringField | String | unicode | Encoded as UTF-8 when written to ClickHouse
 | FixedStringField | String | unicode | Encoded as UTF-8 when written to ClickHouse
-| DateField | Date | datetime.date | Range 1970-01-01 to 2038-01-19
+| DateField | Date | datetime.date | Range 1970-01-01 to 2105-12-31
 | DateTimeField | DateTime | datetime.datetime | Minimal value is 1970-01-01 00:00:00; Always in UTC
 | Int8Field | Int8 | int | Range -128 to 127
 | Int16Field | Int16 | int | Range -32768 to 32767

@@ -25,6 +25,7 @@ Currently the following field types are supported:
 | Decimal32Field | Decimal32 | Decimal | Ditto
 | Decimal64Field | Decimal64 | Decimal | Ditto
 | Decimal128Field | Decimal128 | Decimal | Ditto
+| UUIDField | UUID | uuid.UUID |
 | Enum8Field | Enum8 | Enum | See below
 | Enum16Field | Enum16 | Enum | See below
 | ArrayField | Array | list | See below
@@ -183,46 +184,6 @@ class BooleanField(Field):
         return '1' if value else '0'
 ```
 
-Here's another example - a field for storing UUIDs in the database as 16-byte strings. We'll use Python's built-in `UUID` class to handle the conversion from strings, ints and tuples into UUID instances. So in our Python code we'll have the convenience of working with UUID objects, but they will be stored in the database as efficiently as possible:
-
-```python
-from infi.clickhouse_orm.fields import Field
-from infi.clickhouse_orm.utils import escape
-from uuid import UUID
-import six
-
-class UUIDField(Field):
-
-    # The ClickHouse column type to use
-    db_type = 'FixedString(16)'
-
-    # The default value if empty
-    class_default = UUID(int=0)
-
-    def to_python(self, value, timezone_in_use):
-        # Convert valid values to UUID instance
-        if isinstance(value, UUID):
-            return value
-        elif isinstance(value, six.string_types):
-            return UUID(bytes=value.encode('latin1')) if len(value) == 16 else UUID(value)
-        elif isinstance(value, six.integer_types):
-            return UUID(int=value)
-        elif isinstance(value, tuple):
-            return UUID(fields=value)
-        else:
-            raise ValueError('Invalid value for UUIDField: %r' % value)
-
-    def to_db_string(self, value, quote=True):
-        # The value was already converted by to_python, so it's a UUID instance
-        val = value.bytes
-        if six.PY3:
-            val = str(val, 'latin1')
-        return escape(val, quote)
-
-```
-
-Note that the latin-1 encoding is used as an identity encoding for converting between raw bytes and strings. This is required in Python 3, where `str` and `bytes` are different types.
-
 ---
 
 [<< Querysets](querysets.md) | [Table of Contents](toc.md) | [Table Engines >>](table_engines.md)
@@ -89,7 +89,7 @@ When values are assigned to model fields, they are immediately converted to
     >>> suzy.birthday = 0.5
     ValueError: Invalid value for DateField - 0.5
     >>> suzy.birthday = '1922-05-31'
-    ValueError: DateField out of range - 1922-05-31 is not between 1970-01-01 and 2038-01-19
+    ValueError: DateField out of range - 1922-05-31 is not between 1970-01-01 and 2105-12-31
 
 Inserting to the Database
 -------------------------
@@ -32,6 +32,14 @@ For filters with compound conditions you can use `Q` objects inside `filter`
     >>> qs.conditions_as_sql()
     u"((first_name = 'Ciaran' AND last_name = 'Carver') OR height <= 1.8) AND (NOT (first_name = 'David'))"
 
+By default, conditions from the `filter` and `exclude` methods are added to the `WHERE` clause.
+For better aggregation performance you can add them to the `PREWHERE` clause instead, using the `prewhere=True` parameter:
+
+    >>> qs = Person.objects_in(database)
+    >>> qs = qs.filter(first_name__startswith='V', prewhere=True)
+    >>> qs.conditions_as_sql(prewhere=True)
+    u"first_name LIKE 'V%'"
+
 There are different operators that can be used, by passing `<fieldname>__<operator>=<value>` (two underscores separate the field name from the operator). In case no operator is given, `eq` is used by default. Below are all the supported operators.
 
 | Operator | Equivalent SQL | Comments |
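
For context, a sketch of the SQL this produces. A stand-in model is defined inline so the snippet runs without a server (the docs' `Person` model behaves the same); `None` is passed as the database since it is only needed at execution time:

```python
from infi.clickhouse_orm.models import Model
from infi.clickhouse_orm.fields import StringField
from infi.clickhouse_orm.engines import Memory

class Person(Model):
    first_name = StringField()
    engine = Memory()

qs = Person.objects_in(None)  # database is only needed when executing the query
qs = qs.filter(first_name__startswith='V', prewhere=True).filter(first_name__ne='Vera')
print(qs.as_sql())
# Expected output, approximately:
# SELECT `first_name`
# FROM `person`
# PREWHERE first_name LIKE 'V%'
# WHERE first_name != 'Vera'
```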
@@ -120,14 +128,14 @@ Adds a DISTINCT clause to the query, meaning that any duplicate rows in the results
 Final
 --------
 
-This method can be used only with CollapsingMergeTree engine.
+This method can be used only with the CollapsingMergeTree engine.
 Adds a FINAL modifier to the query, meaning data is selected fully "collapsed" by the sign field.
 
     >>> Person.objects_in(database).count()
     100
     >>> Person.objects_in(database).final().count()
     94
 
 
 Slicing
 -------

@@ -202,6 +210,19 @@ This queryset is translated to:
 
 After calling `aggregate` you can still use most of the regular queryset methods, such as `count`, `order_by` and `paginate`. It is not possible, however, to call `only` or `aggregate`. It is also not possible to filter the queryset on calculated fields, only on fields that exist in the model.
 
+If you limit aggregation results, it might be useful to get total aggregation values for all rows.
+To achieve this, use the `with_totals` method. It returns an extra (last) row with
+values aggregated across all rows matching the filters:
+
+    qs = Person.objects_in(database).aggregate('first_name', num='count()').with_totals().order_by('-count')[:3]
+    >>> print qs.count()
+    4
+    >>> for row in qs:
+    >>>     print("'{}': {}".format(row.first_name, row.count))
+    'Cassandra': 2
+    'Alexandra': 2
+    '': 100
+
 ---
 
 [<< Models and Databases](models_and_databases.md) | [Table of Contents](toc.md) | [Field Types >>](field_types.md)
@@ -482,9 +482,9 @@ infi.clickhouse_orm.query
 #### QuerySet(model_cls, database)
 
 
-#### conditions_as_sql()
+#### conditions_as_sql(prewhere=False)
 
-Return the contents of the queryset's WHERE clause.
+Return the contents of the queryset's WHERE or `PREWHERE` clause.
 
 
 #### count()
@@ -92,6 +92,7 @@
 * [UInt32Field](class_reference.md#uint32field)
 * [UInt64Field](class_reference.md#uint64field)
 * [UInt8Field](class_reference.md#uint8field)
+* [UUIDField](class_reference.md#uuidfield)
 * [infi.clickhouse_orm.engines](class_reference.md#infi.clickhouse_orm.engines)
 * [Engine](class_reference.md#engine)
 * [TinyLog](class_reference.md#tinylog)
@@ -40,11 +40,19 @@ class ServerError(DatabaseException):
         self.message = message
         super(ServerError, self).__init__(message)
 
-    ERROR_PATTERN = re.compile(r'''
-        Code:\ (?P<code>\d+),
-        \ e\.displayText\(\)\ =\ (?P<type1>[^ \n]+):\ (?P<msg>.+?),
-        \ e.what\(\)\ =\ (?P<type2>[^ \n]+)
-    ''', re.VERBOSE | re.DOTALL)
+    ERROR_PATTERNS = (
+        # ClickHouse prior to v19.3.3
+        re.compile(r'''
+            Code:\ (?P<code>\d+),
+            \ e\.displayText\(\)\ =\ (?P<type1>[^ \n]+):\ (?P<msg>.+?),
+            \ e.what\(\)\ =\ (?P<type2>[^ \n]+)
+        ''', re.VERBOSE | re.DOTALL),
+        # ClickHouse v19.3.3+
+        re.compile(r'''
+            Code:\ (?P<code>\d+),
+            \ e\.displayText\(\)\ =\ (?P<type1>[^ \n]+):\ (?P<msg>.+)
+        ''', re.VERBOSE | re.DOTALL),
+    )
 
     @classmethod
     def get_error_code_msg(cls, full_error_message):

@@ -54,10 +62,11 @@ class ServerError(DatabaseException):
         See the list of error codes here:
         https://github.com/yandex/ClickHouse/blob/master/dbms/src/Common/ErrorCodes.cpp
         """
-        match = cls.ERROR_PATTERN.match(full_error_message)
-        if match:
-            # assert match.group('type1') == match.group('type2')
-            return int(match.group('code')), match.group('msg')
+        for pattern in cls.ERROR_PATTERNS:
+            match = pattern.match(full_error_message)
+            if match:
+                # assert match.group('type1') == match.group('type2')
+                return int(match.group('code')), match.group('msg').strip()
 
         return 0, full_error_message
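
For context, the two patterns are tried in order, so both the old and the new server message formats parse. A runnable check of the classmethod above; the sample messages come from this commit's new tests:

```python
from infi.clickhouse_orm.database import ServerError

# Old format (ClickHouse prior to v19.3.3) carries a trailing e.what() part
code, msg = ServerError.get_error_code_msg(
    "Code: 81, e.displayText() = DB::Exception: Database db_not_here doesn't exist, "
    "e.what() = DB::Exception")
assert (code, msg) == (81, "Database db_not_here doesn't exist")

# New format (v19.3.3+) has no e.what() suffix; msg runs to the end and is stripped
code, msg = ServerError.get_error_code_msg(
    "Code: 164, e.displayText() = DB::Exception: Cannot drop table in readonly mode")
assert (code, msg) == (164, "Cannot drop table in readonly mode")
```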
@@ -74,7 +83,7 @@ class Database(object):
 
     def __init__(self, db_name, db_url='http://localhost:8123/',
                  username=None, password=None, readonly=False, autocreate=True,
-                 timeout=60, verify_ssl_cert=True):
+                 timeout=60, verify_ssl_cert=True, log_statements=False):
         '''
         Initializes a database instance. Unless it's readonly, the database will be
         created on the ClickHouse server if it does not already exist.

@@ -87,15 +96,17 @@ class Database(object):
         - `autocreate`: automatically create the database if it does not exist (unless in readonly mode).
         - `timeout`: the connection timeout in seconds.
         - `verify_ssl_cert`: whether to verify the server's certificate when connecting via HTTPS.
+        - `log_statements`: when True, all database statements are logged.
         '''
         self.db_name = db_name
         self.db_url = db_url
-        self.username = username
-        self.password = password
         self.readonly = False
         self.timeout = timeout
         self.request_session = requests.Session()
         self.request_session.verify = verify_ssl_cert
+        if username:
+            self.request_session.auth = (username, password or '')
+        self.log_statements = log_statements
         self.settings = {}
         self.db_exists = False  # this is required before running _is_existing_database
         self.db_exists = self._is_existing_database()
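
The credentials now ride on the `requests` session as HTTP Basic Authentication instead of being appended to every URL; a minimal sketch of what the initializer sets up (hypothetical values):

```python
import requests

session = requests.Session()
session.verify = True           # verify_ssl_cert
session.auth = ('default', '')  # (username, password or '')
# Every request made through this session now carries an
# "Authorization: Basic ..." header; the credentials no longer
# appear in the URL or in the server's query log as URL parameters.
# session.post('http://localhost:8123/', data=b'SELECT 1')
```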
@@ -276,7 +287,7 @@ class Database(object):
         count = self.count(model_class, conditions)
         pages_total = int(ceil(count / float(page_size)))
         if page_num == -1:
-            page_num = pages_total
+            page_num = max(pages_total, 1)
         elif page_num < 1:
             raise ValueError('Invalid page number: %d' % page_num)
         offset = (page_num - 1) * page_size

@@ -287,7 +298,7 @@ class Database(object):
         query += ' LIMIT %d, %d' % (offset, page_size)
         query = self._substitute(query, model_class)
         return Page(
-            objects=list(self.select(query, model_class, settings)),
+            objects=list(self.select(query, model_class, settings)) if count else [],
             number_of_objects=count,
             pages_total=pages_total,
             number=page_num,
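
Taken together, these two changes make `page_num=-1` ("last page") degrade gracefully when nothing matches. A standalone sketch of the fixed arithmetic (a hypothetical helper mirroring the code above, not part of the library):

```python
from math import ceil

def resolve_page_num(count, page_size, page_num):
    # Mirrors the fixed logic: -1 means "last page", clamped to 1 when there
    # are no pages at all, so the offset never goes negative.
    pages_total = int(ceil(count / float(page_size)))
    if page_num == -1:
        page_num = max(pages_total, 1)
    elif page_num < 1:
        raise ValueError('Invalid page number: %d' % page_num)
    return pages_total, page_num

assert resolve_page_num(0, 10, -1) == (0, 1)    # empty result: one empty page
assert resolve_page_num(25, 10, -1) == (3, 3)   # last page of three
```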
@@ -325,6 +336,8 @@ class Database(object):
     def _send(self, data, settings=None, stream=False):
         if isinstance(data, string_types):
             data = data.encode('utf-8')
+        if self.log_statements:
+            logger.info(data)
         params = self._build_params(settings)
         r = self.request_session.post(self.db_url, params=params, data=data, stream=stream, timeout=self.timeout)
         if r.status_code != 200:

@@ -336,10 +349,6 @@ class Database(object):
         params.update(self.settings)
         if self.db_exists:
             params['database'] = self.db_name
-        if self.username:
-            params['user'] = self.username
-        if self.password:
-            params['password'] = self.password
         # Send the readonly flag, unless the connection is already readonly (to prevent db error)
         if self.readonly and not self.connection_readonly:
             params['readonly'] = '1'
@@ -352,7 +361,10 @@ class Database(object):
         if '$' in query:
             mapping = dict(db="`%s`" % self.db_name)
             if model_class:
-                mapping['table'] = "`%s`.`%s`" % (self.db_name, model_class.table_name())
+                if model_class.is_system_model():
+                    mapping['table'] = model_class.table_name()
+                else:
+                    mapping['table'] = "`%s`.`%s`" % (self.db_name, model_class.table_name())
             query = Template(query).safe_substitute(mapping)
         return query
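
For context: system models such as `SystemPart` live in the `system` database, so `$table` must not be qualified with the current database name. A sketch of the resulting substitutions (a hypothetical helper mirroring `_substitute`, with made-up values):

```python
from string import Template

def substitute(query, db_name, table_name, is_system):
    # System tables are referenced bare; regular tables as `db`.`table`.
    mapping = {'db': '`%s`' % db_name}
    mapping['table'] = table_name if is_system else '`%s`.`%s`' % (db_name, table_name)
    return Template(query).safe_substitute(mapping)

assert substitute('SELECT * FROM $table', 'test-db', 'person', False) == \
    'SELECT * FROM `test-db`.`person`'
assert substitute('SELECT * FROM $table', 'test-db', 'system.parts', True) == \
    'SELECT * FROM system.parts'
```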
@@ -1,11 +1,12 @@
 from __future__ import unicode_literals
-from six import string_types, text_type, binary_type
+from six import string_types, text_type, binary_type, integer_types
 import datetime
 import iso8601
 import pytz
 import time
 from calendar import timegm
 from decimal import Decimal, localcontext
+from uuid import UUID
 
 from .utils import escape, parse_array, comma_join
 

@@ -131,7 +132,7 @@ class FixedStringField(StringField):
 class DateField(Field):
 
     min_value = datetime.date(1970, 1, 1)
-    max_value = datetime.date(2038, 1, 19)
+    max_value = datetime.date(2105, 12, 31)
     class_default = min_value
     db_type = 'Date'
 
@@ -452,6 +453,29 @@ class ArrayField(Field):
         return 'Array(%s)' % self.inner_field.get_sql(with_default_expression=False)
 
 
+class UUIDField(Field):
+
+    class_default = UUID(int=0)
+    db_type = 'UUID'
+
+    def to_python(self, value, timezone_in_use):
+        if isinstance(value, UUID):
+            return value
+        elif isinstance(value, binary_type):
+            return UUID(bytes=value)
+        elif isinstance(value, string_types):
+            return UUID(value)
+        elif isinstance(value, integer_types):
+            return UUID(int=value)
+        elif isinstance(value, tuple):
+            return UUID(fields=value)
+        else:
+            raise ValueError('Invalid value for UUIDField: %r' % value)
+
+    def to_db_string(self, value, quote=True):
+        return escape(str(value), quote)
+
+
 class NullableField(Field):
 
     class_default = None
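
A usage sketch of the new field; the conversions happen locally in `to_python`, so no server is needed to try it:

```python
from uuid import UUID
from infi.clickhouse_orm.fields import UUIDField

f = UUIDField()
u = UUID('12345678-1234-5678-1234-567812345678')
# Strings, ints, bytes and field tuples all normalize to the same UUID instance:
assert f.to_python('12345678-1234-5678-1234-567812345678', None) == u
assert f.to_python(0x12345678123456781234567812345678, None) == u
assert f.to_python(u.bytes, None) == u
# Values are rendered as quoted strings for SQL:
assert f.to_db_string(u) == "'12345678-1234-5678-1234-567812345678'"
```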
@@ -478,4 +502,13 @@ class NullableField(Field):
         return self.inner_field.to_db_string(value, quote=quote)
 
     def get_sql(self, with_default_expression=True):
-        return 'Nullable(%s)' % self.inner_field.get_sql(with_default_expression=False)
+        s = 'Nullable(%s)' % self.inner_field.get_sql(with_default_expression=False)
+        if with_default_expression:
+            if self.alias:
+                s = '%s ALIAS %s' % (s, self.alias)
+            elif self.materialized:
+                s = '%s MATERIALIZED %s' % (s, self.materialized)
+            elif self.default:
+                default = self.to_db_string(self.default)
+                s = '%s DEFAULT %s' % (s, default)
+        return s
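
With this change, DEFAULT/ALIAS/MATERIALIZED expressions survive into the DDL for nullable columns; a quick sketch of the generated column definitions:

```python
from infi.clickhouse_orm.fields import Int32Field, NullableField

print(NullableField(Int32Field()).get_sql())                # Nullable(Int32)
print(NullableField(Int32Field(), default=7).get_sql())     # Nullable(Int32) DEFAULT 7
print(NullableField(Int32Field(), alias='x/2').get_sql())   # Nullable(Int32) ALIAS x/2
```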
@@ -311,9 +311,16 @@ class MergeModel(Model):
 
     @classmethod
     def create_table_sql(cls, db):
-        assert isinstance(cls.engine, Merge), "engine must be engines.Merge instance"
-        return super(MergeModel, cls).create_table_sql(db)
-
+        assert isinstance(cls.engine, Merge), "engine must be an instance of engines.Merge"
+        parts = ['CREATE TABLE IF NOT EXISTS `%s`.`%s` (' % (db.db_name, cls.table_name())]
+        cols = []
+        for name, field in iteritems(cls.fields()):
+            if name != '_table':
+                cols.append('    %s %s' % (name, field.get_sql()))
+        parts.append(',\n'.join(cols))
+        parts.append(')')
+        parts.append('ENGINE = ' + cls.engine.create_table_sql(db))
+        return '\n'.join(parts)
 
     # TODO: base class for models that require specific engine

@@ -324,7 +331,7 @@ class DistributedModel(Model):
     """
 
     def set_database(self, db):
-        assert isinstance(self.engine, Distributed), "engine must be engines.Distributed instance"
+        assert isinstance(self.engine, Distributed), "engine must be an instance of engines.Distributed"
         res = super(DistributedModel, self).set_database(db)
         return res
 
@@ -2,7 +2,7 @@ from __future__ import unicode_literals
 
 import six
 import pytz
-from copy import copy
+from copy import copy, deepcopy
 from math import ceil
 
 from .engines import CollapsingMergeTree

@@ -173,6 +173,11 @@ class FOV(object):
     def to_sql(self, model_cls):
         return self._operator.to_sql(model_cls, self._field_name, self._value)
 
+    def __deepcopy__(self, memodict={}):
+        res = copy(self)
+        res._value = deepcopy(self._value)
+        return res
+
 
 class Q(object):
 
@@ -181,17 +186,32 @@ class Q(object):
 
     def __init__(self, **filter_fields):
         self._fovs = [self._build_fov(k, v) for k, v in six.iteritems(filter_fields)]
-        self._l_child = None
-        self._r_child = None
+        self._children = []
         self._negate = False
         self._mode = self.AND_MODE
 
+    @property
+    def is_empty(self):
+        """
+        Checks whether there are any conditions in this Q object
+        :return: Boolean
+        """
+        return not bool(self._fovs or self._children)
+
     @classmethod
     def _construct_from(cls, l_child, r_child, mode):
-        q = Q()
-        q._l_child = l_child
-        q._r_child = r_child
-        q._mode = mode  # AND/OR
+        if mode == l_child._mode:
+            q = deepcopy(l_child)
+            q._children.append(deepcopy(r_child))
+        elif mode == r_child._mode:
+            q = deepcopy(r_child)
+            q._children.append(deepcopy(l_child))
+        else:
+            # Different modes
+            q = Q()
+            q._children = [l_child, r_child]
+            q._mode = mode  # AND/OR
+
         return q
 
     def _build_fov(self, key, value):

@@ -202,16 +222,27 @@ class Q(object):
         return FOV(field_name, operator, value)
 
     def to_sql(self, model_cls):
+        condition_sql = []
+
         if self._fovs:
-            sql = ' {} '.format(self._mode).join(fov.to_sql(model_cls) for fov in self._fovs)
+            condition_sql.extend([fov.to_sql(model_cls) for fov in self._fovs])
+
+        if self._children:
+            condition_sql.extend([child.to_sql(model_cls) for child in self._children if child])
+
+        if not condition_sql:
+            # Empty Q() object returns everything
+            sql = '1'
+        elif len(condition_sql) == 1:
+            # Skip unneeded brackets around a single condition
+            sql = condition_sql[0]
         else:
-            if self._l_child and self._r_child:
-                sql = '({} {} {})'.format(
-                    self._l_child.to_sql(model_cls), self._mode, self._r_child.to_sql(model_cls))
-            else:
-                return '1'
+            # Each condition must be enclosed in brackets, or the order of operations may be wrong
+            sql = '(%s)' % ') {} ('.format(self._mode).join(condition_sql)
+
         if self._negate:
             sql = 'NOT (%s)' % sql
+
         return sql
 
     def __or__(self, other):
@@ -225,6 +256,20 @@ class Q(object):
         q._negate = True
         return q
 
+    def __bool__(self):
+        return not self.is_empty
+
+    def __deepcopy__(self, memodict={}):
+        q = Q()
+        q._fovs = [deepcopy(fov) for fov in self._fovs]
+        q._negate = self._negate
+        q._mode = self._mode
+
+        if self._children:
+            q._children = [deepcopy(child) for child in self._children]
+
+        return q
+
 
 @six.python_2_unicode_compatible
 class QuerySet(object):
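
For context: `Q` objects now hold a flat list of children per AND/OR mode instead of a binary tree, and every rendered condition is parenthesized. A sketch of the SQL they emit (a stand-in model defined inline, no server needed; exact bracketing may vary):

```python
from infi.clickhouse_orm.models import Model
from infi.clickhouse_orm.fields import StringField, Float32Field
from infi.clickhouse_orm.engines import Memory
from infi.clickhouse_orm.query import Q

class Person(Model):
    first_name = StringField()
    height = Float32Field()
    engine = Memory()

print(Q().to_sql(Person))                     # 1  (an empty Q matches everything)
print(Q(first_name='Ciaran').to_sql(Person))  # first_name = 'Ciaran'  (no brackets)
q = Q(first_name='Ciaran') & Q(height__lte=1.8)
print(q.to_sql(Person))     # (first_name = 'Ciaran') AND (height <= 1.8)
print((~q).to_sql(Person))  # NOT (...) wraps the whole combined condition
```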
@@ -242,7 +287,10 @@ class QuerySet(object):
         self._model_cls = model_cls
         self._database = database
         self._order_by = []
-        self._q = []
+        self._where_q = Q()
+        self._prewhere_q = Q()
+        self._grouping_fields = []
+        self._grouping_with_totals = False
         self._fields = model_cls.fields().keys()
         self._limits = None
         self._distinct = False

@@ -284,20 +332,44 @@ class QuerySet(object):
         qs._limits = (start, stop - start)
         return qs
 
+    def select_fields_as_sql(self):
+        """
+        Returns the selected fields or expressions as a SQL string.
+        """
+        return comma_join('`%s`' % field for field in self._fields) if self._fields else '*'
+
     def as_sql(self):
         """
         Returns the whole query as a SQL string.
         """
         distinct = 'DISTINCT ' if self._distinct else ''
-        fields = '*'
-        if self._fields:
-            fields = comma_join('`%s`' % field for field in self._fields)
-        ordering = '\nORDER BY ' + self.order_by_as_sql() if self._order_by else ''
-        limit = '\nLIMIT %d, %d' % self._limits if self._limits else ''
         final = ' FINAL' if self._final else ''
-        params = (distinct, fields, self._model_cls.table_name(), final,
-                  self.conditions_as_sql(), ordering, limit)
-        return u'SELECT %s%s\nFROM `%s`%s\nWHERE %s%s%s' % params
+        table_name = self._model_cls.table_name()
+        if not self._model_cls.is_system_model():
+            table_name = '`%s`' % table_name
+
+        params = (distinct, self.select_fields_as_sql(), table_name, final)
+        sql = u'SELECT %s%s\nFROM %s%s' % params
+
+        if self._prewhere_q and not self._prewhere_q.is_empty:
+            sql += '\nPREWHERE ' + self.conditions_as_sql(prewhere=True)
+
+        if self._where_q and not self._where_q.is_empty:
+            sql += '\nWHERE ' + self.conditions_as_sql(prewhere=False)
+
+        if self._grouping_fields:
+            sql += '\nGROUP BY %s' % comma_join('`%s`' % field for field in self._grouping_fields)
+
+        if self._grouping_with_totals:
+            sql += ' WITH TOTALS'
+
+        if self._order_by:
+            sql += '\nORDER BY ' + self.order_by_as_sql()
+
+        if self._limits:
+            sql += '\nLIMIT %d, %d' % self._limits
+
+        return sql
 
     def order_by_as_sql(self):
         """
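
A sketch of the clause-by-clause assembly: each section is appended only when present, so a queryset with no conditions produces no WHERE at all (a hypothetical minimal model, no server needed; output shown approximately):

```python
from infi.clickhouse_orm.models import Model
from infi.clickhouse_orm.fields import StringField, Int32Field
from infi.clickhouse_orm.engines import Memory

class Event(Model):
    name = StringField()
    value = Int32Field()
    engine = Memory()

qs = Event.objects_in(None)
print(qs.as_sql())
# SELECT `name`, `value`
# FROM `event`

print(qs.filter(value__gt=0).order_by('-value')[:10].as_sql())
# SELECT `name`, `value`
# FROM `event`
# WHERE value > 0
# ORDER BY value DESC
# LIMIT 0, 10
```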
@@ -308,14 +380,12 @@ class QuerySet(object):
             for field in self._order_by
         ])
 
-    def conditions_as_sql(self):
+    def conditions_as_sql(self, prewhere=False):
         """
-        Returns the contents of the query's `WHERE` clause as a string.
+        Returns the contents of the query's `WHERE` or `PREWHERE` clause as a string.
         """
-        if self._q:
-            return u' AND '.join([q.to_sql(self._model_cls) for q in self._q])
-        else:
-            return u'1'
+        q_object = self._prewhere_q if prewhere else self._where_q
+        return q_object.to_sql(self._model_cls)
 
     def count(self):
         """

@@ -326,8 +396,10 @@ class QuerySet(object):
             sql = u'SELECT count() FROM (%s)' % self.as_sql()
             raw = self._database.raw(sql)
             return int(raw) if raw else 0
+
         # Simple case
-        return self._database.count(self._model_cls, self.conditions_as_sql())
+        conditions = (self._where_q & self._prewhere_q).to_sql(self._model_cls)
+        return self._database.count(self._model_cls, conditions)
 
     def order_by(self, *field_names):
         """
@@ -347,25 +419,43 @@ class QuerySet(object):
         qs._fields = field_names
         return qs
 
-    def filter(self, *q, **filter_fields):
-        """
-        Returns a copy of this queryset that includes only rows matching the conditions.
-        Add q object to query if it specified.
-        """
+    def _filter_or_exclude(self, *q, **kwargs):
+        inverse = kwargs.pop('_inverse', False)
+        prewhere = kwargs.pop('prewhere', False)
+
         qs = copy(self)
-        if q:
-            qs._q = list(self._q) + list(q)
+
+        condition = Q()
+        for q_obj in q:
+            condition &= q_obj
+
+        if kwargs:
+            condition &= Q(**kwargs)
+
+        if inverse:
+            condition = ~condition
+
+        condition = copy(self._prewhere_q if prewhere else self._where_q) & condition
+        if prewhere:
+            qs._prewhere_q = condition
         else:
-            qs._q = list(self._q) + [Q(**filter_fields)]
+            qs._where_q = condition
+
         return qs
 
-    def exclude(self, **filter_fields):
+    def filter(self, *q, **kwargs):
+        """
+        Returns a copy of this queryset that includes only rows matching the conditions.
+        Pass `prewhere=True` to apply the conditions as PREWHERE instead of WHERE.
+        """
+        return self._filter_or_exclude(*q, **kwargs)
+
+    def exclude(self, *q, **kwargs):
         """
         Returns a copy of this queryset that excludes all rows matching the conditions.
+        Pass `prewhere=True` to apply the conditions as PREWHERE instead of WHERE.
         """
-        qs = copy(self)
-        qs._q = list(self._q) + [~Q(**filter_fields)]
-        return qs
+        return self._filter_or_exclude(*q, _inverse=True, **kwargs)
 
     def paginate(self, page_num=1, page_size=100):
         """
@@ -459,7 +549,8 @@ class AggregateQuerySet(QuerySet):
         self._grouping_fields = grouping_fields
         self._calculated_fields = calculated_fields
         self._order_by = list(base_qs._order_by)
-        self._q = list(base_qs._q)
+        self._where_q = base_qs._where_q
+        self._prewhere_q = base_qs._prewhere_q
         self._limits = base_qs._limits
         self._distinct = base_qs._distinct

@@ -488,26 +579,11 @@ class AggregateQuerySet(QuerySet):
         """
         raise NotImplementedError('Cannot re-aggregate an AggregateQuerySet')
 
-    def as_sql(self):
+    def select_fields_as_sql(self):
         """
-        Returns the whole query as a SQL string.
+        Returns the selected fields or expressions as a SQL string.
         """
-        distinct = 'DISTINCT ' if self._distinct else ''
-        grouping = comma_join('`%s`' % field for field in self._grouping_fields)
-        fields = comma_join(list(self._fields) + ['%s AS %s' % (v, k) for k, v in self._calculated_fields.items()])
-        params = dict(
-            distinct=distinct,
-            grouping=grouping or "''",
-            fields=fields,
-            table=self._model_cls.table_name(),
-            conds=self.conditions_as_sql()
-        )
-        sql = u'SELECT %(distinct)s%(fields)s\nFROM `%(table)s`\nWHERE %(conds)s\nGROUP BY %(grouping)s' % params
-        if self._order_by:
-            sql += '\nORDER BY ' + self.order_by_as_sql()
-        if self._limits:
-            sql += '\nLIMIT %d, %d' % self._limits
-        return sql
+        return comma_join(list(self._fields) + ['%s AS %s' % (v, k) for k, v in self._calculated_fields.items()])
 
     def __iter__(self):
         return self._database.select(self.as_sql())  # using an ad-hoc model

@@ -519,3 +595,13 @@ class AggregateQuerySet(QuerySet):
         sql = u'SELECT count() FROM (%s)' % self.as_sql()
         raw = self._database.raw(sql)
         return int(raw) if raw else 0
+
+    def with_totals(self):
+        """
+        Adds a WITH TOTALS modifier to GROUP BY, making the query return an extra row
+        with aggregate functions calculated across all the rows. More information:
+        https://clickhouse.yandex/docs/en/query_language/select/#with-totals-modifier
+        """
+        qs = copy(self)
+        qs._grouping_with_totals = True
+        return qs
@@ -19,8 +19,8 @@ class SystemPart(Model):
     """
     OPERATIONS = frozenset({'DETACH', 'DROP', 'ATTACH', 'FREEZE', 'FETCH'})
 
-    readonly = True
-    system = True
+    _readonly = True
+    _system = True
 
     database = StringField()  # Name of the database where the table that this part belongs to is located.
     table = StringField()  # Name of the table that this part belongs to.
@@ -14,7 +14,7 @@ logging.getLogger("requests").setLevel(logging.WARNING)
 class TestCaseWithData(unittest.TestCase):
 
     def setUp(self):
-        self.database = Database('test-db')
+        self.database = Database('test-db', log_statements=True)
         self.database.create_table(Person)
 
     def tearDown(self):

@@ -46,7 +46,7 @@ class Person(Model):
 data = [
     {"first_name": "Abdul", "last_name": "Hester", "birthday": "1970-12-02", "height": "1.63",
      "passport": 35052255},
     {"first_name": "Adam", "last_name": "Goodman", "birthday": "1986-01-07", "height": "1.74",
      "passport": 36052255},
@@ -11,7 +11,7 @@ from infi.clickhouse_orm.engines import *
 class MaterializedFieldsTest(unittest.TestCase):
 
     def setUp(self):
-        self.database = Database('test-db')
+        self.database = Database('test-db', log_statements=True)
         self.database.create_table(ModelWithAliasFields)
 
     def tearDown(self):

@@ -11,7 +11,7 @@ from infi.clickhouse_orm.engines import *
 class ArrayFieldsTest(unittest.TestCase):
 
     def setUp(self):
-        self.database = Database('test-db')
+        self.database = Database('test-db', log_statements=True)
         self.database.create_table(ModelWithArrays)
 
     def tearDown(self):
@@ -1,18 +1,15 @@
 from __future__ import unicode_literals
 import unittest
-import six
-from uuid import UUID
 from infi.clickhouse_orm.database import Database
 from infi.clickhouse_orm.fields import Field, Int16Field
 from infi.clickhouse_orm.models import Model
 from infi.clickhouse_orm.engines import Memory
-from infi.clickhouse_orm.utils import escape
 
 
 class CustomFieldsTest(unittest.TestCase):
 
     def setUp(self):
-        self.database = Database('test-db')
+        self.database = Database('test-db', log_statements=True)
 
     def tearDown(self):
         self.database.drop_database()

@@ -35,37 +32,6 @@ class CustomFieldsTest(unittest.TestCase):
             with self.assertRaises(ValueError):
                 TestModel(i=1, f=value)
 
-    def test_uuid_field(self):
-        # Create a model
-        class TestModel(Model):
-            i = Int16Field()
-            f = UUIDField()
-            engine = Memory()
-        self.database.create_table(TestModel)
-        # Check valid values (all values are the same UUID)
-        values = [
-            '{12345678-1234-5678-1234-567812345678}',
-            '12345678123456781234567812345678',
-            'urn:uuid:12345678-1234-5678-1234-567812345678',
-            '\x12\x34\x56\x78'*4,
-            (0x12345678, 0x1234, 0x5678, 0x12, 0x34, 0x567812345678),
-            0x12345678123456781234567812345678,
-        ]
-        for index, value in enumerate(values):
-            rec = TestModel(i=index, f=value)
-            self.database.insert([rec])
-        for rec in TestModel.objects_in(self.database):
-            self.assertEqual(rec.f, UUID(values[0]))
-        # Check that ClickHouse encoding functions are supported
-        for rec in self.database.select("SELECT i, UUIDNumToString(f) AS f FROM testmodel", TestModel):
-            self.assertEqual(rec.f, UUID(values[0]))
-        for rec in self.database.select("SELECT 1 as i, UUIDStringToNum('12345678-1234-5678-1234-567812345678') AS f", TestModel):
-            self.assertEqual(rec.f, UUID(values[0]))
-        # Check invalid values
-        for value in [None, 'zzz', -1, '123']:
-            with self.assertRaises(ValueError):
-                TestModel(i=1, f=value)
-
 
 class BooleanField(Field):
 

@@ -88,32 +54,3 @@ class BooleanField(Field):
         # The value was already converted by to_python, so it's a bool
         return '1' if value else '0'
 
-
-class UUIDField(Field):
-
-    # The ClickHouse column type to use
-    db_type = 'FixedString(16)'
-
-    # The default value if empty
-    class_default = UUID(int=0)
-
-    def to_python(self, value, timezone_in_use):
-        # Convert valid values to UUID instance
-        if isinstance(value, UUID):
-            return value
-        elif isinstance(value, six.string_types):
-            return UUID(bytes=value.encode('latin1')) if len(value) == 16 else UUID(value)
-        elif isinstance(value, six.integer_types):
-            return UUID(int=value)
-        elif isinstance(value, tuple):
-            return UUID(fields=value)
-        else:
-            raise ValueError('Invalid value for UUIDField: %r' % value)
-
-    def to_db_string(self, value, quote=True):
-        # The value was already converted by to_python, so it's a UUID instance
-        val = value.bytes
-        if six.PY3:
-            val = str(val, 'latin1')
-        return escape(val, quote)
@@ -112,6 +112,14 @@ class DatabaseTestCase(TestCaseWithData):
         self.assertEqual([obj.to_tsv() for obj in page_a.objects],
                          [obj.to_tsv() for obj in page_b.objects])
 
+    def test_pagination_empty_page(self):
+        for page_num in (-1, 1, 2):
+            page = self.database.paginate(Person, 'first_name, last_name', page_num, 10, conditions="first_name = 'Ziggy'")
+            self.assertEqual(page.number_of_objects, 0)
+            self.assertEqual(page.objects, [])
+            self.assertEqual(page.pages_total, 0)
+            self.assertEqual(page.number, max(page_num, 1))
+
     def test_pagination_invalid_page(self):
         self._insert_and_check(self._sample_data(), len(data))
         for page_num in (0, -2, -100):

@@ -142,7 +150,7 @@ class DatabaseTestCase(TestCaseWithData):
 
         exc = cm.exception
         self.assertEqual(exc.code, 193)
-        self.assertEqual(exc.message, 'Wrong password for user default')
+        self.assertTrue(exc.message.startswith('Wrong password for user default'))
 
     def test_nonexisting_db(self):
         db = Database('db_not_here', autocreate=False)

@@ -150,7 +158,7 @@ class DatabaseTestCase(TestCaseWithData):
             db.create_table(Person)
         exc = cm.exception
         self.assertEqual(exc.code, 81)
-        self.assertEqual(exc.message, "Database db_not_here doesn't exist")
+        self.assertTrue(exc.message.startswith("Database db_not_here doesn't exist"))
         # Create and delete the db twice, to ensure db_exists gets updated
         for i in range(2):
             # Now create the database - should succeed
@@ -10,7 +10,7 @@ from infi.clickhouse_orm.engines import *
 class DateFieldsTest(unittest.TestCase):
 
     def setUp(self):
-        self.database = Database('test-db')
+        self.database = Database('test-db', log_statements=True)
         self.database.create_table(ModelWithDate)
 
     def tearDown(self):

@@ -12,7 +12,7 @@ from infi.clickhouse_orm.engines import *
 class DecimalFieldsTest(unittest.TestCase):
 
     def setUp(self):
-        self.database = Database('test-db')
+        self.database = Database('test-db', log_statements=True)
         self.database.add_setting('allow_experimental_decimal_type', 1)
         try:
             self.database.create_table(DecimalModel)

@@ -14,7 +14,7 @@ logging.getLogger("requests").setLevel(logging.WARNING)
 class _EnginesHelperTestCase(unittest.TestCase):
 
     def setUp(self):
-        self.database = Database('test-db')
+        self.database = Database('test-db', log_statements=True)
 
     def tearDown(self):
         self.database.drop_database()
@@ -115,8 +115,8 @@ class EnginesTestCase(_EnginesHelperTestCase):
             TestModel2(date='2017-01-02', event_id=2, event_group=2, event_count=2, event_version=2)
         ])
         # event_uversion is materialized field. So * won't select it and it will be zero
-        res = self.database.select('SELECT *, event_uversion FROM $table ORDER BY event_id', model_class=TestMergeModel)
-        res = [row for row in res]
+        res = self.database.select('SELECT *, _table, event_uversion FROM $table ORDER BY event_id', model_class=TestMergeModel)
+        res = list(res)
         self.assertEqual(2, len(res))
         self.assertDictEqual({
             '_table': 'testmodel1',

@@ -209,7 +209,7 @@ class DistributedTestCase(_EnginesHelperTestCase):
 
         exc = cm.exception
         self.assertEqual(exc.code, 170)
-        self.assertEqual(exc.message, "Requested cluster 'cluster_name' not found")
+        self.assertTrue(exc.message.startswith("Requested cluster 'cluster_name' not found"))
 
     def test_verbose_engine_two_superclasses(self):
         class TestModel2(SampleModel):
@@ -15,7 +15,7 @@ except NameError:
 class EnumFieldsTest(unittest.TestCase):
 
     def setUp(self):
-        self.database = Database('test-db')
+        self.database = Database('test-db', log_statements=True)
         self.database.create_table(ModelWithEnum)
         self.database.create_table(ModelWithEnumArray)
 

@@ -11,7 +11,7 @@ from infi.clickhouse_orm.engines import *
 class FixedStringFieldsTest(unittest.TestCase):
 
     def setUp(self):
-        self.database = Database('test-db')
+        self.database = Database('test-db', log_statements=True)
         self.database.create_table(FixedStringModel)
 
     def tearDown(self):

@@ -9,7 +9,7 @@ from infi.clickhouse_orm import database, engines, fields, models
 class JoinTest(unittest.TestCase):
 
     def setUp(self):
-        self.database = database.Database('test-db')
+        self.database = database.Database('test-db', log_statements=True)
         self.database.create_table(Foo)
         self.database.create_table(Bar)
         self.database.insert([Foo(id=i) for i in range(3)])
|
|||
class MaterializedFieldsTest(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.database = Database('test-db')
|
||||
self.database = Database('test-db', log_statements=True)
|
||||
self.database.create_table(ModelWithMaterializedFields)
|
||||
|
||||
def tearDown(self):
|
||||
|
|
|
@ -24,7 +24,7 @@ logging.getLogger("requests").setLevel(logging.WARNING)
|
|||
class MigrationsTestCase(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.database = Database('test-db')
|
||||
self.database = Database('test-db', log_statements=True)
|
||||
self.database.drop_table(MigrationHistory)
|
||||
|
||||
def tearDown(self):
|
||||
|
|
|
@@ -6,6 +6,7 @@ from infi.clickhouse_orm.database import Database
 from infi.clickhouse_orm.models import Model
 from infi.clickhouse_orm.fields import *
 from infi.clickhouse_orm.engines import *
+from infi.clickhouse_orm.utils import comma_join
 
 from datetime import date, datetime
 

@@ -13,7 +14,7 @@ from datetime import date, datetime
 class NullableFieldsTest(unittest.TestCase):
 
     def setUp(self):
-        self.database = Database('test-db')
+        self.database = Database('test-db', log_statements=True)
         self.database.create_table(ModelWithNullable)
 
     def tearDown(self):

@@ -95,14 +96,19 @@ class NullableFieldsTest(unittest.TestCase):
             ModelWithNullable(date_field='2016-08-30', null_str='', null_int=42, null_date=dt),
             ModelWithNullable(date_field='2016-08-30', null_str='nothing', null_int=None, null_date=None),
             ModelWithNullable(date_field='2016-08-31', null_str=None, null_int=42, null_date=dt),
-            ModelWithNullable(date_field='2016-08-31', null_str=None, null_int=None, null_date=None)
+            ModelWithNullable(date_field='2016-08-31', null_str=None, null_int=None, null_date=None, null_default=None)
         ])
 
     def _assert_sample_data(self, results):
+        for r in results:
+            print(r.to_dict())
         dt = date(1970, 1, 1)
         self.assertEqual(len(results), 4)
         self.assertIsNone(results[0].null_str)
         self.assertEqual(results[0].null_int, 42)
+        self.assertEqual(results[0].null_default, 7)
+        self.assertEqual(results[0].null_alias, 21)
+        self.assertEqual(results[0].null_materialized, 420)
         self.assertEqual(results[0].null_date, dt)
         self.assertIsNone(results[1].null_date)
         self.assertEqual(results[1].null_str, 'nothing')

@@ -110,19 +116,27 @@ class NullableFieldsTest(unittest.TestCase):
         self.assertIsNone(results[2].null_str)
         self.assertEqual(results[2].null_date, dt)
         self.assertEqual(results[2].null_int, 42)
+        self.assertEqual(results[2].null_default, 7)
+        self.assertEqual(results[2].null_alias, 21)
+        self.assertEqual(results[0].null_materialized, 420)
         self.assertIsNone(results[3].null_int)
+        self.assertIsNone(results[3].null_default)
+        self.assertIsNone(results[3].null_alias)
+        self.assertIsNone(results[3].null_materialized)
         self.assertIsNone(results[3].null_str)
         self.assertIsNone(results[3].null_date)
 
     def test_insert_and_select(self):
         self._insert_sample_data()
-        query = 'SELECT * from $table ORDER BY date_field'
+        fields = comma_join(ModelWithNullable.fields().keys())
+        query = 'SELECT %s from $table ORDER BY date_field' % fields
         results = list(self.database.select(query, ModelWithNullable))
         self._assert_sample_data(results)
 
     def test_ad_hoc_model(self):
         self._insert_sample_data()
-        query = 'SELECT * from $db.modelwithnullable ORDER BY date_field'
+        fields = comma_join(ModelWithNullable.fields().keys())
+        query = 'SELECT %s from $db.modelwithnullable ORDER BY date_field' % fields
         results = list(self.database.select(query))
         self._assert_sample_data(results)
 

@@ -133,5 +147,8 @@ class ModelWithNullable(Model):
     null_str = NullableField(StringField(), extra_null_values={''})
     null_int = NullableField(Int32Field())
     null_date = NullableField(DateField())
+    null_default = NullableField(Int32Field(), default=7)
+    null_alias = NullableField(Int32Field(), alias='null_int/2')
+    null_materialized = NullableField(Int32Field(), materialized='null_int*10')
 
     engine = MergeTree('date_field', ('date_field',))
@@ -11,7 +11,7 @@ from datetime import date, datetime
 try:
     Enum  # exists in Python 3.4+
 except NameError:
-    from enum import Enum # use the enum34 library instead
+    from enum import Enum  # use the enum34 library instead
 
 
 class QuerySetTestCase(TestCaseWithData):

@@ -29,6 +29,13 @@ class QuerySetTestCase(TestCaseWithData):
         self.assertEqual(count, expected_count)
         self.assertEqual(qs.count(), expected_count)
 
+    def test_prewhere(self):
+        # We can't distinguish prewhere and where results; prewhere only affects performance.
+        # So we just check that prewhere filters the same way where does.
+        qs = Person.objects_in(self.database)
+        self.assertTrue(qs.filter(first_name='Connor', prewhere=True))
+        self.assertFalse(qs.filter(first_name='Willy', prewhere=True))
+
     def test_no_filtering(self):
         qs = Person.objects_in(self.database)
         self._test_qs(qs, len(data))

@@ -404,6 +411,17 @@ class AggregateTestCase(TestCaseWithData):
         print(qs.as_sql())
         self.assertEqual(qs.count(), 1)
 
+    def test_aggregate_with_totals(self):
+        qs = Person.objects_in(self.database).aggregate('first_name', count='count()').\
+            with_totals().order_by('-count')[:5]
+        print(qs.as_sql())
+        result = list(qs)
+        self.assertEqual(len(result), 6)
+        for row in result[:-1]:
+            self.assertEqual(2, row.count)
+
+        self.assertEqual(100, result[-1].count)
+
     def test_double_underscore_field(self):
         class Mdl(Model):
             the__number = Int32Field()
@@ -36,9 +36,9 @@ class ReadonlyTestCase(TestCaseWithData):
     def _check_db_readonly_err(self, exc, drop_table=None):
         self.assertEqual(exc.code, 164)
         if drop_table:
-            self.assertEqual(exc.message, 'Cannot drop table in readonly mode')
+            self.assertTrue(exc.message.startswith('Cannot drop table in readonly mode'))
         else:
-            self.assertEqual(exc.message, 'Cannot insert into table in readonly mode')
+            self.assertTrue(exc.message.startswith('Cannot insert into table in readonly mode'))
 
     def test_readonly_db_with_default_user(self):
         self._test_readonly_db('default')
tests/test_server_errors.py (new file, +32)

@@ -0,0 +1,32 @@
+from __future__ import unicode_literals
+import unittest
+
+from infi.clickhouse_orm.database import ServerError
+
+
+class ServerErrorTest(unittest.TestCase):
+
+    def test_old_format(self):
+
+        code, msg = ServerError.get_error_code_msg("Code: 81, e.displayText() = DB::Exception: Database db_not_here doesn't exist, e.what() = DB::Exception (from [::1]:33458)")
+        self.assertEqual(code, 81)
+        self.assertEqual(msg, "Database db_not_here doesn't exist")
+
+        code, msg = ServerError.get_error_code_msg("Code: 161, e.displayText() = DB::Exception: Limit for number of columns to read exceeded. Requested: 11, maximum: 1, e.what() = DB::Exception\n")
+        self.assertEqual(code, 161)
+        self.assertEqual(msg, "Limit for number of columns to read exceeded. Requested: 11, maximum: 1")
+
+
+    def test_new_format(self):
+
+        code, msg = ServerError.get_error_code_msg("Code: 164, e.displayText() = DB::Exception: Cannot drop table in readonly mode")
+        self.assertEqual(code, 164)
+        self.assertEqual(msg, "Cannot drop table in readonly mode")
+
+        code, msg = ServerError.get_error_code_msg("Code: 48, e.displayText() = DB::Exception: Method write is not supported by storage Merge")
+        self.assertEqual(code, 48)
+        self.assertEqual(msg, "Method write is not supported by storage Merge")
+
+        code, msg = ServerError.get_error_code_msg("Code: 60, e.displayText() = DB::Exception: Table default.zuzu doesn't exist.\n")
+        self.assertEqual(code, 60)
+        self.assertEqual(msg, "Table default.zuzu doesn't exist.")
@@ -13,8 +13,9 @@ from infi.clickhouse_orm.system_models import SystemPart
 
 
 class SystemTest(unittest.TestCase):
+
     def setUp(self):
-        self.database = Database('test-db')
+        self.database = Database('test-db', log_statements=True)
 
     def tearDown(self):
         self.database.drop_database()

@@ -38,7 +39,7 @@ class SystemPartTest(unittest.TestCase):
     BACKUP_DIRS = ['/var/lib/clickhouse/shadow', '/opt/clickhouse/shadow/']
 
     def setUp(self):
-        self.database = Database('test-db')
+        self.database = Database('test-db', log_statements=True)
         self.database.create_table(TestTable)
         self.database.create_table(CustomPartitionedTable)
         self.database.insert([TestTable(date_field=date.today())])

@@ -54,6 +55,12 @@ class SystemPartTest(unittest.TestCase):
                 return dirnames
         raise unittest.SkipTest('Cannot find backups dir')
 
+    def test_is_read_only(self):
+        self.assertTrue(SystemPart.is_read_only())
+
+    def test_is_system_model(self):
+        self.assertTrue(SystemPart.is_system_model())
+
     def test_get_all(self):
         parts = SystemPart.get(self.database)
         self.assertEqual(len(list(parts)), 2)

@@ -62,7 +69,8 @@ class SystemPartTest(unittest.TestCase):
         parts = list(SystemPart.get_active(self.database))
         self.assertEqual(len(parts), 2)
         parts[0].detach()
-        self.assertEqual(len(list(SystemPart.get_active(self.database))), 1)
+        parts = list(SystemPart.get_active(self.database))
+        self.assertEqual(len(parts), 1)
 
     def test_get_conditions(self):
         parts = list(SystemPart.get(self.database, conditions="table='testtable'"))

@@ -101,6 +109,10 @@ class SystemPartTest(unittest.TestCase):
         # TODO Not tested, as I have no replication set
         pass
 
+    def test_query(self):
+        SystemPart.objects_in(self.database).count()
+        list(SystemPart.objects_in(self.database).filter(table='testtable'))
+
 
 class TestTable(Model):
     date_field = DateField()
tests/test_uuid_fields.py (new file, +45)

@@ -0,0 +1,45 @@
+from __future__ import unicode_literals
+import unittest
+from uuid import UUID
+from infi.clickhouse_orm.database import Database
+from infi.clickhouse_orm.fields import Int16Field, UUIDField
+from infi.clickhouse_orm.models import Model
+from infi.clickhouse_orm.engines import Memory
+
+
+class UUIDFieldsTest(unittest.TestCase):
+
+    def setUp(self):
+        self.database = Database('test-db', log_statements=True)
+
+    def tearDown(self):
+        self.database.drop_database()
+
+    def test_uuid_field(self):
+        # Create a model
+        class TestModel(Model):
+            i = Int16Field()
+            f = UUIDField()
+            engine = Memory()
+        self.database.create_table(TestModel)
+        # Check valid values (all values are the same UUID)
+        values = [
+            '12345678-1234-5678-1234-567812345678',
+            '{12345678-1234-5678-1234-567812345678}',
+            '12345678123456781234567812345678',
+            'urn:uuid:12345678-1234-5678-1234-567812345678',
+            b'\x12\x34\x56\x78'*4,
+            (0x12345678, 0x1234, 0x5678, 0x12, 0x34, 0x567812345678),
+            0x12345678123456781234567812345678,
+            UUID(int=0x12345678123456781234567812345678),
+        ]
+        for index, value in enumerate(values):
+            rec = TestModel(i=index, f=value)
+            self.database.insert([rec])
+        for rec in TestModel.objects_in(self.database):
+            self.assertEqual(rec.f, UUID(values[0]))
+        # Check invalid values
+        for value in [None, 'zzz', -1, '123']:
+            with self.assertRaises(ValueError):
+                TestModel(i=1, f=value)
+