mirror of https://github.com/Infinidat/infi.clickhouse_orm.git
Merge branch 'develop' into qs-prewhere
# Conflicts:
#	src/infi/clickhouse_orm/query.py
This commit is contained in:
commit 375316fbf3
@@ -1,6 +1,12 @@
 Change Log
 ==========
 
+Unreleased
+----------
+- Added `timeout` parameter to database initializer (SUHAR1K)
+- Added `verify_ssl_cert` parameter to database initializer
+- Added `final()` method to querysets (M1hacka)
+
 v1.0.3
 ------
 - Bug fix: `QuerySet.count()` ignores slicing

@@ -10,7 +10,7 @@ infi.clickhouse_orm.database
 Database instances connect to a specific ClickHouse database for running queries,
 inserting data and other operations.
 
-#### Database(db_name, db_url="http://localhost:8123/", username=None, password=None, readonly=False, autocreate=True)
+#### Database(db_name, db_url="http://localhost:8123/", username=None, password=None, readonly=False, autocreate=True, timeout=60, verify_ssl_cert=True)
 
 
 Initializes a database instance. Unless it's readonly, the database will be

@@ -21,7 +21,9 @@ created on the ClickHouse server if it does not already exist.
 - `username`: optional connection credentials.
 - `password`: optional connection credentials.
 - `readonly`: use a read-only connection.
-- `autocreate`: automatically create the database if does not exist (unless in readonly mode).
+- `autocreate`: automatically create the database if it does not exist (unless in readonly mode).
+- `timeout`: the connection timeout in seconds.
+- `verify_ssl_cert`: whether to verify the server's certificate when connecting via HTTPS.
 
 
 #### add_setting(name, value)

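As an aside for readers of this hunk, the extended signature can be exercised roughly as follows; the endpoint, credentials and values below are placeholders, not part of the commit:

```python
from infi.clickhouse_orm.database import Database

# Placeholder endpoint and credentials; timeout and verify_ssl_cert are the new parameters.
db = Database(
    'demo_db',
    db_url='https://clickhouse.example.com:8443/',  # hypothetical HTTPS endpoint
    username='default',
    password='secret',
    timeout=10,             # give up on HTTP requests after 10 seconds
    verify_ssl_cert=False,  # e.g. when the server uses a self-signed certificate
)
```
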
@@ -865,6 +867,13 @@ Returns a copy of this queryset that includes only rows matching the conditions.
 Add q object to query if it specified.
 
 
+#### final()
+
+
+Adds a FINAL modifier to table, meaning data will be collapsed to final version.
+Can be used with `CollapsingMergeTree` engine only.
+
+
 #### only(*field_names)
 
 

@@ -966,6 +975,13 @@ Returns a copy of this queryset that includes only rows matching the conditions.
 Add q object to query if it specified.
 
 
+#### final()
+
+
+Adds a FINAL modifier to table, meaning data will be collapsed to final version.
+Can be used with `CollapsingMergeTree` engine only.
+
+
 #### group_by(*args)
 
 

@@ -125,6 +125,17 @@ Adds a DISTINCT clause to the query, meaning that any duplicate rows in the resu
 >>> Person.objects_in(database).only('first_name').distinct().count()
 94
 
+Final
+--------
+
+This method can be used only with CollapsingMergeTree engine.
+Adds a FINAL modifier to the query, meaning data is selected fully "collapsed" by sign field.
+
+>>> Person.objects_in(database).count()
+100
+>>> Person.objects_in(database).final().count()
+94
+
 Slicing
 -------
 

@@ -25,6 +25,7 @@
 * [Ordering](querysets.md#ordering)
 * [Omitting Fields](querysets.md#omitting-fields)
 * [Distinct](querysets.md#distinct)
+* [Final](querysets.md#final)
 * [Slicing](querysets.md#slicing)
 * [Pagination](querysets.md#pagination)
 * [Aggregation](querysets.md#aggregation)

@@ -73,7 +73,8 @@ class Database(object):
     '''
 
     def __init__(self, db_name, db_url='http://localhost:8123/',
-                 username=None, password=None, readonly=False, autocreate=True):
+                 username=None, password=None, readonly=False, autocreate=True,
+                 timeout=60, verify_ssl_cert=True):
         '''
         Initializes a database instance. Unless it's readonly, the database will be
         created on the ClickHouse server if it does not already exist.

@@ -83,15 +84,20 @@ class Database(object):
         - `username`: optional connection credentials.
         - `password`: optional connection credentials.
         - `readonly`: use a read-only connection.
-        - `autocreate`: automatically create the database if does not exist (unless in readonly mode).
+        - `autocreate`: automatically create the database if it does not exist (unless in readonly mode).
+        - `timeout`: the connection timeout in seconds.
+        - `verify_ssl_cert`: whether to verify the server's certificate when connecting via HTTPS.
         '''
         self.db_name = db_name
         self.db_url = db_url
         self.username = username
         self.password = password
         self.readonly = False
+        self.timeout = timeout
+        self.request_session = requests.Session()
+        self.request_session.verify = verify_ssl_cert
         self.settings = {}
-        self.db_exists = False
+        self.db_exists = False # this is required before running _is_existing_database
+        self.db_exists = self._is_existing_database()
         if readonly:
             if not self.db_exists:

@@ -116,6 +122,7 @@ class Database(object):
         Deletes the database on the ClickHouse server.
         '''
         self._send('DROP DATABASE `%s`' % self.db_name)
+        self.db_exists = False
 
     def create_table(self, model_class):
         '''

@@ -319,7 +326,7 @@ class Database(object):
         if isinstance(data, string_types):
             data = data.encode('utf-8')
         params = self._build_params(settings)
-        r = requests.post(self.db_url, params=params, data=data, stream=stream)
+        r = self.request_session.post(self.db_url, params=params, data=data, stream=stream, timeout=self.timeout)
         if r.status_code != 200:
             raise ServerError(r.text)
         return r

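For context, the change above amounts to reusing a single `requests.Session` (so `verify` is configured once) and bounding every call with `timeout`; a standalone sketch of that pattern, with illustrative values rather than code from this commit:

```python
import requests

session = requests.Session()
session.verify = False  # what verify_ssl_cert=False configures on the Database's session

try:
    # Every query goes through the same session, with a per-request timeout in seconds.
    r = session.post('http://localhost:8123/', params={'database': 'demo_db'},
                     data=b'SELECT 1', timeout=10)
    print(r.status_code, r.text)
except requests.exceptions.RequestException as e:
    print('request failed or timed out:', e)
```
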
@@ -1,8 +1,11 @@
 from __future__ import unicode_literals
+
 import six
 import pytz
 from copy import copy, deepcopy
 from math import ceil
+
+from .engines import CollapsingMergeTree
 from .utils import comma_join
 
 

@@ -290,6 +293,7 @@ class QuerySet(object):
         self._fields = model_cls.fields().keys()
         self._limits = None
         self._distinct = False
+        self._final = False
 
     def __iter__(self):
         """

@@ -335,9 +339,10 @@ class QuerySet(object):
         Returns the whole query as a SQL string.
         """
         distinct = 'DISTINCT ' if self._distinct else ''
+        final = ' FINAL' if self._final else ''
 
-        params = (distinct, self.select_fields_as_sql(), self._model_cls.table_name())
-        sql = u'SELECT %s%s\nFROM `%s`' % params
+        params = (distinct, self.select_fields_as_sql(), self._model_cls.table_name(), final)
+        sql = u'SELECT %s%s\nFROM `%s`%s' % params
 
         if self._prewhere_q:
             sql += '\nPREWHERE ' + self.conditions_as_sql(self._prewhere_q)

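The net effect on the generated SQL is a single extra token after the table name; an illustrative shape only (field list, table name and condition are assumptions, not captured output):

```python
# Illustrative only: with _final set, as_sql() places FINAL right after the table name,
# before any PREWHERE/WHERE clauses, roughly:
#
#   SELECT `num`, `color`, `sign`
#   FROM `samplecollapsingmodel` FINAL
#   PREWHERE num > 1
```
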
@@ -475,6 +480,18 @@ class QuerySet(object):
         qs._distinct = True
         return qs
 
+    def final(self):
+        """
+        Adds a FINAL modifier to table, meaning data will be collapsed to final version.
+        Can be used with `CollapsingMergeTree` engine only.
+        """
+        if not isinstance(self._model_cls.engine, CollapsingMergeTree):
+            raise TypeError('final() method can be used only with CollapsingMergeTree engine')
+
+        qs = copy(self)
+        qs._final = True
+        return qs
+
     def aggregate(self, *args, **kwargs):
         """
         Returns an `AggregateQuerySet` over this query, with `args` serving as

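A minimal usage sketch of the new method, using the test models defined further down in this commit (`Person` uses a plain `MergeTree` engine, `SampleCollapsingModel` a `CollapsingMergeTree`); the `db` instance is assumed:

```python
# Sketch, not part of the commit.
qs = SampleCollapsingModel.objects_in(db)
final_qs = qs.final()      # returns a modified copy; qs itself is unchanged
assert final_qs is not qs

try:
    Person.objects_in(db).final()  # wrong engine
except TypeError as e:
    print(e)  # final() method can be used only with CollapsingMergeTree engine
```
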
@@ -148,4 +148,4 @@ data = [
     {"first_name": "Whitney", "last_name": "Scott", "birthday": "1971-07-04", "height": "1.70"},
     {"first_name": "Wynter", "last_name": "Garcia", "birthday": "1975-01-10", "height": "1.69"},
     {"first_name": "Yolanda", "last_name": "Duke", "birthday": "1997-02-25", "height": "1.74"}
-];
+]

@@ -151,10 +151,15 @@ class DatabaseTestCase(TestCaseWithData):
         exc = cm.exception
         self.assertEqual(exc.code, 81)
         self.assertEqual(exc.message, "Database db_not_here doesn't exist")
-        # Now create the database - should succeed
-        db.create_database()
-        db.create_table(Person)
-        db.drop_database()
+        # Create and delete the db twice, to ensure db_exists gets updated
+        for i in range(2):
+            # Now create the database - should succeed
+            db.create_database()
+            self.assertTrue(db.db_exists)
+            db.create_table(Person)
+            # Drop the database
+            db.drop_database()
+            self.assertFalse(db.db_exists)
 
     def test_preexisting_db(self):
         db = Database(self.database.db_name, autocreate=False)

@@ -111,6 +111,22 @@ class QuerySetTestCase(TestCaseWithData):
         self._test_qs(qs.filter(birthday=date(1970, 12, 2)), 1)
         self._test_qs(qs.filter(birthday__lte=date(1970, 12, 2)), 3)
 
+    def test_mutiple_filter(self):
+        qs = Person.objects_in(self.database)
+        # Single filter call with multiple conditions is ANDed
+        self._test_qs(qs.filter(first_name='Ciaran', last_name='Carver'), 1)
+        # Separate filter calls are also ANDed
+        self._test_qs(qs.filter(first_name='Ciaran').filter(last_name='Carver'), 1)
+        self._test_qs(qs.filter(birthday='1970-12-02').filter(birthday='1986-01-07'), 0)
+
+    def test_multiple_exclude(self):
+        qs = Person.objects_in(self.database)
+        # Single exclude call with multiple conditions is ANDed
+        self._test_qs(qs.exclude(first_name='Ciaran', last_name='Carver'), 99)
+        # Separate exclude calls are ORed
+        self._test_qs(qs.exclude(first_name='Ciaran').exclude(last_name='Carver'), 98)
+        self._test_qs(qs.exclude(birthday='1970-12-02').exclude(birthday='1986-01-07'), 98)
+
     def test_only(self):
         qs = Person.objects_in(self.database).only('first_name', 'last_name')
         for person in qs:

@@ -148,6 +164,20 @@ class QuerySetTestCase(TestCaseWithData):
             SampleModel(timestamp=now, num=4, color=Color.white),
         ])
 
+    def _insert_sample_collapsing_model(self):
+        self.database.create_table(SampleCollapsingModel)
+        now = datetime.now()
+        self.database.insert([
+            SampleCollapsingModel(timestamp=now, num=1, color=Color.red),
+            SampleCollapsingModel(timestamp=now, num=2, color=Color.red),
+            SampleCollapsingModel(timestamp=now, num=2, color=Color.red, sign=-1),
+            SampleCollapsingModel(timestamp=now, num=2, color=Color.green),
+            SampleCollapsingModel(timestamp=now, num=3, color=Color.white),
+            SampleCollapsingModel(timestamp=now, num=4, color=Color.white, sign=1),
+            SampleCollapsingModel(timestamp=now, num=4, color=Color.white, sign=-1),
+            SampleCollapsingModel(timestamp=now, num=4, color=Color.blue, sign=1),
+        ])
+
     def test_filter_enum_field(self):
         self._insert_sample_model()
         qs = SampleModel.objects_in(self.database)

@@ -256,6 +286,17 @@ class QuerySetTestCase(TestCaseWithData):
         self._test_qs(qs[70:80], 10)
         self._test_qs(qs[80:], 20)
 
+    def test_final(self):
+        # Final can be used with CollapsingMergeTree engine only
+        with self.assertRaises(TypeError):
+            Person.objects_in(self.database).final()
+
+        self._insert_sample_collapsing_model()
+        res = list(SampleCollapsingModel.objects_in(self.database).final().order_by('num'))
+        self.assertEqual(4, len(res))
+        for item, exp_color in zip(res, (Color.red, Color.green, Color.white, Color.blue)):
+            self.assertEqual(exp_color, item.color)
+
 
 class AggregateTestCase(TestCaseWithData):
 

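For readers checking the expected count of 4: with the ordering key `('num',)`, rows whose `sign` values cancel collapse away under FINAL; a rough tally of the data inserted by `_insert_sample_collapsing_model`, assuming standard CollapsingMergeTree semantics:

```python
# Illustrative tally, not test code:
# num=1: red(+1)                         -> survives as red
# num=2: red(+1), red(-1), green(+1)     -> the +1/-1 pair cancels, green survives
# num=3: white(+1)                       -> survives as white
# num=4: white(+1), white(-1), blue(+1)  -> the +1/-1 pair cancels, blue survives
# => 4 rows ordered by num: red, green, white, blue (the colors asserted in test_final)
```
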
@@ -399,6 +440,13 @@ class SampleModel(Model):
     engine = MergeTree('materialized_date', ('materialized_date',))
 
 
+class SampleCollapsingModel(SampleModel):
+
+    sign = Int8Field(default=1)
+
+    engine = CollapsingMergeTree('materialized_date', ('num',), 'sign')
+
+
 class Numbers(Model):
 
     number = UInt64Field()