- improve imports

- documentation updates
Itai Shirav 2020-05-01 20:11:40 +03:00
parent 677e08f723
commit 6dee101593
23 changed files with 988 additions and 311 deletions

View File

@@ -12,6 +12,7 @@ from infi.clickhouse_orm.database import Database
from infi.clickhouse_orm.models import Model
from infi.clickhouse_orm.fields import *
from infi.clickhouse_orm.engines import Memory
+from infi.clickhouse_orm.funcs import F

class CPUStats(Model):
@@ -45,13 +46,13 @@ Querying the table is easy, using either the query builder or raw SQL:
```python
# Calculate what percentage of the time CPU 1 was over 95% busy
-total = CPUStats.objects_in(db).filter(cpu_id=1).count()
+total = CPUStats.objects_in(db).filter(CPUStats.cpu_id == 1).count()
-busy = CPUStats.objects_in(db).filter(cpu_id=1, cpu_percent__gt=95).count()
+busy = CPUStats.objects_in(db).filter(CPUStats.cpu_id == 1, CPUStats.cpu_percent > 95).count()
-print 'CPU 1 was busy {:.2f}% of the time'.format(busy * 100.0 / total)
+print('CPU 1 was busy {:.2f}% of the time'.format(busy * 100.0 / total))

# Calculate the average usage per CPU
-for row in CPUStats.objects_in(db).aggregate('cpu_id', average='avg(cpu_percent)'):
+for row in CPUStats.objects_in(db).aggregate(CPUStats.cpu_id, average=F.avg(CPUStats.cpu_percent)):
-    print 'CPU {row.cpu_id}: {row.average:.2f}%'.format(row=row)
+    print('CPU {row.cpu_id}: {row.average:.2f}%'.format(row=row))
```
To learn more please visit the [documentation](docs/toc.md).
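
The switch above from keyword filters to F expressions is the user-visible side of this commit. As a quick illustration of what those expressions produce (not part of the diff; it assumes the CPUStats model and db object from the README example, and uses the as_sql()/to_sql() helpers that the test changes below also rely on):

```python
from infi.clickhouse_orm.funcs import F

# Build a queryset with the new expression-based filters and inspect its SQL.
qs = CPUStats.objects_in(db).filter(CPUStats.cpu_id == 1, CPUStats.cpu_percent > 95)
print(qs.as_sql())                            # the generated SELECT ... WHERE statement
print(F.avg(CPUStats.cpu_percent).to_sql())   # the rendered aggregate expression
```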

File diff suppressed because it is too large

View File

@@ -93,4 +93,4 @@ expr = F("someFunctionName", arg1, arg2, ...)
Note that higher-order database functions (those that use lambda expressions) are not supported.
---
-[<< Models and Databases](models_and_databases.md) | [Table of Contents](toc.md) | [Querysets >>](querysets.md)
+[<< Models and Databases](models_and_databases.md) | [Table of Contents](toc.md) | [Importing ORM Classes >>](importing_orm_classes.md)

View File

@@ -225,4 +225,4 @@ values aggregated for all rows suitable for filters.
---
-[<< Expressions](expressions.md) | [Table of Contents](toc.md) | [Field Options >>](field_options.md)
+[<< Importing ORM Classes](importing_orm_classes.md) | [Table of Contents](toc.md) | [Field Options >>](field_options.md)

View File

@@ -48,6 +48,7 @@
* [Simple Engines](table_engines.md#simple-engines)
* [Engines in the MergeTree Family](table_engines.md#engines-in-the-mergetree-family)
* [Custom partitioning](table_engines.md#custom-partitioning)
+* [Primary key](table_engines.md#primary-key)
* [Data Replication](table_engines.md#data-replication)
* [Buffer Engine](table_engines.md#buffer-engine)
* [Merge Engine](table_engines.md#merge-engine)
@@ -117,8 +118,7 @@
* [infi.clickhouse_orm.query](class_reference.md#infi.clickhouse_orm.query)
    * [QuerySet](class_reference.md#queryset)
    * [AggregateQuerySet](class_reference.md#aggregatequeryset)
+    * [Q](class_reference.md#q)
* [infi.clickhouse_orm.funcs](class_reference.md#infi.clickhouse_orm.funcs)
    * [F](class_reference.md#f)
-* [infi.clickhouse_orm.system_models](class_reference.md#infi.clickhouse_orm.system_models)
-    * [SystemPart](class_reference.md#systempart)

View File

@@ -135,6 +135,6 @@ if __name__ == '__main__':
    module_doc([models.Model, models.BufferModel, models.DistributedModel])
    module_doc(sorted([fields.Field] + all_subclasses(fields.Field), key=lambda x: x.__name__), False)
    module_doc([engines.Engine] + all_subclasses(engines.Engine), False)
-    module_doc([query.QuerySet, query.AggregateQuerySet])
+    module_doc([query.QuerySet, query.AggregateQuerySet, query.Q])
    module_doc([funcs.F])
    module_doc([system_models.SystemPart])

View File

@@ -1 +1,13 @@
__import__("pkg_resources").declare_namespace(__name__)
+
+from infi.clickhouse_orm.database import *
+from infi.clickhouse_orm.engines import *
+from infi.clickhouse_orm.fields import *
+from infi.clickhouse_orm.funcs import *
+from infi.clickhouse_orm.migrations import *
+from infi.clickhouse_orm.models import *
+from infi.clickhouse_orm.query import *
+from infi.clickhouse_orm.system_models import *
+
+from inspect import isclass
+__all__ = [c.__name__ for c in locals().values() if isclass(c)]
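
Since the package __init__.py now wildcard-imports every submodule and collects all classes into __all__, user code should be able to import everything from the package root instead of the individual modules. A minimal sketch under that assumption (the model mirrors the README example; running it requires a reachable ClickHouse server):

```python
# Flat imports from the package root, relying on the new __init__.py re-exports.
from infi.clickhouse_orm import Database, Model, DateTimeField, UInt16Field, Float32Field, Memory

class CPUStats(Model):
    timestamp = DateTimeField()
    cpu_id = UInt16Field()
    cpu_percent = Float32Field()
    engine = Memory()

db = Database('demo')          # connects to http://localhost:8123 by default
db.create_table(CPUStats)
busy = CPUStats.objects_in(db).filter(CPUStats.cpu_percent > 95).count()
```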

View File

@@ -411,3 +411,7 @@ class Database(object):
    def _is_connection_readonly(self):
        r = self._send("SELECT value FROM system.settings WHERE name = 'readonly'")
        return r.text.strip() != '0'
+
+
+# Expose only relevant classes in import *
+__all__ = [c.__name__ for c in [Page, DatabaseException, ServerError, Database]]

View File

@@ -2,7 +2,7 @@ from __future__ import unicode_literals
import logging
-from .utils import comma_join
+from .utils import comma_join, get_subclass_names

logger = logging.getLogger('clickhouse_orm')
@@ -262,3 +262,7 @@ class Distributed(Engine):
        if self.sharding_key:
            params.append(self.sharding_key)
        return params
+
+
+# Expose only relevant classes in import *
+__all__ = get_subclass_names(locals(), Engine)

View File

@@ -6,7 +6,7 @@ from calendar import timegm
from decimal import Decimal, localcontext
from uuid import UUID
from logging import getLogger
-from .utils import escape, parse_array, comma_join, string_or_func
+from .utils import escape, parse_array, comma_join, string_or_func, get_subclass_names
from .funcs import F, FunctionOperatorsMixin
from ipaddress import IPv4Address, IPv6Address
@@ -598,3 +598,8 @@ class LowCardinalityField(Field):
        if with_default_expression:
            sql += self._extra_params(db)
        return sql
+
+
+# Expose only relevant classes in import *
+__all__ = get_subclass_names(locals(), Field)

View File

@@ -1812,3 +1812,7 @@ class F(Cond, FunctionOperatorsMixin, metaclass=FMeta):
    def greatest(x, y):
        return F('greatest', x, y)
+
+
+# Expose only relevant classes in import *
+__all__ = ['F']

View File

@@ -1,7 +1,7 @@
from .models import Model, BufferModel
from .fields import DateField, StringField
from .engines import MergeTree
-from .utils import escape
+from .utils import escape, get_subclass_names

import logging
logger = logging.getLogger('migrations')
@@ -177,3 +177,7 @@ class MigrationHistory(Model):
    @classmethod
    def table_name(cls):
        return 'infi_clickhouse_orm_migrations'
+
+
+# Expose only relevant classes in import *
+__all__ = get_subclass_names(locals(), Operation)

View File

@@ -7,7 +7,7 @@ from six import reraise
import pytz
from .fields import Field, StringField
-from .utils import parse_tsv, NO_VALUE
+from .utils import parse_tsv, NO_VALUE, get_subclass_names
from .query import QuerySet
from .funcs import F
from .engines import Merge, Distributed
@@ -459,3 +459,5 @@ class DistributedModel(Model):
        return '\n'.join(parts)

+# Expose only relevant classes in import *
+__all__ = get_subclass_names(locals(), Model)

View File

@@ -646,3 +646,7 @@ class AggregateQuerySet(QuerySet):
        qs = copy(self)
        qs._grouping_with_totals = True
        return qs
+
+
+# Expose only relevant classes in import *
+__all__ = [c.__name__ for c in [Q, QuerySet, AggregateQuerySet]]

View File

@@ -158,3 +158,7 @@ class SystemPart(Model):
            conditions += ' AND '
        conditions += 'active'
        return SystemPart.get(database, conditions=conditions)
+
+
+# Expose only relevant classes in import *
+__all__ = [c.__name__ for c in [SystemPart]]

View File

@@ -114,6 +114,11 @@ def is_iterable(obj):
        return False

+
+def get_subclass_names(locals, base_class):
+    from inspect import isclass
+    return [c.__name__ for c in locals.values() if isclass(c) and issubclass(c, base_class)]
+

class NoValue:
    '''
    A sentinel for fields with an expression for a default value,
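
get_subclass_names is the helper behind the __all__ = get_subclass_names(locals(), Engine) lines added to engines.py, fields.py, migrations.py and models.py above: it walks a namespace dict and keeps only the classes derived from the given base, so that import * exposes just those. A self-contained sketch of the idea (the toy classes below are stand-ins, not the real ORM ones):

```python
from inspect import isclass

def get_subclass_names(locals, base_class):
    # Same logic as the helper added to utils.py.
    return [c.__name__ for c in locals.values() if isclass(c) and issubclass(c, base_class)]

class Engine:
    pass

class MergeTree(Engine):
    pass

class Memory(Engine):
    pass

class Helper:  # not an Engine subclass, so it is filtered out
    pass

# At module level, locals() is the module namespace, so `from this_module import *`
# would expose only Engine and its subclasses.
__all__ = get_subclass_names(locals(), Engine)
print(__all__)  # ['Engine', 'MergeTree', 'Memory']
```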

View File

@@ -6,6 +6,7 @@ from infi.clickhouse_orm.database import Database
from infi.clickhouse_orm.models import Model, NO_VALUE
from infi.clickhouse_orm.fields import *
from infi.clickhouse_orm.engines import *
+from infi.clickhouse_orm.funcs import F

class AliasFieldsTest(unittest.TestCase):

View File

@@ -7,6 +7,7 @@ from infi.clickhouse_orm.database import ServerError, DatabaseException
from infi.clickhouse_orm.models import Model
from infi.clickhouse_orm.engines import Memory
from infi.clickhouse_orm.fields import *
+from infi.clickhouse_orm.funcs import F
from .base_test_with_data import *

View File

@@ -1,5 +1,7 @@
from __future__ import unicode_literals
import unittest
+import datetime
+import pytz
from infi.clickhouse_orm.database import Database
from infi.clickhouse_orm.models import Model

View File

@@ -1,5 +1,6 @@
from __future__ import unicode_literals
import unittest
+import datetime
from infi.clickhouse_orm.system_models import SystemPart
from infi.clickhouse_orm.database import Database, DatabaseException, ServerError

View File

@@ -2,9 +2,14 @@ import unittest
from .base_test_with_data import *
from .test_querysets import SampleModel
from datetime import date, datetime, tzinfo, timedelta
+import pytz
from ipaddress import IPv4Address, IPv6Address
+import logging
+from decimal import Decimal
from infi.clickhouse_orm.database import ServerError
from infi.clickhouse_orm.utils import NO_VALUE
+from infi.clickhouse_orm.funcs import F

class FuncsTestCase(TestCaseWithData):
@@ -14,28 +19,28 @@ class FuncsTestCase(TestCaseWithData):
        self.database.insert(self._sample_data())

    def _test_qs(self, qs, expected_count):
-        logger.info(qs.as_sql())
+        logging.info(qs.as_sql())
        count = 0
        for instance in qs:
            count += 1
-            logger.info('\t[%d]\t%s' % (count, instance.to_dict()))
+            logging.info('\t[%d]\t%s' % (count, instance.to_dict()))
        self.assertEqual(count, expected_count)
        self.assertEqual(qs.count(), expected_count)

    def _test_func(self, func, expected_value=NO_VALUE):
        sql = 'SELECT %s AS value' % func.to_sql()
-        logger.info(sql)
+        logging.info(sql)
        result = list(self.database.select(sql))
-        logger.info('\t==> %s', result[0].value if result else '<empty>')
+        logging.info('\t==> %s', result[0].value if result else '<empty>')
        if expected_value != NO_VALUE:
            self.assertEqual(result[0].value, expected_value)
        return result[0].value if result else None

    def _test_aggr(self, func, expected_value=NO_VALUE):
        qs = Person.objects_in(self.database).aggregate(value=func)
-        logger.info(qs.as_sql())
+        logging.info(qs.as_sql())
        result = list(qs)
-        logger.info('\t==> %s', result[0].value if result else '<empty>')
+        logging.info('\t==> %s', result[0].value if result else '<empty>')
        if expected_value != NO_VALUE:
            self.assertEqual(result[0].value, expected_value)
        return result[0].value if result else None

View File

@@ -6,6 +6,7 @@ from infi.clickhouse_orm.database import Database
from infi.clickhouse_orm.models import Model, NO_VALUE
from infi.clickhouse_orm.fields import *
from infi.clickhouse_orm.engines import *
+from infi.clickhouse_orm.funcs import F

class MaterializedFieldsTest(unittest.TestCase):

View File

@@ -1,13 +1,13 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, print_function
import unittest
from infi.clickhouse_orm.database import Database
from infi.clickhouse_orm.query import Q
from infi.clickhouse_orm.funcs import F
from .base_test_with_data import *
from datetime import date, datetime
from enum import Enum
-from decimal import Decimal
from logging import getLogger

logger = getLogger('tests')