Tuple names may not exist

sw 2022-06-04 12:47:35 +08:00
parent 26437832d8
commit 1a366ce625
30 changed files with 1548 additions and 258 deletions

View File

@ -1,64 +0,0 @@
name: ci
on:
  push:
    # Publish `master` as Docker `latest` image.
    branches:
      - master
      - develop
    # Publish `v1.2.3` tags as releases.
    tags:
      - v*
  # Run tests for any PRs.
  pull_request:
env:
  IMAGE_NAME: ch_orm
jobs:
  # Run tests.
  # See also https://docs.docker.com/docker-hub/builds/automated-testing/
  test:
    runs-on: ubuntu-latest
    services:
      clickhouse:
        image: clickhouse/clickhouse-server
        ports:
          - 8123:8123
          - 9000:9000
        options: --ulimit nofile=262144:262144
    strategy:
      matrix:
        python-version: [ "3.7", "3.8", "3.9", "3.10" ]
    steps:
      - uses: actions/checkout@v2
      - name: Build and Install
        run: |
          pip install build
          python -m build
          pip install dist/*
          pip install coveralls
      - name: UnitTest
        run: |
          coverage run --source=clickhouse_orm -m unittest
      - name: Upload Coverage
        run: coveralls --service=github
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          COVERALLS_FLAG_NAME: ${{ matrix.python-version }}
          COVERALLS_PARALLEL: true
  coveralls:
    name: Finish Coveralls
    needs: test
    runs-on: ubuntu-latest
    container: python:3-slim
    steps:
      - name: Finished
        run: |
          pip3 install --upgrade coveralls
          coveralls --finish
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

44
.github/workflows/coverage.yml vendored Normal file
View File

@ -0,0 +1,44 @@
name: Coverage check
on:
  push:
    branches:
      - master
  pull_request:
    branches:
      - master
jobs:
  test:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        python-version: [3.9]
        os: [ubuntu-latest]
      fail-fast: false
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v1
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies 🔨
        run: |
          python -m pip install --upgrade pip
          pip install build
          python -m build
          pip install dist/*
          pip install coveralls
      - name: Run coverage
        run: |
          coverage run --source=clickhouse_orm -m unittest
      - name: Upload Coverage
        run: coveralls --service=github
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          COVERALLS_FLAG_NAME: ${{ matrix.python-version }}
          COVERALLS_PARALLEL: true
      - name: Finished
        run: |
          coveralls --finish
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

41
.github/workflows/pr-python310.yml vendored Normal file
View File

@ -0,0 +1,41 @@
name: Python 3.10 Tests
on:
  push:
    # Publish `master` as Docker `latest` image.
    branches:
      - master
      - develop
    # Publish `v1.2.3` tags as releases.
    tags:
      - v*
  # Run tests for any PRs.
  pull_request:
    branches:
      - master
      - develop
jobs:
  testPy310:
    runs-on: ubuntu-latest
    services:
      clickhouse:
        image: clickhouse/clickhouse-server
        ports:
          - 8123:8123
          - 9000:9000
        options: --ulimit nofile=262144:262144
    strategy:
      matrix:
        python-version: [ "3.10" ]
    steps:
      - uses: actions/checkout@v2
      - name: Build and Install
        run: |
          pip install build
          python -m build
          pip install dist/*
      - name: Run Unit Tests
        run: |
          python -m unittest

41
.github/workflows/pr-python37.yml vendored Normal file
View File

@ -0,0 +1,41 @@
name: Python 3.7 Tests
on:
  push:
    # Publish `master` as Docker `latest` image.
    branches:
      - master
      - develop
    # Publish `v1.2.3` tags as releases.
    tags:
      - v*
  # Run tests for any PRs.
  pull_request:
    branches:
      - master
      - develop
jobs:
  testPy37:
    runs-on: ubuntu-latest
    services:
      clickhouse:
        image: clickhouse/clickhouse-server
        ports:
          - 8123:8123
          - 9000:9000
        options: --ulimit nofile=262144:262144
    strategy:
      matrix:
        python-version: [ "3.7" ]
    steps:
      - uses: actions/checkout@v2
      - name: Build and Install
        run: |
          pip install build
          python -m build
          pip install dist/*
      - name: Run Unit Tests
        run: |
          python -m unittest

41
.github/workflows/pr-python38.yml vendored Normal file
View File

@ -0,0 +1,41 @@
name: Python 3.8 Tests
on:
  push:
    # Publish `master` as Docker `latest` image.
    branches:
      - master
      - develop
    # Publish `v1.2.3` tags as releases.
    tags:
      - v*
  # Run tests for any PRs.
  pull_request:
    branches:
      - master
      - develop
jobs:
  testPy38:
    runs-on: ubuntu-latest
    services:
      clickhouse:
        image: clickhouse/clickhouse-server
        ports:
          - 8123:8123
          - 9000:9000
        options: --ulimit nofile=262144:262144
    strategy:
      matrix:
        python-version: [ "3.8" ]
    steps:
      - uses: actions/checkout@v2
      - name: Build and Install
        run: |
          pip install build
          python -m build
          pip install dist/*
      - name: Run Unit Tests
        run: |
          python -m unittest

41
.github/workflows/pr-python39.yml vendored Normal file
View File

@ -0,0 +1,41 @@
name: Python 3.9 Tests
on:
  push:
    # Publish `master` as Docker `latest` image.
    branches:
      - master
      - develop
    # Publish `v1.2.3` tags as releases.
    tags:
      - v*
  # Run tests for any PRs.
  pull_request:
    branches:
      - master
      - develop
jobs:
  testPy39:
    runs-on: ubuntu-latest
    services:
      clickhouse:
        image: clickhouse/clickhouse-server
        ports:
          - 8123:8123
          - 9000:9000
        options: --ulimit nofile=262144:262144
    strategy:
      matrix:
        python-version: [ "3.9" ]
    steps:
      - uses: actions/checkout@v2
      - name: Build and Install
        run: |
          pip install build
          python -m build
          pip install dist/*
      - name: Run Unit Tests
        run: |
          python -m unittest

View File

@ -2,6 +2,7 @@ A fork of [infi.clickhouse_orm](https://github.com/Infinidat/infi.clickhouse_orm)
This repository makes heavier use of type hints and drops support for Python 2.x.
It supports both synchronous and asynchronous interaction with the ClickHouse server, meaning you can use asyncio to perform asynchronous queries, although the asynchronous mode is not yet well tested.
| Build | [![Build Status](https://github.com/sswest/ch-orm/workflows/ci/badge.svg)](https://github.com/sswest/ch-orm/actions?query=workflow:ci)[![Coverage Status](https://coveralls.io/repos/github/sswest/ch-orm/badge.svg?branch=develop)](https://coveralls.io/github/sswest/ch-orm?branch=develop) |
| ------- | ------------------------------------------------------------ |

60
docs/async_databases.md Normal file
View File

@ -0,0 +1,60 @@
Async Databases
====================
Databases in async mode expose essentially the same API; in most cases you just need to add `await`.
Insert from the AioDatabase
-------------------------
To write your instances to ClickHouse, you need an `AioDatabase` instance:
```python
from clickhouse_orm.aio.database import AioDatabase
db = AioDatabase('my_test_db')
async def main():
    await db.init()
    ...
```
**Unlike the synchronous `Database` class, you must call the asynchronous `init()` method before the instance can be used.**
Using the `AioDatabase` instance you can create a table for your model, and insert instances to it:
```python
from clickhouse_orm.aio.database import AioDatabase
db = AioDatabase('my_test_db')
async def main():
    await db.init()
    await db.create_table(Person)
    await db.insert([dan, suzy])
```
The `insert` method can take any iterable of model instances, but they all must belong to the same model class.
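For example, a generator works just as well as a list. A minimal sketch (assuming the `Person` model from the synchronous docs and an iterable of row dicts):
```python
async def bulk_insert(db, rows):
    # db.insert accepts any iterable, as long as every item
    # belongs to the same model class
    await db.insert(Person(**row) for row in rows)
```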
Reading from the AioDatabase
-------------------------
Loading model instances from the database is easy, use the `async for` keyword:
```python
async for person in db.select("SELECT * FROM my_test_db.person", model_class=Person):
    print(person.first_name, person.last_name)
```
**Note: `AioDatabase` does not support getting a queryset item by index.**
```python
async def main():
    await db.init()
    # incorrect: fetching a single item by index is not supported
    person = Person.objects_in(db)[5]
    # correct: slice the queryset and iterate asynchronously
    person = [p async for p in Person.objects_in(db)[5:6]][0]
```
[<< Models and Databases](models_and_databases.md) | [Table of Contents](toc.md) | [Expressions >>](expressions.md)

File diff suppressed because it is too large

View File

@ -1,7 +1,7 @@
Contributing
============
This project is hosted on GitHub - [https://github.com/Infinidat/infi.clickhouse_orm/](https://github.com/Infinidat/infi.clickhouse_orm/).
This project is hosted on GitHub - [https://github.com/sswest/ch-orm](https://github.com/sswest/ch-orm).
Please open an issue there if you encounter a bug or want to request a feature.
Pull requests are also welcome.
@ -11,29 +11,24 @@ Building
After cloning the project, run the following commands:
easy_install -U infi.projector
cd infi.clickhouse_orm
projector devenv build
pip install build
python -m build
A `setup.py` file will be generated, which you can use to install the development version of the package:
A `dist` directory will be generated, which you can use to install the development version of the package:
python setup.py install
pip install dist/*
Tests
-----
To run the tests, ensure that the ClickHouse server is running on <http://localhost:8123/> (this is the default), and run:
bin/nosetests
python -m unittest
To see test coverage information run:
bin/nosetests --with-coverage --cover-package=infi.clickhouse_orm
To test with tox, ensure that the setup.py is present (otherwise run `bin/buildout buildout:develop= setup.py`) and run:
pip install tox
tox
coverage run --source=clickhouse_orm -m unittest
coverage report -m
---

View File

@ -96,4 +96,4 @@ Note that higher-order database functions (those that use lambda expressions) ar
---
[<< Models and Databases](models_and_databases.md) | [Table of Contents](toc.md) | [Importing ORM Classes >>](importing_orm_classes.md)
[<< Async Databases](async_databases.md) | [Table of Contents](toc.md) | [Importing ORM Classes >>](importing_orm_classes.md)

View File

@ -8,6 +8,7 @@ All field types accept the following arguments:
- materialized
- readonly
- codec
- db_column
Note that `default`, `alias` and `materialized` are mutually exclusive - you cannot use more than one of them in a single field.
@ -25,7 +26,7 @@ class Event(Model):
engine = Memory()
...
```
When creating a model instance, any fields you do not specify get their default value. Fields that use a default expression are assigned a sentinel value of `infi.clickhouse_orm.utils.NO_VALUE` instead. For example:
When creating a model instance, any fields you do not specify get their default value. Fields that use a default expression are assigned a sentinel value of `clickhouse_orm.utils.NO_VALUE` instead. For example:
```python
>>> event = Event()
>>> print(event.to_dict())
@ -33,6 +34,20 @@ When creating a model instance, any fields you do not specify get their default
```
:warning: Due to a bug in ClickHouse versions prior to 20.1.2.4, insertion of records with expressions for default values may fail.
## db_column
`db_column` allows you to use the column names defined by the ClickHouse backend, rather than the Field attribute names.
```python
class Style(Model):
    create_time = DateTimeField(default=F.now(), db_column="createTime")

    engine = Memory()
```
You can use the `create_time` field for all ORM operations, but ClickHouse will store the column as `createTime`.
## alias / materialized
The `alias` and `materialized` attributes expect an expression that gets calculated by the database. The difference is that `alias` fields are calculated on the fly, while `materialized` fields are calculated when the record is inserted, and are stored on disk.
@ -63,7 +78,7 @@ db.select('SELECT created, created_date, username, name FROM $db.event', model_c
# created_date and username will contain a default value
db.select('SELECT * FROM $db.event', model_class=Event)
```
When creating a model instance, any alias or materialized fields are assigned a sentinel value of `infi.clickhouse_orm.utils.NO_VALUE` since their real values can only be known after insertion to the database.
When creating a model instance, any alias or materialized fields are assigned a sentinel value of `clickhouse_orm.utils.NO_VALUE` since their real values can only be known after insertion to the database.
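To make the sentinel concrete, here is a minimal sketch (assuming the `Event` model above, whose `created_date` field is materialized):
```python
import datetime

from clickhouse_orm.utils import NO_VALUE

event = Event(created=datetime.datetime(2020, 1, 1))
# alias/materialized values are only known after the row is read back
assert event.created_date is NO_VALUE
```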
## codec

View File

@ -5,34 +5,37 @@ See: [ClickHouse Documentation](https://clickhouse.tech/docs/en/sql-reference/da
The following field types are supported:
| Class | DB Type | Pythonic Type | Comments
| ------------------ | ---------- | --------------------- | -----------------------------------------------------
| StringField | String | str | Encoded as UTF-8 when written to ClickHouse
| FixedStringField | FixedString| str | Encoded as UTF-8 when written to ClickHouse
| DateField | Date | datetime.date | Range 1970-01-01 to 2105-12-31
| DateTimeField | DateTime | datetime.datetime | Minimal value is 1970-01-01 00:00:00; Timezone aware
| DateTime64Field | DateTime64 | datetime.datetime | Minimal value is 1970-01-01 00:00:00; Timezone aware
| Int8Field | Int8 | int | Range -128 to 127
| Int16Field | Int16 | int | Range -32768 to 32767
| Int32Field | Int32 | int | Range -2147483648 to 2147483647
| Int64Field | Int64 | int | Range -9223372036854775808 to 9223372036854775807
| UInt8Field | UInt8 | int | Range 0 to 255
| UInt16Field | UInt16 | int | Range 0 to 65535
| UInt32Field | UInt32 | int | Range 0 to 4294967295
| UInt64Field | UInt64 | int | Range 0 to 18446744073709551615
| Float32Field | Float32 | float |
| Float64Field | Float64 | float |
| DecimalField | Decimal | Decimal | Pythonic values are rounded to fit the scale of the database field
| Decimal32Field | Decimal32 | Decimal | Ditto
| Decimal64Field | Decimal64 | Decimal | Ditto
| Decimal128Field | Decimal128 | Decimal | Ditto
| UUIDField | UUID | uuid.UUID |
| IPv4Field | IPv4 | ipaddress.IPv4Address |
| IPv6Field | IPv6 | ipaddress.IPv6Address |
| Enum8Field | Enum8 | Enum | See below
| Enum16Field | Enum16 | Enum | See below
| ArrayField | Array | list | See below
| NullableField | Nullable | See below | See below
| Class | DB Type | Pythonic Type | Comments
|------------------|-------------|------------------------| -----------------------------------------------------
| StringField | String | str | Encoded as UTF-8 when written to ClickHouse
| FixedStringField | FixedString | str | Encoded as UTF-8 when written to ClickHouse
| DateField | Date | datetime.date | Range 1970-01-01 to 2105-12-31
| DateTimeField | DateTime | datetime.datetime | Minimal value is 1970-01-01 00:00:00; Timezone aware
| DateTime64Field | DateTime64 | datetime.datetime | Minimal value is 1970-01-01 00:00:00; Timezone aware
| Int8Field | Int8 | int | Range -128 to 127
| Int16Field | Int16 | int | Range -32768 to 32767
| Int32Field | Int32 | int | Range -2147483648 to 2147483647
| Int64Field | Int64 | int | Range -9223372036854775808 to 9223372036854775807
| UInt8Field | UInt8 | int | Range 0 to 255
| UInt16Field | UInt16 | int | Range 0 to 65535
| UInt32Field | UInt32 | int | Range 0 to 4294967295
| UInt64Field | UInt64 | int | Range 0 to 18446744073709551615
| Float32Field | Float32 | float |
| Float64Field | Float64 | float |
| DecimalField | Decimal | Decimal | Pythonic values are rounded to fit the scale of the database field
| Decimal32Field | Decimal32 | Decimal | Ditto
| Decimal64Field | Decimal64 | Decimal | Ditto
| Decimal128Field | Decimal128 | Decimal | Ditto
| UUIDField | UUID | uuid.UUID |
| IPv4Field | IPv4 | ipaddress.IPv4Address |
| IPv6Field | IPv6 | ipaddress.IPv6Address |
| Enum8Field | Enum8 | Enum | See below
| Enum16Field | Enum16 | Enum | See below
| ArrayField | Array | list | See below
| TupleField | Tuple | tuple | See below
| PointField | Point | contrib.geo.fields.Point | Experimental feature
| RingField | Ring | contrib.geo.fields.Ring | Experimental feature
| NullableField | Nullable | See below | See below
DateTimeField and Time Zones
@ -96,6 +99,28 @@ data = SensorData(date=date.today(), temperatures=[25.5, 31.2, 28.7], humidity_l
Note that multidimensional arrays are not supported yet by the ORM.
Working with tuple fields
-------------------------
You can create tuple fields containing multiple data types, for example:
```python
from datetime import date
from clickhouse_orm.models import Model
from clickhouse_orm.engines import MergeTree
from clickhouse_orm.fields import DateField, Float32Field, UInt8Field, TupleField
class SensorData(Model):
    date = DateField()
    info = TupleField([('t', Float32Field()), ('h', UInt8Field())])

    engine = MergeTree('date', ('date',))

data = SensorData(date=date.today(), info=(25.5, 41))
```
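Values are coerced element-wise to each inner field's Pythonic type, mirroring the tuple-field tests added in this commit (a sketch, using the `SensorData` model above):
```python
data = SensorData(date=date.today(), info=('25.5', 41))
assert data.info == (25.5, 41)  # '25.5' was coerced by Float32Field
```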
Working with nullable fields
----------------------------
[ClickHouse provides a NULL value support](https://clickhouse.tech/docs/en/sql-reference/data-types/nullable/).

View File

@ -7,24 +7,24 @@ The ORM supports different styles of importing and referring to its classes, so
Importing Everything
--------------------
It is safe to use `import *` from `infi.clickhouse_orm` or its submodules. Only classes that are needed by users of the ORM will get imported, and nothing else:
It is safe to use `import *` from `clickhouse_orm` or its submodules. Only classes that are needed by users of the ORM will get imported, and nothing else:
```python
from infi.clickhouse_orm import *
from clickhouse_orm import *
```
This is exactly equivalent to the following import statements:
```python
from infi.clickhouse_orm.database import *
from infi.clickhouse_orm.engines import *
from infi.clickhouse_orm.fields import *
from infi.clickhouse_orm.funcs import *
from infi.clickhouse_orm.migrations import *
from infi.clickhouse_orm.models import *
from infi.clickhouse_orm.query import *
from infi.clickhouse_orm.system_models import *
from clickhouse_orm.database import *
from clickhouse_orm.engines import *
from clickhouse_orm.fields import *
from clickhouse_orm.funcs import *
from clickhouse_orm.migrations import *
from clickhouse_orm.models import *
from clickhouse_orm.query import *
from clickhouse_orm.system_models import *
```
By importing everything, all of the ORM's public classes can be used directly. For example:
```python
from infi.clickhouse_orm import *
from clickhouse_orm import *
class Event(Model):
@ -40,8 +40,8 @@ Importing Everything into a Namespace
To prevent potential name clashes and to make the code more readable, you can import the ORM's classes into a namespace of your choosing, e.g. `orm`. For brevity, it is recommended to import the `F` class explicitly:
```python
import infi.clickhouse_orm as orm
from infi.clickhouse_orm import F
import clickhouse_orm as orm
from clickhouse_orm import F
class Event(orm.Model):
@ -57,7 +57,7 @@ Importing Specific Submodules
It is possible to import only the submodules you need, and use their names to qualify the ORM's class names. This option is more verbose, but makes it clear where each class comes from. For example:
```python
from infi.clickhouse_orm import models, fields, engines, F
from clickhouse_orm import models, fields, engines, F
class Event(models.Model):
@ -73,7 +73,7 @@ Importing Specific Classes
If you prefer, you can import only the specific ORM classes that you need directly from `infi.clickhouse_orm`:
```python
from infi.clickhouse_orm import Model, StringField, UInt32Field, DateTimeField, F, Memory
from clickhouse_orm import Model, StringField, UInt32Field, DateTimeField, F, Memory
class Event(Model):

View File

@ -3,14 +3,16 @@ Overview
This project is a simple ORM for working with the [ClickHouse database](https://clickhouse.tech/). It allows you to define model classes whose instances can be written to the database and read from it.
Version 1.x supports Python 2.7 and 3.5+. Version 2.x dropped support for Python 2.7, and works only with Python 3.5+.
This repository makes heavier use of type hints and drops support for Python 2.x.
It supports both synchronous and asynchronous interaction with the ClickHouse server, meaning you can use asyncio to perform asynchronous queries, although the asynchronous mode is not yet well tested.
Installation
------------
To install infi.clickhouse_orm:
To install clickhouse_orm:
pip install infi.clickhouse_orm
pip install ch-orm
---

View File

@ -10,7 +10,7 @@ Defining Models
Models are defined in a way reminiscent of Django's ORM, by subclassing `Model`:
```python
from infi.clickhouse_orm import Model, StringField, DateField, Float32Field, MergeTree
from clickhouse_orm import Model, StringField, DateField, Float32Field, MergeTree
class Person(Model):
@ -68,6 +68,8 @@ For additional details see [here](field_options.md).
The table name used for the model is its class name, converted to lowercase. To override the default name, implement the `table_name` method:
```python
from clickhouse_orm.models import Model
class Person(Model):
...
@ -81,10 +83,14 @@ class Person(Model):
It is possible to define constraints which ClickHouse verifies when data is inserted. Trying to insert invalid records will raise a `ServerError`. Each constraint has a name and an expression to validate. For example:
```python
from clickhouse_orm.models import Model, Constraint
from clickhouse_orm.funcs import F
from clickhouse_orm.fields import DateTimeField
class Person(Model):
    ...
    birthday = DateTimeField()

    # Ensure that the birthday is not a future date
    birthday_is_in_the_past = Constraint(birthday <= F.today())
```
@ -95,10 +101,17 @@ Models that use an engine from the `MergeTree` family can define additional inde
For example:
```python
from clickhouse_orm.models import Model, Index
from clickhouse_orm.funcs import F
from clickhouse_orm.fields import StringField, Float32Field
class Person(Model):
    ...
    first_name = StringField()
    last_name = StringField()
    height = Float32Field()

    # A minmax index that can help find people taller or shorter than some height
    height_index = Index(height, type=Index.minmax(), granularity=2)
@ -116,7 +129,7 @@ Once you have a model, you can create model instances:
>>> dan = Person(first_name='Dan', last_name='Schwartz')
>>> suzy = Person(first_name='Suzy', last_name='Jones')
>>> dan.first_name
u'Dan'
'Dan'
When values are assigned to model fields, they are immediately converted to their Pythonic data type. In case the value is invalid, a `ValueError` is raised:
@ -133,9 +146,11 @@ Inserting to the Database
To write your instances to ClickHouse, you need a `Database` instance:
from infi.clickhouse_orm import Database
```python
from clickhouse_orm import Database
db = Database('my_test_db')
db = Database('my_test_db')
```
This automatically connects to <http://localhost:8123> and creates a database called my_test_db, unless it already exists. If necessary, you can specify a different database URL and optional credentials:
@ -247,4 +262,4 @@ Note that `order_by` must be chosen so that the ordering is unique, otherwise th
---
[<< Overview](index.md) | [Table of Contents](toc.md) | [Expressions >>](expressions.md)
[<< Overview](index.md) | [Table of Contents](toc.md) | [Async Databases >>](async_databases.md)

View File

@ -11,6 +11,11 @@ This queryset matches all Person instances in the database. You can get these in
    for person in qs:
        print(person.first_name, person.last_name)

For AioDatabase instances:

    async for person in qs:
        print(person.first_name, person.last_name)
Filtering
---------
@ -88,24 +93,26 @@ qs = qs.filter(x__gt=100, y__lt=20, terrain='water')
```
Below are all the supported operators.
| Operator | Equivalent SQL | Comments |
| -------- | -------------------------------------------- | ---------------------------------- |
| `eq` | `field = value` | |
| `ne` | `field != value` | |
| `gt` | `field > value` | |
| `gte` | `field >= value` | |
| `lt` | `field < value` | |
| `lte` | `field <= value` | |
| `between` | `field BETWEEN value1 AND value2` | |
| `in` | `field IN (values)` | |
| `not_in` | `field NOT IN (values)` | |
| `contains` | `field LIKE '%value%'` | For string fields only |
| `startswith` | `field LIKE 'value%'` | For string fields only |
| `endswith` | `field LIKE '%value'` | For string fields only |
| `icontains` | `lowerUTF8(field) LIKE lowerUTF8('%value%')` | For string fields only |
| `istartswith` | `lowerUTF8(field) LIKE lowerUTF8('value%')` | For string fields only |
| `iendswith` | `lowerUTF8(field) LIKE lowerUTF8('%value')` | For string fields only |
| `iexact` | `lowerUTF8(field) = lowerUTF8(value)` | For string fields only |
| Operator | Equivalent SQL | Comments |
|---------------|----------------------------------------------| ---------------------------------- |
| `eq` | `field = value` | |
| `ne` | `field != value` | |
| `gt` | `field > value` | |
| `gte` | `field >= value` | |
| `lt` | `field < value` | |
| `lte` | `field <= value` | |
| `between` | `field BETWEEN value1 AND value2` | |
| `in` | `field IN (values)` | |
| `gin` | `field GLOBAL IN (values)` | |
| `not_in` | `field NOT IN (values)` | |
| `not_gin` | `field NOT GLOBAL IN (values)` | |
| `contains` | `field LIKE '%value%'` | For string fields only |
| `startswith` | `field LIKE 'value%'` | For string fields only |
| `endswith` | `field LIKE '%value'` | For string fields only |
| `icontains` | `lowerUTF8(field) LIKE lowerUTF8('%value%')` | For string fields only |
| `istartswith` | `lowerUTF8(field) LIKE lowerUTF8('value%')` | For string fields only |
| `iendswith` | `lowerUTF8(field) LIKE lowerUTF8('%value')` | For string fields only |
| `iexact` | `lowerUTF8(field) = lowerUTF8(value)` | For string fields only |
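The newly added `gin` and `not_gin` operators behave like `in` and `not_in` but emit `GLOBAL IN`, so the right-hand set is evaluated once on the initiating server and shipped to the remote shards; this matters when filtering Distributed tables. A sketch (model and values assumed):
```python
qs = Person.objects_in(database).filter(first_name__gin=['Dan', 'Suzy'])
# WHERE first_name GLOBAL IN ('Dan', 'Suzy')
```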
Counting and Checking Existence
-------------------------------
@ -114,6 +121,9 @@ Use the `count` method to get the number of matches:
Person.objects_in(database).count()
# aio
# await Person.objects_in(database).count()
To check if there are any matches at all, you can use any of the following equivalent options:
if qs.count(): ...
@ -164,7 +174,7 @@ Adds a FINAL modifier to the query, meaning that the selected data is fully "col
Slicing
-------
It is possible to get a specific item from the queryset by index:
It is possible to get a specific item from the queryset by index (**not applicable to AioDatabase**):
qs = Person.objects_in(database).order_by('last_name', 'first_name')
first = qs[0]
@ -175,6 +185,9 @@ It is also possible to get a range of instances using a slice. This returns a queryset
first_ten_people = list(qs[:10])
next_ten_people = list(qs[10:20])
# first_ten_people = [_ async for _ in qs[:10]]
# next_ten_people = [_ async for _ in qs[10:20]]
You should use `order_by` to ensure a consistent ordering of the results.
Trying to use negative indexes or a slice with a step (e.g. [0 : 100 : 2]) is not supported and will raise an `AssertionError`.

View File

@ -20,12 +20,16 @@
* [Counting](models_and_databases.md#counting)
* [Pagination](models_and_databases.md#pagination)
* [Async Databases](async_databases.md#async-databases)
* [Insert from the AioDatabase](async_databases.md#insert-from-the-aiodatabase)
* [Reading from the AioDatabase](async_databases.md#reading-from-the-aiodatabase)
* [Querysets](querysets.md#querysets)
* [Filtering](querysets.md#filtering)
* [Using IN and NOT IN](querysets.md#using-in-and-not-in)
* [Specifying PREWHERE conditions](querysets.md#specifying-prewhere-conditions)
* [Old-style filter conditions](querysets.md#old-style-filter-conditions)
* [Counting and Checking Existence](querysets.md#counting-and-checking-existence)
* [Using IN and NOT IN](querysets.md#using-in-and-not-in)
* [Specifying PREWHERE conditions](querysets.md#specifying-prewhere-conditions)
* [Old-style filter conditions](querysets.md#old-style-filter-conditions)
* [Counting and Checking Existence](querysets.md#counting-and-checking-existence)
* [Ordering](querysets.md#ordering)
* [Omitting Fields](querysets.md#omitting-fields)
* [Distinct](querysets.md#distinct)
@ -38,6 +42,7 @@
* [Field Options](field_options.md#field-options)
* [default](field_options.md#default)
* [db_column](field_options.md#db_column)
* [alias / materialized](field_options.md#alias-/-materialized)
* [codec](field_options.md#codec)
* [readonly](field_options.md#readonly)
@ -46,13 +51,14 @@
* [DateTimeField and Time Zones](field_types.md#datetimefield-and-time-zones)
* [Working with enum fields](field_types.md#working-with-enum-fields)
* [Working with array fields](field_types.md#working-with-array-fields)
* [Working with tuple fields](field_types.md#working-with-tuple-fields)
* [Working with nullable fields](field_types.md#working-with-nullable-fields)
* [Working with LowCardinality fields](field_types.md#working-with-lowcardinality-fields)
* [Working with LowCardinality fields](field_types.md#working-with-lowcardinality-fields)
* [Creating custom field types](field_types.md#creating-custom-field-types)
* [Table Engines](table_engines.md#table-engines)
* [Simple Engines](table_engines.md#simple-engines)
* [Engines in the MergeTree Family](table_engines.md#engines-in-the-mergetree-family)
* [Engines in the MergeTree Family](table_engines.md#engines-in-the-mergetree-family)
* [Custom partitioning](table_engines.md#custom-partitioning)
* [Primary key](table_engines.md#primary-key)
* [Data Replication](table_engines.md#data-replication)

View File

@ -50,9 +50,9 @@ for row in QueryLog.objects_in(db).filter(QueryLog.query_duration_ms > 10000):
## Convenient ways to import ORM classes
You can now import all ORM classes directly from `infi.clickhouse_orm`, without worrying about sub-modules. For example:
You can now import all ORM classes directly from `clickhouse_orm`, without worrying about sub-modules. For example:
```python
from infi.clickhouse_orm import Database, Model, StringField, DateTimeField, MergeTree
from clickhouse_orm import Database, Model, StringField, DateTimeField, MergeTree
```
See [Importing ORM Classes](importing_orm_classes.md).

View File

@ -29,6 +29,7 @@ def _get_default_arg(args, defaults, arg_index):
value = '"%s"' % value
return DefaultArgSpec(True, value)
def get_method_sig(method):
""" Given a function, it returns a string that pretty much looks how the
function signature would be written in python.
@ -42,8 +43,8 @@ def get_method_sig(method):
# list of defaults are returned in separate array.
# eg: ArgSpec(args=['first_arg', 'second_arg', 'third_arg'],
# varargs=None, keywords=None, defaults=(42, 'something'))
argspec = inspect.getargspec(method)
arg_index=0
argspec = inspect.getfullargspec(method)
arg_index = 0
args = []
# Use the args and defaults array returned by argspec and find out
@ -58,8 +59,8 @@ def get_method_sig(method):
arg_index += 1
if argspec.varargs:
args.append('*' + argspec.varargs)
if argspec.keywords:
args.append('**' + argspec.keywords)
if argspec.varkw:
args.append('**' + argspec.varkw)
return "%s(%s)" % (method.__name__, ", ".join(args[1:]))
@ -120,18 +121,20 @@ def all_subclasses(cls):
if __name__ == '__main__':
from infi.clickhouse_orm import database
from infi.clickhouse_orm import fields
from infi.clickhouse_orm import engines
from infi.clickhouse_orm import models
from infi.clickhouse_orm import query
from infi.clickhouse_orm import funcs
from infi.clickhouse_orm import system_models
from clickhouse_orm import database
from clickhouse_orm import fields
from clickhouse_orm import engines
from clickhouse_orm import models
from clickhouse_orm import query
from clickhouse_orm import funcs
from clickhouse_orm import system_models
from clickhouse_orm.aio import database as aio_database
print('Class Reference')
print('===============')
print()
module_doc([database.Database, database.DatabaseException])
module_doc([aio_database.AioDatabase])
module_doc([models.Model, models.BufferModel, models.MergeModel, models.DistributedModel, models.Constraint, models.Index])
module_doc(sorted([fields.Field] + all_subclasses(fields.Field), key=lambda x: x.__name__), False)
module_doc([engines.Engine] + all_subclasses(engines.Engine), False)

View File

@ -1,13 +1,14 @@
generate_one() {
# Converts Markdown to HTML using Pandoc, and then extracts the header tags
pandoc "$1" | python "../scripts/html_to_markdown_toc.py" "$1" >> toc.md
pandoc "$1" | python3 "../scripts/html_to_markdown_toc.py" "$1" >> toc.md
}
printf "# Table of Contents\n\n" > toc.md
generate_one "index.md"
generate_one "models_and_databases.md"
generate_one "async_databases.md"
generate_one "querysets.md"
generate_one "field_options.md"
generate_one "field_types.md"

View File

@ -51,6 +51,7 @@ class AioDatabase(Database):
    ):
        r = await super()._send(data, settings, stream)
        if r.status_code != 200:
            await r.aread()  # read the (possibly streamed) body so r.text is available
            raise ServerError(r.text)
        return r
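The added `aread()` matters because the response may have been requested as a stream, and httpx (which the `aread()` call suggests is the underlying client) only exposes `.text` after the body has been read. A standalone sketch of the failure mode, not project code:
```python
import asyncio

import httpx

async def demo():
    async with httpx.AsyncClient() as client:
        async with client.stream('GET', 'https://example.org') as r:
            await r.aread()  # without this, r.text raises httpx.ResponseNotRead
            print(r.text[:60])

asyncio.run(demo())
```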
@ -85,11 +86,6 @@ class AioDatabase(Database):
"""
Creates the database on the ClickHouse server if it does not already exist.
"""
if not self._init:
raise DatabaseException(
'The AioDatabase object must execute the init method before it can be used'
)
await self._send('CREATE DATABASE IF NOT EXISTS `%s`' % self.db_name)
self.db_exists = True
@ -97,11 +93,6 @@ class AioDatabase(Database):
"""
Deletes the database on the ClickHouse server.
"""
if not self._init:
raise DatabaseException(
'The AioDatabase object must execute the init method before it can be used'
)
await self._send('DROP DATABASE `%s`' % self.db_name)
self.db_exists = False

View File

@ -100,7 +100,7 @@ class Database:
- `username`: optional connection credentials.
- `password`: optional connection credentials.
- `readonly`: use a read-only connection.
- `autocreate`: automatically create the database
- `auto_create`: automatically create the database
if it does not exist (unless in readonly mode).
- `timeout`: the connection timeout in seconds.
- `verify_ssl_cert`: whether to verify the server's certificate when connecting via HTTPS.
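The renamed keyword in action, in a sketch mirroring `test_nonexisting_db` from this commit:
```python
from clickhouse_orm.aio.database import AioDatabase

async def main():
    db = AioDatabase('db_not_here', auto_create=False)
    await db.init()             # connects, but issues no CREATE DATABASE
    await db.create_database()  # create the database explicitly when ready
```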

View File

@ -49,7 +49,7 @@ class Index:
    name: Optional[str] = None  # this is set by the parent model
    parent: Optional[type["Model"]] = None  # this is set by the parent model

    def __init__(self, expr: F, type: str, granularity: int):
    def __init__(self, expr: Field | F | tuple, type: str, granularity: int):
        """
        Initializer.
@ -238,10 +238,14 @@ class ModelBase(type):
            return orm_fields.ArrayField(inner_field)
        # Tuples
        if db_type.startswith('Tuple'):
            types = [s.strip() for s in db_type[6:-1].split(',')]
            return orm_fields.TupleField(name_fields=[
                (str(i), cls.create_ad_hoc_field(type_name)) for i, type_name in enumerate(types)]
            )
            types = [s.strip().split(' ') for s in db_type[6:-1].split(',')]
            name_fields = []
            for i, tp in enumerate(types):
                if len(tp) == 2:
                    # named element, e.g. 'Tuple(t Float32)' -> field name 't'
                    name_fields.append((tp[0], cls.create_ad_hoc_field(tp[1])))
                else:
                    # unnamed element, e.g. 'Tuple(Float32)' -> positional name '0'
                    name_fields.append((str(i), cls.create_ad_hoc_field(tp[0])))
            return orm_fields.TupleField(name_fields=name_fields)
        # FixedString
        if db_type.startswith('FixedString'):
            length = int(db_type[12:-1])
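The effect of the new branch is that both named and unnamed Tuple columns can now be reflected. A sketch of the two spellings (key naming follows the code above):
```python
from clickhouse_orm.models import ModelBase

ModelBase.create_ad_hoc_field('Tuple(Float32, UInt8)')      # unnamed -> keys '0', '1'
ModelBase.create_ad_hoc_field('Tuple(t Float32, h UInt8)')  # named -> keys 't', 'h'
```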

View File

@ -5,6 +5,7 @@ from clickhouse_orm.database import Database
from clickhouse_orm.models import Model
from clickhouse_orm.fields import *
from clickhouse_orm.engines import *
from clickhouse_orm.aio.database import AioDatabase
import logging
logging.getLogger("requests").setLevel(logging.WARNING)
@ -35,6 +36,31 @@ class TestCaseWithData(unittest.TestCase):
yield Person(**entry)
class TestCaseWithAsyncData(unittest.IsolatedAsyncioTestCase):

    async def asyncSetUp(self):
        self.database = AioDatabase('test-db', log_statements=True)
        await self.database.init()
        await self.database.create_table(Person)

    async def asyncTearDown(self):
        await self.database.drop_table(Person)
        await self.database.drop_database()

    async def _insert_all(self):
        await self.database.insert(self._sample_data())
        self.assertTrue(await self.database.count(Person))

    async def _insert_and_check(self, data, count, batch_size=1000):
        await self.database.insert(data, batch_size=batch_size)
        self.assertEqual(count, await self.database.count(Person))
        for instance in data:
            self.assertEqual(self.database, instance.get_database())

    def _sample_data(self):
        for entry in data:
            yield Person(**entry)
class Person(Model):

331
tests/test_aiodatabase.py Normal file
View File

@ -0,0 +1,331 @@
# -*- coding: utf-8 -*-
import unittest
import datetime
from clickhouse_orm.database import ServerError, DatabaseException
from clickhouse_orm.query import Q
from clickhouse_orm.funcs import F
from tests.base_test_with_data import *
class DatabaseTestCase(TestCaseWithAsyncData):
async def test_insert__generator(self):
await self._insert_and_check(self._sample_data(), len(data))
async def test_insert__list(self):
await self._insert_and_check(list(self._sample_data()), len(data))
async def test_insert__iterator(self):
await self._insert_and_check(iter(self._sample_data()), len(data))
async def test_insert__empty(self):
await self._insert_and_check([], 0)
async def test_insert__small_batches(self):
await self._insert_and_check(self._sample_data(), len(data), batch_size=10)
async def test_insert__medium_batches(self):
await self._insert_and_check(self._sample_data(), len(data), batch_size=100)
async def test_insert__funcs_as_default_values(self):
if self.database.server_version < (20, 1, 2, 4):
raise unittest.SkipTest('Buggy in server versions before 20.1.2.4')
class TestModel(Model):
a = DateTimeField(default=datetime.datetime(2020, 1, 1))
b = DateField(default=F.toDate(a))
c = Int32Field(default=7)
d = Int32Field(default=c * 5)
engine = Memory()
await self.database.create_table(TestModel)
await self.database.insert([TestModel()])
with self.assertRaises(TypeError):
# AioDatabase does not support queryset object value by index
obj = TestModel.objects_in(self.database)[0]
async for t in TestModel.objects_in(self.database):
self.assertEqual(str(t.b), '2020-01-01')
self.assertEqual(t.d, 35)
async def test_count(self):
await self.database.insert(self._sample_data())
self.assertEqual(await self.database.count(Person), 100)
# Conditions as string
self.assertEqual(await self.database.count(Person, "first_name = 'Courtney'"), 2)
self.assertEqual(await self.database.count(Person, "birthday > '2000-01-01'"), 22)
self.assertEqual(await self.database.count(Person, "birthday < '1970-03-01'"), 0)
# Conditions as expression
self.assertEqual(
await self.database.count(Person, Person.birthday > datetime.date(2000, 1, 1)), 22
)
# Conditions as Q object
self.assertEqual(
await self.database.count(Person, Q(birthday__gt=datetime.date(2000, 1, 1))), 22
)
async def test_select(self):
await self._insert_and_check(self._sample_data(), len(data))
query = "SELECT * FROM `test-db`.person WHERE first_name = 'Whitney' ORDER BY last_name"
results = [person async for person in self.database.select(query, Person)]
self.assertEqual(len(results), 2)
self.assertEqual(results[0].last_name, 'Durham')
self.assertEqual(results[0].height, 1.72)
self.assertEqual(results[1].last_name, 'Scott')
self.assertEqual(results[1].height, 1.70)
self.assertEqual(results[0].get_database(), self.database)
self.assertEqual(results[1].get_database(), self.database)
async def test_dollar_in_select(self):
query = "SELECT * FROM $table WHERE first_name = '$utm_source'"
[_ async for _ in self.database.select(query, Person)]
async def test_select_partial_fields(self):
await self._insert_and_check(self._sample_data(), len(data))
query = "SELECT first_name, last_name FROM `test-db`.person WHERE first_name = 'Whitney' ORDER BY last_name"
results = [person async for person in self.database.select(query, Person)]
self.assertEqual(len(results), 2)
self.assertEqual(results[0].last_name, 'Durham')
self.assertEqual(results[0].height, 0) # default value
self.assertEqual(results[1].last_name, 'Scott')
self.assertEqual(results[1].height, 0) # default value
self.assertEqual(results[0].get_database(), self.database)
self.assertEqual(results[1].get_database(), self.database)
async def test_select_ad_hoc_model(self):
await self._insert_and_check(self._sample_data(), len(data))
query = "SELECT * FROM `test-db`.person WHERE first_name = 'Whitney' ORDER BY last_name"
results = [person async for person in self.database.select(query)]
self.assertEqual(len(results), 2)
self.assertEqual(results[0].__class__.__name__, 'AdHocModel')
self.assertEqual(results[0].last_name, 'Durham')
self.assertEqual(results[0].height, 1.72)
self.assertEqual(results[1].last_name, 'Scott')
self.assertEqual(results[1].height, 1.70)
self.assertEqual(results[0].get_database(), self.database)
self.assertEqual(results[1].get_database(), self.database)
async def test_select_with_totals(self):
await self._insert_and_check(self._sample_data(), len(data))
query = "SELECT last_name, sum(height) as height FROM `test-db`.person GROUP BY last_name WITH TOTALS"
results = [person async for person in self.database.select(query)]
total = sum(r.height for r in results[:-1])
# Last line has an empty last name, and total of all heights
self.assertFalse(results[-1].last_name)
self.assertEqual(total, results[-1].height)
async def test_pagination(self):
await self._insert_and_check(self._sample_data(), len(data))
# Try different page sizes
for page_size in (1, 2, 7, 10, 30, 100, 150):
# Iterate over pages and collect all instances
page_num = 1
instances = set()
while True:
page = await self.database.paginate(Person, 'first_name, last_name', page_num, page_size)
self.assertEqual(page.number_of_objects, len(data))
self.assertGreater(page.pages_total, 0)
[instances.add(obj.to_tsv()) for obj in page.objects]
if page.pages_total == page_num:
break
page_num += 1
# Verify that all instances were returned
self.assertEqual(len(instances), len(data))
async def test_pagination_last_page(self):
await self._insert_and_check(self._sample_data(), len(data))
# Try different page sizes
for page_size in (1, 2, 7, 10, 30, 100, 150):
# Ask for the last page in two different ways and verify equality
page_a = await self.database.paginate(Person, 'first_name, last_name', -1, page_size)
page_b = await self.database.paginate(Person, 'first_name, last_name',
page_a.pages_total, page_size)
self.assertEqual(page_a[1:], page_b[1:])
self.assertEqual(
[obj.to_tsv() for obj in page_a.objects], [obj.to_tsv() for obj in page_b.objects]
)
async def test_pagination_empty_page(self):
for page_num in (-1, 1, 2):
page = await self.database.paginate(
Person, 'first_name, last_name', page_num, 10, conditions="first_name = 'Ziggy'"
)
self.assertEqual(page.number_of_objects, 0)
self.assertEqual(page.objects, [])
self.assertEqual(page.pages_total, 0)
self.assertEqual(page.number, max(page_num, 1))
async def test_pagination_invalid_page(self):
await self._insert_and_check(self._sample_data(), len(data))
for page_num in (0, -2, -100):
with self.assertRaises(ValueError):
await self.database.paginate(Person, 'first_name, last_name', page_num, 100)
async def test_pagination_with_conditions(self):
await self._insert_and_check(self._sample_data(), len(data))
# Conditions as string
page = await self.database.paginate(
Person, 'first_name, last_name', 1, 100, conditions="first_name < 'Ava'"
)
self.assertEqual(page.number_of_objects, 10)
# Conditions as expression
page = await self.database.paginate(
Person, 'first_name, last_name', 1, 100, conditions=Person.first_name < 'Ava'
)
self.assertEqual(page.number_of_objects, 10)
# Conditions as Q object
page = await self.database.paginate(
Person, 'first_name, last_name', 1, 100, conditions=Q(first_name__lt='Ava')
)
self.assertEqual(page.number_of_objects, 10)
async def test_special_chars(self):
s = u'אבגד \\\'"`,.;éåäöšž\n\t\0\b\r'
p = Person(first_name=s)
await self.database.insert([p])
p = [_ async for _ in self.database.select("SELECT * from $table", Person)][0]
self.assertEqual(p.first_name, s)
async def test_raw(self):
await self._insert_and_check(self._sample_data(), len(data))
query = "SELECT * FROM `test-db`.person WHERE first_name = 'Whitney' ORDER BY last_name"
results = await self.database.raw(query)
self.assertEqual(results, "Whitney\tDurham\t1977-09-15\t1.72\t\\N\nWhitney\tScott\t1971-07-04\t1.7\t\\N\n")
async def test_not_init(self):
with self.assertRaises(DatabaseException) as cm:
db = AioDatabase(self.database.db_name)
await db.create_table(Person)
exc = cm.exception
self.assertTrue(exc.args[0].startswith('The AioDatabase object must execute the init'))
async def test_read_only(self):
with self.assertRaises(DatabaseException) as cm:
db = AioDatabase('test-db-2', readonly=True)
await db.init()
exc = cm.exception
self.assertTrue(exc.args[0].startswith('Database does not exist'))
async def test_invalid_user(self):
with self.assertRaises(ServerError) as cm:
db = AioDatabase(self.database.db_name, username='default', password='wrong')
await db.init()
exc = cm.exception
print(exc.code, exc.message)
if exc.code == 193: # ClickHouse version < 20.3
self.assertTrue(exc.message.startswith('Wrong password for user default'))
elif exc.code == 516: # ClickHouse version >= 20.3
self.assertTrue(exc.message.startswith('default: Authentication failed'))
else:
raise Exception('Unexpected error code - %s %s' % (exc.code, exc.message))
async def test_nonexisting_db(self):
db = AioDatabase('db_not_here', auto_create=False)
await db.init()
with self.assertRaises(ServerError) as cm:
await db.create_table(Person)
exc = cm.exception
self.assertEqual(exc.code, 81)
self.assertTrue(exc.message.startswith("Database db_not_here doesn't exist"))
# Create and delete the db twice, to ensure db_exists gets updated
for i in range(2):
# Now create the database - should succeed
await db.create_database()
self.assertTrue(db.db_exists)
await db.create_table(Person)
# Drop the database
await db.drop_database()
self.assertFalse(db.db_exists)
async def test_preexisting_db(self):
db = AioDatabase(self.database.db_name, auto_create=False)
await db.init()
await db.count(Person)
async def test_missing_engine(self):
class EnginelessModel(Model):
float_field = Float32Field()
with self.assertRaises(DatabaseException) as cm:
await self.database.create_table(EnginelessModel)
self.assertEqual(str(cm.exception), 'EnginelessModel class must define an engine')
async def test_potentially_problematic_field_names(self):
class Model1(Model):
system = StringField()
readonly = StringField()
engine = Memory()
instance = Model1(system='s', readonly='r')
self.assertEqual(instance.to_dict(), dict(system='s', readonly='r'))
await self.database.create_table(Model1)
await self.database.insert([instance])
instance = [_ async for _ in Model1.objects_in(self.database)[0:10]][0]
self.assertEqual(instance.to_dict(), dict(system='s', readonly='r'))
async def test_does_table_exist(self):
class Person2(Person):
pass
self.assertTrue(await self.database.does_table_exist(Person))
self.assertFalse(await self.database.does_table_exist(Person2))
async def test_add_setting(self):
# Non-string setting name should not be accepted
with self.assertRaises(AssertionError):
self.database.add_setting(0, 1)
# Add a setting and see that it makes the query fail
self.database.add_setting('max_columns_to_read', 1)
with self.assertRaises(ServerError):
[_ async for _ in self.database.select('SELECT * from system.tables')]
# Remove the setting and see that now it works
self.database.add_setting('max_columns_to_read', None)
[_ async for _ in self.database.select('SELECT * from system.tables')]
async def test_create_ad_hoc_field(self):
# Tests that create_ad_hoc_field works for all column types in the database
from clickhouse_orm.models import ModelBase
query = "SELECT DISTINCT type FROM system.columns"
async for row in self.database.select(query):
if row.type.startswith('Map'):
continue # Not supported yet
ModelBase.create_ad_hoc_field(row.type)
async def test_get_model_for_table(self):
# Tests that get_model_for_table works for a non-system model
model = await self.database.get_model_for_table('person')
self.assertFalse(model.is_system_model())
self.assertFalse(model.is_read_only())
self.assertEqual(model.table_name(), 'person')
# Read a few records
[_ async for _ in model.objects_in(self.database)[:10]]
# Inserts should work too
await self.database.insert([
model(first_name='aaa', last_name='bbb', height=1.77)
])
async def test_get_model_for_table__system(self):
# Tests that get_model_for_table works for all system tables
query = "SELECT name FROM system.tables WHERE database='system'"
async for row in self.database.select(query):
print(row.name)
if row.name in ('distributed_ddl_queue',):
continue # Not supported
try:
model = await self.database.get_model_for_table(row.name, system_table=True)
except NotImplementedError:
continue # Table contains an unsupported field type
self.assertTrue(model.is_system_model())
self.assertTrue(model.is_read_only())
self.assertEqual(model.table_name(), row.name)
# Read a few records
try:
[_ async for _ in model.objects_in(self.database)[:10]]
except ServerError as e:
if 'Not enough privileges' in e.message:
pass
elif 'no certificate file has been specified' in e.message:
pass
elif 'table must contain condition' in e.message:
pass
else:
raise

View File

@ -3,8 +3,8 @@ from datetime import date
from clickhouse_orm.database import Database
from clickhouse_orm.models import Model, NO_VALUE
from clickhouse_orm.fields import *
from clickhouse_orm.engines import *
from clickhouse_orm.fields import Int32Field, StringField, DateField
from clickhouse_orm.engines import MergeTree
from clickhouse_orm.funcs import F
@ -70,7 +70,7 @@ class ModelWithAliasFields(Model):
date_field = DateField()
str_field = StringField()
alias_str = StringField(alias=u'str_field')
alias_str = StringField(alias='str_field')
alias_int = Int32Field(alias='int_field')
alias_date = DateField(alias='date_field')
alias_func = Int32Field(alias=F.toYYYYMM(date_field))

View File

@ -3,8 +3,8 @@ from datetime import date
from clickhouse_orm.database import Database
from clickhouse_orm.models import Model
from clickhouse_orm.fields import *
from clickhouse_orm.engines import *
from clickhouse_orm.fields import ArrayField, DateField, StringField, Int32Field
from clickhouse_orm.engines import MergeTree
class ArrayFieldsTest(unittest.TestCase):

View File

@ -0,0 +1,46 @@
import unittest
from datetime import date
import os
from clickhouse_orm.database import Database, DatabaseException, ServerError
from clickhouse_orm.engines import Memory, MergeTree
from clickhouse_orm.fields import UUIDField, DateField
from clickhouse_orm.models import TemporaryModel, Model
from clickhouse_orm.session import in_session
class TemporaryTest(unittest.TestCase):

    def setUp(self):
        self.database = Database('test-db', log_statements=True)

    def tearDown(self):
        self.database.drop_database()

    def test_create_table(self):
        with self.assertRaises(ServerError):
            self.database.create_table(TemporaryTable)
        with self.assertRaises(AssertionError):
            self.database.create_table(TemporaryTable2)
        with in_session():
            self.database.create_table(TemporaryTable)
            count = TemporaryTable.objects_in(self.database).count()
            self.assertEqual(count, 0)
        # Check if temporary table is cleaned up
        with self.assertRaises(ServerError):
            TemporaryTable.objects_in(self.database).count()


class TemporaryTable(TemporaryModel):
    date_field = DateField()
    uuid = UUIDField()

    engine = Memory()


class TemporaryTable2(TemporaryModel):
    date_field = DateField()
    uuid = UUIDField()

    engine = MergeTree('date_field', ('date_field',))

View File

@ -0,0 +1,63 @@
import unittest
from datetime import date
from clickhouse_orm.database import Database
from clickhouse_orm.models import Model
from clickhouse_orm.fields import TupleField, DateField, StringField, Int32Field, ArrayField
from clickhouse_orm.engines import MergeTree
class TupleFieldsTest(unittest.TestCase):

    def setUp(self):
        self.database = Database('test-db', log_statements=True)
        self.database.create_table(ModelWithTuple)

    def tearDown(self):
        self.database.drop_database()

    def test_insert_and_select(self):
        instance = ModelWithTuple(
            date_field='2016-08-30',
            tuple_str=['goodbye,', 'cruel'],
            tuple_date=['2010-01-01', '2020-01-01'],
        )
        self.database.insert([instance])
        query = 'SELECT * from $db.modelwithtuple ORDER BY date_field'
        for model_cls in (ModelWithTuple, None):
            results = list(self.database.select(query, model_cls))
            self.assertEqual(len(results), 1)
            self.assertEqual(results[0].tuple_str, instance.tuple_str)
            self.assertEqual(results[0].tuple_int, instance.tuple_int)
            self.assertEqual(results[0].tuple_date, instance.tuple_date)

    def test_conversion(self):
        instance = ModelWithTuple(
            tuple_int=('1', '2'),
            tuple_date=['2010-01-01', '2020-01-01']
        )
        self.assertEqual(instance.tuple_str, ('', ''))
        self.assertEqual(instance.tuple_int, (1, 2))
        self.assertEqual(instance.tuple_date, (date(2010, 1, 1), date(2020, 1, 1)))

    def test_assignment_error(self):
        instance = ModelWithTuple()
        for value in (7, 'x', [date.today()], ['aaa'], [None]):
            with self.assertRaises(ValueError):
                instance.tuple_int = value

    def test_invalid_inner_field(self):
        for x in ([('a', DateField)], [('b', None)], [('c', "")], [('d', ArrayField(StringField()))]):
            with self.assertRaises(AssertionError):
                TupleField(x)


class ModelWithTuple(Model):
    date_field = DateField()
    tuple_str = TupleField([('a', StringField()), ('b', StringField())])
    tuple_int = TupleField([('a', Int32Field()), ('b', Int32Field())])
    tuple_date = TupleField([('a', DateField()), ('b', DateField())])
    tuple_mix = TupleField([('a', StringField()), ('b', Int32Field()), ('c', DateField())])

    engine = MergeTree('date_field', ('date_field',))