test: workflows

sw 2022-06-03 14:23:49 +08:00
parent de915bb00a
commit 96364b5418
12 changed files with 107 additions and 36 deletions

.github/workflows/unittest.yml (new file)

@@ -0,0 +1,42 @@
+name: Docker
+
+on:
+  push:
+    # Publish `master` as Docker `latest` image.
+    branches:
+      - master
+      - develop
+
+    # Publish `v1.2.3` tags as releases.
+    tags:
+      - v*
+
+  # Run tests for any PRs.
+  pull_request:
+
+env:
+  IMAGE_NAME: ch_orm
+
+jobs:
+  # Run tests.
+  # See also https://docs.docker.com/docker-hub/builds/automated-testing/
+  test:
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v2
+
+      - name: Pull images
+        run: |
+          docker pull clickhouse/clickhouse-server
+          docker run -d --network=host --name some-clickhouse-server --ulimit nofile=262144:262144 clickhouse/clickhouse-server
+
+      - name: Build and Install
+        run: |
+          pip install build
+          python -m build
+          pip install dist/*
+
+      - name: UnitTest
+        run: |
+          python -m unittest
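Because the server container runs with --network=host, the test suite reaches ClickHouse on its default HTTP port on localhost. A minimal reachability check in that spirit (a sketch; the URL assumes ClickHouse's default http://localhost:8123):

# Sanity-check that the CI container is up before the suite runs
# (assumes ClickHouse's default HTTP interface on localhost:8123).
import urllib.request

with urllib.request.urlopen('http://localhost:8123/ping') as resp:
    assert resp.read() == b'Ok.\n'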

pyproject.toml

@@ -8,8 +8,13 @@ description = "A Python library for working with the ClickHouse database"
 readme = "README.md"
 keywords = ["ClickHouse", "ORM", 'DB', 'DATABASE', 'OLAP']
 license = {text = "BSD License"}
+homepage = "https://github.com/sswest/ch-orm"
 classifiers = [
     "Programming Language :: Python :: 3",
+    "Programming Language :: Python :: 3.7",
+    "Programming Language :: Python :: 3.8",
+    "Programming Language :: Python :: 3.9",
+    "Programming Language :: Python :: 3.10",
     "Operating System :: OS Independent",
     "Topic :: Software Development :: Libraries :: Python Modules",
     "Topic :: Database"
@@ -20,7 +25,7 @@ dependencies = [
     "iso8601 >= 0.1.12",
     "setuptools"
 ]
-version = "0.1.0"
+version = "0.1.1"

 [tool.setuptools.packages.find]
 where = ["src"]
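The [tool.setuptools.packages.find] table declares a src layout, so package discovery roughly behaves like this sketch (run from the repo root; the discovered names are an assumption based on this repo):

# With where=["src"], setuptools searches src/ for importable packages,
# so the installed import name is clickhouse_orm despite the src layout.
from setuptools import find_packages

print(find_packages(where='src'))  # e.g. ['clickhouse_orm', ...]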

src/clickhouse_orm/aio/database.py

@@ -45,7 +45,7 @@ class AioDatabase(Database):
     async def _send(
         self,
-        data: str | bytes,
+        data: str | bytes | AsyncGenerator,
         settings: dict = None,
         stream: bool = False
     ):
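Widening `data` to accept an AsyncGenerator lets `_send` stream a request body instead of materializing it up front. A minimal sketch of such a producer (`tsv_chunks` is hypothetical, not part of ch-orm):

from typing import AsyncGenerator

# Yield the body chunk by chunk; this is the shape of object the new
# `data: str | bytes | AsyncGenerator` annotation admits.
async def tsv_chunks(rows) -> AsyncGenerator[bytes, None]:
    for row in rows:
        yield ('\t'.join(map(str, row)) + '\n').encode()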
@@ -255,8 +255,10 @@ class AioDatabase(Database):
                     field_types = parse_tsv(line)
                     model_class = model_class or ModelBase.create_ad_hoc_model(
                         zip(field_names, field_types))
-                elif line:
+                elif line.strip():
                     yield model_class.from_tsv(line, field_names, self.server_timezone, self)
+        except StopIteration:
+            return
         finally:
             await r.aclose()
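Both this async reader and the sync one in the next file switch from `if line:` to `if line.strip():`. The blank separator line left by WITH TOTALS can still carry a newline, and a newline-only string is truthy, so the old check failed to skip it:

line = '\n'                    # "blank" separator as it may arrive off the wire
assert bool(line)              # truthy, so `if line:` would NOT skip it
assert not bool(line.strip())  # stripped, it is skipped as intended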

src/clickhouse_orm/database.py

@@ -320,8 +320,10 @@ class Database:
             model_class = ModelBase.create_ad_hoc_model(zip(field_names, field_types))
             for line in lines:
                 # skip blank line left by WITH TOTALS modifier
-                if line:
+                if line.strip():
                     yield model_class.from_tsv(line, field_names, self.server_timezone, self)
+        except StopIteration:
+            return
         finally:
             r.close()

@@ -432,7 +434,7 @@
     def _send(
         self,
-        data: str | bytes,
+        data: str | bytes | Generator,
         settings: dict = None,
         stream: bool = False
     ):

src/clickhouse_orm/engines.py

@@ -280,11 +280,11 @@ class Distributed(Engine):
     @property
     def table_name(self) -> str:
-        from clickhouse_orm.models import Model
+        from clickhouse_orm.models import ModelBase
         table = self.table
-        if isinstance(table, Model):
+        if isinstance(table, ModelBase):
             return table.table_name()
         return table
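The fix works because model classes are instances of the metaclass `ModelBase`, not of `Model`, so `isinstance(table, Model)` was always False when `table` held a model class rather than a model instance. A standalone illustration with toy classes (not the real ones):

class ModelBase(type):            # metaclass, as in clickhouse_orm.models
    pass

class Model(metaclass=ModelBase):
    pass

class MyTable(Model):
    pass

assert isinstance(MyTable, ModelBase)   # a class is an instance of its metaclass
assert not isinstance(MyTable, Model)   # the old check never matched a class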

src/clickhouse_orm/models.py

@@ -236,12 +236,12 @@ class ModelBase(type):
         if db_type.startswith('Array'):
             inner_field = cls.create_ad_hoc_field(db_type[6:-1])
             return orm_fields.ArrayField(inner_field)
-        # Tuples (poor man's version - convert to array)
+        # Tuples
         if db_type.startswith('Tuple'):
             types = [s.strip() for s in db_type[6:-1].split(',')]
-            assert len(set(types)) == 1, 'No support for mixed types in tuples - ' + db_type
-            inner_field = cls.create_ad_hoc_field(types[0])
-            return orm_fields.ArrayField(inner_field)
+            return orm_fields.TupleField(name_fields=[
+                (str(i), cls.create_ad_hoc_field(type_name)) for i, type_name in enumerate(types)]
+            )
         # FixedString
         if db_type.startswith('FixedString'):
             length = int(db_type[12:-1])
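Instead of coercing homogeneous tuples to arrays, `create_ad_hoc_field` now builds a `TupleField` with positional names, which supports mixed element types. The parsing step in isolation (note that the naive comma split would mis-handle nested types such as Tuple(Array(UInt8), String)):

db_type = 'Tuple(UInt8, String)'
types = [s.strip() for s in db_type[6:-1].split(',')]   # strip 'Tuple(' and ')'
name_fields = [(str(i), t) for i, t in enumerate(types)]
assert name_fields == [('0', 'UInt8'), ('1', 'String')]

This change is also why the funcs tests below now expect the *CIDRToRange results as tuples rather than lists: ClickHouse returns those ranges as Tuple values, which the ad-hoc TupleField materializes as Python tuples.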

src/clickhouse_orm/query.py

@@ -360,6 +360,9 @@ class QuerySet(Generic[MODEL]):
         queryset._final = self._final
         return queryset

+    def __deepcopy__(self, memodict={}):
+        return self._clone()
+
     def __iter__(self) -> Iterator[MODEL]:
         """
         Iterates over the model instances matching this queryset
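Routing `__deepcopy__` through `_clone()` makes `copy.deepcopy(queryset)` cheap: it duplicates the query state without recursively copying the attached database handle. A sketch of the effect (`qs` is any queryset; `_database` is an internal attribute name, assumed here):

from copy import deepcopy

qs2 = deepcopy(qs)                    # delegates to qs._clone()
assert qs2 is not qs                  # a distinct queryset object
assert qs2._database is qs._database  # sharing the same Database handle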
@@ -712,6 +715,18 @@ class AggregateQuerySet(QuerySet[MODEL]):
         self._limits = base_queryset._limits
         self._distinct = base_queryset._distinct

+    def _clone(self) -> "AggregateQuerySet[MODEL]":
+        queryset = copy(self)
+        queryset._fields = copy(self._fields)
+        queryset._grouping_fields = copy(self._grouping_fields)
+        queryset._calculated_fields = copy(self._calculated_fields)
+        queryset._order_by = copy(self._order_by)
+        queryset._where_q = copy(self._where_q)
+        queryset._prewhere_q = copy(self._prewhere_q)
+        queryset._limits = copy(self._limits)
+        queryset._distinct = copy(self._distinct)
+        return queryset
+
     def group_by(self, *args) -> "AggregateQuerySet[MODEL]":
         """
         This method lets you specify the grouping fields explicitly. The `args` must
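`AggregateQuerySet` needs its own `_clone` because `copy(self)` is shallow; without re-copying the mutable containers (`_fields`, `_grouping_fields`, and so on), a chained call could leak mutations back into its parent. The pattern it protects (hypothetical model and fields):

base = Person.objects_in(db).aggregate('first_name', count=F.count())
ordered = base.order_by('-count')   # operates on a clone of `base`
# `base` keeps its own _order_by; the chained call did not mutate it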

tests/test_buffer.py

@@ -2,7 +2,6 @@
 import unittest

 from clickhouse_orm.models import BufferModel
-from clickhouse_orm.engines import *
 from .base_test_with_data import *

tests/test_database.py

@@ -3,10 +3,6 @@ import unittest
 import datetime

 from clickhouse_orm.database import ServerError, DatabaseException
-from clickhouse_orm.models import Model
-from clickhouse_orm.engines import Memory
-from clickhouse_orm.fields import *
-from clickhouse_orm.funcs import F
 from clickhouse_orm.query import Q
 from .base_test_with_data import *
@@ -190,7 +186,7 @@ class DatabaseTestCase(TestCaseWithData):
             raise Exception('Unexpected error code - %s %s' % (exc.code, exc.message))

     def test_nonexisting_db(self):
-        db = Database('db_not_here', autocreate=False)
+        db = Database('db_not_here', auto_create=False)
         with self.assertRaises(ServerError) as cm:
             db.create_table(Person)
         exc = cm.exception
@@ -207,7 +203,7 @@ class DatabaseTestCase(TestCaseWithData):
         self.assertFalse(db.db_exists)

     def test_preexisting_db(self):
-        db = Database(self.database.db_name, autocreate=False)
+        db = Database(self.database.db_name, auto_create=False)
         db.count(Person)

     def test_missing_engine(self):
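These tests track a keyword rename in the `Database` constructor: `autocreate` is now `auto_create`. Usage is otherwise unchanged:

# Connect without creating the database if it does not exist.
db = Database('db_not_here', auto_create=False)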
@@ -289,5 +285,9 @@ class DatabaseTestCase(TestCaseWithData):
         except ServerError as e:
             if 'Not enough privileges' in e.message:
                 pass
+            elif 'no certificate file has been specified' in e.message:
+                pass
+            elif 'table must contain condition' in e.message:
+                pass
             else:
                 raise

tests/test_funcs.py

@@ -615,14 +615,21 @@ class FuncsTestCase(TestCaseWithData):
         self._test_func(F.IPv4NumToString(F.toUInt32(1)), '0.0.0.1')
         self._test_func(F.IPv4NumToStringClassC(F.toUInt32(1)), '0.0.0.xxx')
         self._test_func(F.IPv4StringToNum('0.0.0.17'), 17)
-        self._test_func(F.IPv6NumToString(F.IPv4ToIPv6(F.IPv4StringToNum('192.168.0.1'))), '::ffff:192.168.0.1')
+        self._test_func(
+            F.IPv6NumToString(F.IPv4ToIPv6(F.IPv4StringToNum('192.168.0.1'))),
+            '::ffff:192.168.0.1'
+        )
         self._test_func(F.IPv6NumToString(F.IPv6StringToNum('2a02:6b8::11')), '2a02:6b8::11')
         self._test_func(F.toIPv4('10.20.30.40'), IPv4Address('10.20.30.40'))
-        self._test_func(F.toIPv6('2001:438:ffff::407d:1bc1'), IPv6Address('2001:438:ffff::407d:1bc1'))
+        self._test_func(
+            F.toIPv6('2001:438:ffff::407d:1bc1'), IPv6Address('2001:438:ffff::407d:1bc1')
+        )
         self._test_func(F.IPv4CIDRToRange(F.toIPv4('192.168.5.2'), 16),
-                        [IPv4Address('192.168.0.0'), IPv4Address('192.168.255.255')])
-        self._test_func(F.IPv6CIDRToRange(F.toIPv6('2001:0db8:0000:85a3:0000:0000:ac1f:8001'), 32),
-                        [IPv6Address('2001:db8::'), IPv6Address('2001:db8:ffff:ffff:ffff:ffff:ffff:ffff')])
+                        (IPv4Address('192.168.0.0'), IPv4Address('192.168.255.255')))
+        self._test_func(
+            F.IPv6CIDRToRange(F.toIPv6('2001:0db8:0000:85a3:0000:0000:ac1f:8001'), 32),
+            (IPv6Address('2001:db8::'), IPv6Address('2001:db8:ffff:ffff:ffff:ffff:ffff:ffff'))
+        )

     def test_aggregate_funcs(self):
         self._test_aggr(F.any(Person.first_name))

tests/test_mutations.py

@@ -1,8 +1,9 @@
 import unittest

-from clickhouse_orm import F
-from .base_test_with_data import *
 from time import sleep
+from clickhouse_orm.funcs import F
+from .base_test_with_data import *

 class MutationsTestCase(TestCaseWithData):

tests/test_querysets.py

@@ -1,18 +1,16 @@
 # -*- coding: utf-8 -*-
 import unittest

-from clickhouse_orm.database import Database
-from clickhouse_orm.query import Q
-from clickhouse_orm.funcs import F
-from .base_test_with_data import *
 from datetime import date, datetime
 from enum import Enum
 from decimal import Decimal
+from clickhouse_orm.query import Q
+from clickhouse_orm.funcs import F
+from .base_test_with_data import *
 from logging import getLogger

 logger = getLogger('tests')

 class QuerySetTestCase(TestCaseWithData):
     def setUp(self):