GitHub Actions testing (#29)

1. Moved testing from Travis CI to GitHub Actions
2. Added a linter and fixed most style errors
3. Added a development section to the README
4. Added a Docker testing environment (quick-start command below)
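As a quick reference for item 4: the Docker test environment added here is defined in docker-compose.yml and documented in docs/development.md, both part of this commit. A single command builds the django-clickhouse image and starts the Redis, PostgreSQL and ClickHouse services before running the test suite:

    docker-compose run run_tests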
M1ha Shvn 2021-07-08 12:27:35 +05:00 committed by GitHub
parent 717c74cfd1
commit 3fba185be2
24 changed files with 283 additions and 155 deletions

.dockerignore (new file, 15 lines added)

@@ -0,0 +1,15 @@
# Docs
docs/
# Python cache files
**/__pycache__/
# Private and public keys
*.key
*.ppk
*.pub
# Hidden apps directories
.github/
.idea/
.gitignore

.github/workflows/python-tests.yml (new file, 90 lines added)

@@ -0,0 +1,90 @@
name: Python unit tests
on:
  push:
    branches: [ master ]
  pull_request:
    branches: [ master ]

jobs:
  build:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ["3.6", "3.7", "3.8", "3.9"]
        postgres-version: ["9.6", "10", "11", "12"]
        django-version: ["2.1", "2.2", "3.0", "3.1", "3.2"]
        clickhouse-version: ["latest"]
        redis-version: ["latest"]

    services:
      postgres:
        image: postgres:${{ matrix.postgres-version }}
        env:
          POSTGRES_PASSWORD: postgres
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          - 5432:5432

      clickhouse:
        image: yandex/clickhouse-server:${{ matrix.clickhouse-version }}
        ports:
          - 8123:8123

      redis:
        image: redis:${{ matrix.redis-version }}
        ports:
          - 6379:6379

    steps:
      - uses: actions/checkout@v2

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}

      - name: Cache pip
        uses: actions/cache@v2
        with:
          # This path is specific to Ubuntu
          path: ~/.cache/pip
          # Look to see if there is a cache hit for the corresponding requirements file
          key: ${{ runner.os }}-pip-${{ hashFiles('requirements-test.txt') }}
          restore-keys: |
            ${{ runner.os }}-pip-
            ${{ runner.os }}-

      - name: Install pip dependencies
        run: |
          python -m pip install --upgrade pip wheel setuptools
          python -m pip install -r requirements-test.txt
          python -m pip install -U django==${{ matrix.django-version }}.*
          python setup.py -q install

      - name: Lint with flake8
        run: |
          # stop the build if there are Python syntax errors or undefined names
          flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics --extend-exclude=build/
          # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
          flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics

      - name: Set up test databases
        run: |
          psql -tc 'SHOW server_version' -U postgres -h localhost
          psql -c 'CREATE ROLE test;' -U postgres -h localhost
          psql -c 'ALTER ROLE test WITH SUPERUSER;' -U postgres -h localhost
          psql -c 'ALTER ROLE test WITH LOGIN;' -U postgres -h localhost
          psql -c "ALTER ROLE test PASSWORD 'test';" -U postgres -h localhost
          psql -c 'CREATE DATABASE test OWNER test;' -U postgres -h localhost
          psql -c 'CREATE DATABASE test2 OWNER test;' -U postgres -h localhost
        env:
          PGPASSWORD: postgres

      - name: Test with unittest
        run: |
          python runtests.py

Travis CI configuration (deleted, 101 lines)

@@ -1,101 +0,0 @@
dist: xenial
sudo: required
language: python

cache:
  pip: true
  apt: true

services:
  - postgresql
  - redis-server

addons:
  postgresql: "11"
  apt:
    sources:
      - sourceline: "deb http://repo.yandex.ru/clickhouse/deb/stable/ main/"
      - sourceline: "deb https://packages.erlang-solutions.com/ubuntu xenial contrib"
        key_url: "https://packages.erlang-solutions.com/ubuntu/erlang_solutions.asc"
      - sourceline: "deb https://dl.bintray.com/rabbitmq/debian xenial main"
        key_url: "https://github.com/rabbitmq/signing-keys/releases/download/2.0/rabbitmq-release-signing-key.asc"
    packages:
      - dirmngr
      - apt-transport-https
      - postgresql-contrib-9.6
      - postgresql-10
      - postgresql-contrib-10
      - postgresql-client-10
      - postgresql-11
      - postgresql-contrib-11
      - postgresql-client-11
      - postgresql-12
      - postgresql-contrib-12
      - postgresql-client-12
      - unzip
      - rabbitmq-server

python:
  - 3.6
  - 3.7
  - 3.8

env:
  - PG=9.6 DJANGO=2.1
  - PG=10 DJANGO=2.1
  - PG=11 DJANGO=2.1
  - PG=12 DJANGO=2.1
  - PG=9.6 DJANGO=2.2
  - PG=10 DJANGO=2.2
  - PG=11 DJANGO=2.2
  - PG=12 DJANGO=2.2
  - PG=9.6 DJANGO=3.0
  - PG=10 DJANGO=3.0
  - PG=11 DJANGO=3.0
  - PG=12 DJANGO=3.0
  - PG=9.6 DJANGO=3.1
  - PG=10 DJANGO=3.1
  - PG=11 DJANGO=3.1
  - PG=12 DJANGO=3.1

before_install:
  # Use default PostgreSQL 11 port
  - sudo sed -i 's/port = 5433/port = 5432/' /etc/postgresql/11/main/postgresql.conf
  - sudo cp /etc/postgresql/{10,11}/main/pg_hba.conf
  - sudo sed -i 's/port = 5434/port = 5432/' /etc/postgresql/12/main/postgresql.conf
  - sudo cp /etc/postgresql/{10,12}/main/pg_hba.conf
  # Start PostgreSQL version we need
  - sudo systemctl stop postgresql
  - sudo systemctl start postgresql@$PG-main
  # ClickHouse sources
  - sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv E0C56BD4
  - sudo apt-get update

install:
  # Install ClickHouse
  - sudo apt-get install clickhouse-client clickhouse-server clickhouse-common-static
  - sudo service clickhouse-server restart
  - pip install -r requirements-test.txt
  - pip install -q Django==$DJANGO.*
  - python setup.py -q install

before_script:
  # Output software versions
  - erl -eval 'erlang:display(erlang:system_info(otp_release)), halt().' -noshell
  - rabbitmqctl status | grep "RabbitMQ"
  - clickhouse-client --query "SELECT version();"
  - psql -tc 'SHOW server_version' -U postgres
  - psql -tc 'SHOW server_version' -U postgres
  - psql -c 'CREATE ROLE test;' -U postgres
  - psql -c 'ALTER ROLE test WITH SUPERUSER;' -U postgres
  - psql -c 'ALTER ROLE test WITH LOGIN;' -U postgres
  - psql -c "ALTER ROLE test PASSWORD 'test';" -U postgres
  - psql -c 'CREATE DATABASE test OWNER test;' -U postgres
  - psql -c 'CREATE DATABASE test2 OWNER test;' -U postgres

script:
  python runtests.py

Dockerfile (new file, 47 lines added)

@@ -0,0 +1,47 @@
ARG PYTHON_IMAGE_TAG=latest
FROM python:${PYTHON_IMAGE_TAG} AS image_stage
ARG APP_TAG="1.0.3"
LABEL \
org.label-schema.build-date=Now \
org.label-schema.maintainer="m1ha@carrotquest.io" \
org.label-schema.schema-version="1.0.0-rc1" \
org.label-schema.vcs-ref="v${APP_TAG}" \
org.label-schema.vcs-url="https://github.com/carrotquest/django-clickhouse" \
org.label-schema.vendor="Carrot quest" \
org.label-schema.version="${APP_TAG}"
ENV APP_UID ${APP_UID:-1000}
ENV APP_GID ${APP_GID:-1000}
ENV APP_NAME ${APP_NAME:-"app"}
# Configure utf-8 locales to make sure Python correctly handles unicode filenames
# Configure pip local path to copy data from pip_stage
ENV LANG=C.UTF-8 LC_ALL=C.UTF-8 DJANGO_SETTINGS_MODULE=tests.settings PYTHONUSERBASE=/pip PATH=/pip/bin:$PATH
RUN set -eu && \
groupadd --gid "${APP_GID}" "app" && \
useradd --uid ${APP_UID} --gid ${APP_GID} --create-home --shell /bin/bash -d /app app && \
mkdir -p /pip && \
chmod 755 /app /pip && \
chown -R ${APP_UID}:${APP_GID} /app /pip
WORKDIR /app/src
# Install dependencies
# set -eu "breaks" pipeline on first error
COPY ./requirements-test.txt /app/requirements-test.txt
RUN --mount=type=cache,target=/root/.cache/pip \
set -eu && \
python3 -m pip install --upgrade pip setuptools wheel && \
python3 -m pip install --upgrade --requirement /app/requirements-test.txt
COPY . /app/src
RUN python3 setup.py -q install --user
USER ${APP_UID}
CMD ["python3", "runtests.py"]

docker-compose.yml (new file, 39 lines added)

@@ -0,0 +1,39 @@
---
version: "3.9"

services:
  redis_db:
    image: redis
    command: [sh, -c, "redis-server --save '' --appendonly no"]  # disable persistence
    mem_limit: 512m
    cpus: 1

  postgres_db:
    image: postgres
    environment:
      - POSTGRES_PASSWORD=postgres
    mem_limit: 1g
    cpus: 1

  clickhouse_db:
    image: yandex/clickhouse-server
    mem_limit: 1g
    cpus: 1

  run_tests:
    image: django-clickhouse
    build:
      context: .
      args:
        - PYTHON_VER=latest
    environment:
      - REDIS_HOST=redis_db
      - PGHOST=postgres_db
      - PGUSER=postgres
      - PGPASS=postgres
      - "CLICK_HOUSE_HOST=http://clickhouse_db:8123"
    depends_on:
      - redis_db
      - postgres_db
      - clickhouse_db
    mem_limit: 1g
    cpus: 1

docs/development.md (new file, 30 lines added)

@@ -0,0 +1,30 @@
# Development

## Basic info
This is an open-source project developed by the `Carrot quest` team under the MIT license.
Feel free to create issues and make pull requests.

The query and database layer wraps the [infi.clickhouse_orm](https://github.com/Infinidat/infi.clickhouse_orm) library.
If you want to extend the QuerySet system, it is better to contribute there.

## General info about testing
The test suite is based on [django.test](https://docs.djangoproject.com/en/3.2/topics/testing/overview/).
The tests live in the `tests` directory.

## Test requirements
* [Redis](https://redis.io/)
* [Yandex ClickHouse](https://clickhouse.yandex/)
* [PostgreSQL](https://www.postgresql.org/)
* PyPI libraries listed in the `requirements-test.txt` file

## Running tests
### Running in Docker
1. Install [Docker and docker-compose](https://www.docker.com/)
2. Run `docker-compose run run_tests` in the project directory

### Running in a virtual environment
1. Install all requirements listed above
2. [Create a virtual environment](https://docs.python.org/3/tutorial/venv.html)
3. Install the requirements: `pip3 install -U -r requirements-test.txt`
4. Run the tests: `python3 runtests.py`

@@ -20,3 +20,4 @@
* [RedisStorage](storages.md#redisstorage)
* [Monitoring](monitoring.md)
* [Performance notes](performance.md)
+* [Development](development.md)

@@ -9,3 +9,6 @@ statsd
django-pg-returning
django-pg-bulk-update
redis
+
+# Linter
+flake8

@@ -18,6 +18,6 @@ if __name__ == "__main__":
    os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
    django.setup()
    TestRunner = get_runner(settings)
-    test_runner = TestRunner()
+    test_runner = TestRunner(interactive=False)
    failures = test_runner.run_tests(["tests"])
    sys.exit(bool(failures))
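A note on the change above (not part of the commit): with the default interactive=True, Django's test runner may prompt on stdin, for example before destroying a leftover test database, and such a prompt cannot be answered on a CI runner with no attached terminal. Passing interactive=False makes the runner proceed without asking:

    test_runner = TestRunner(interactive=False)  # never prompt; suitable for unattended CI runs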

@@ -28,7 +28,7 @@ def django_pg_returning_available(using: str) -> bool:
    :return: Boolean
    """
    try:
-        import django_pg_returning
+        import django_pg_returning  # noqa: F401
        return connections[using].vendor == 'postgresql'
    except ImportError:
        return False

@@ -1,5 +1,5 @@
import logging
-from typing import Generator, Optional, Type, Iterable
+from typing import Optional, Type, Iterable

from infi.clickhouse_orm.database import Database as InfiDatabase, DatabaseException
from infi.clickhouse_orm.utils import parse_tsv
@@ -35,8 +35,8 @@ class Database(InfiDatabase):
    def _get_applied_migrations(self, migrations_package_name):
        raise NotImplementedError("This method is not supported by django_clickhouse.")

-    def select_tuples(self, query: str, model_class: Type['ClickHouseModel'], settings: Optional[dict] = None
-                      ) -> Iterable[tuple]:
+    def select_tuples(self, query: str, model_class: Type['ClickHouseModel'],  # noqa: F821
+                      settings: Optional[dict] = None) -> Iterable[tuple]:
        """
        This method selects model_class namedtuples, instead of class instances.
        Less memory consumption, greater speed
@@ -67,7 +67,7 @@ class Database(InfiDatabase):
            yield item

-    def insert_tuples(self, model_class: Type['ClickHouseModel'], model_tuples: Iterable[tuple],
+    def insert_tuples(self, model_class: Type['ClickHouseModel'], model_tuples: Iterable[tuple],  # noqa: F821
                      batch_size: Optional[int] = None, formatted: bool = False) -> None:
        """
        Inserts model_class namedtuples

@@ -2,19 +2,20 @@
This file contains wrappers for infi.clckhouse_orm engines to use in django-clickhouse
"""
import datetime
-from typing import List, Type, Union, Iterable, Generator, Optional
+from typing import List, Type, Union, Iterable, Optional

from django.db.models import Model as DjangoModel
from infi.clickhouse_orm import engines as infi_engines
from statsd.defaults.django import statsd

+from .clickhouse_models import ClickHouseModel
from .configuration import config
from .database import connections
from .utils import format_datetime


class InsertOnlyEngineMixin:
-    def get_insert_batch(self, model_cls: Type['ClickHouseModel'], objects: List[DjangoModel]) -> Iterable[tuple]:
+    def get_insert_batch(self, model_cls: Type[ClickHouseModel], objects: List[DjangoModel]) -> Iterable[tuple]:
        """
        Gets a list of model_cls instances to insert into database
        :param model_cls: ClickHouseModel subclass to import
@@ -68,7 +69,7 @@ class CollapsingMergeTree(InsertOnlyEngineMixin, infi_engines.CollapsingMergeTree):
                          max_date=max_date, object_pks=','.join(object_pks))
        return connections[db_alias].select_tuples(query, model_cls)

-    def get_final_versions(self, model_cls: Type['ClickHouseModel'], objects: Iterable[DjangoModel],
+    def get_final_versions(self, model_cls: Type[ClickHouseModel], objects: Iterable[DjangoModel],
                           date_col: Optional[str] = None) -> Iterable[tuple]:
        """
        Get objects, that are currently stored in ClickHouse.
@@ -122,7 +123,7 @@ class CollapsingMergeTree(InsertOnlyEngineMixin, infi_engines.CollapsingMergeTree):
        else:
            return self._get_final_versions_by_final(*params)

-    def get_insert_batch(self, model_cls: Type['ClickHouseModel'], objects: List[DjangoModel]) -> Iterable[tuple]:
+    def get_insert_batch(self, model_cls: Type[ClickHouseModel], objects: List[DjangoModel]) -> Iterable[tuple]:
        """
        Gets a list of model_cls instances to insert into database
        :param model_cls: ClickHouseModel subclass to import

@@ -9,7 +9,7 @@ from django.db.models.signals import post_migrate
from django.dispatch import receiver

# In order to support all operations import here
-from infi.clickhouse_orm.migrations import *
+from infi.clickhouse_orm.migrations import *  # noqa F401, F403
from infi.clickhouse_orm.database import ServerError, DatabaseException
from infi.clickhouse_orm.fields import StringField, DateField

@@ -155,7 +155,7 @@ class ClickHouseSyncModel(DjangoModel):
        return storage_cls()

    @classmethod
-    def register_clickhouse_sync_model(cls, model_cls: Type['ClickHouseModel']) -> None:
+    def register_clickhouse_sync_model(cls, model_cls: Type['ClickHouseModel']) -> None:  # noqa: F821
        """
        Registers ClickHouse model to listen to this model updates
        :param model_cls: Model class to register
@@ -167,7 +167,7 @@ class ClickHouseSyncModel(DjangoModel):
        cls._clickhouse_sync_models.add(model_cls)

    @classmethod
-    def get_clickhouse_sync_models(cls) -> Set['ClickHouseModel']:
+    def get_clickhouse_sync_models(cls) -> Set['ClickHouseModel']:  # noqa: F821
        """
        Returns all clickhouse models, listening to this class
        :return: A set of model classes to sync

@@ -7,7 +7,7 @@ from .utils import model_to_dict


class Django2ClickHouseModelSerializer:
-    def __init__(self, model_cls: Type['ClickHouseModel'], fields: Optional[Iterable[str]] = None,
+    def __init__(self, model_cls: Type['ClickHouseModel'], fields: Optional[Iterable[str]] = None,  # noqa: F821
                 exclude_fields: Optional[Iterable[str]] = None, writable: bool = False,
                 defaults: Optional[dict] = None) -> None:
        """

@@ -83,7 +83,7 @@ class Storage:
        :param kwargs: Storage dependant arguments
        :return: Number of records in queue
        """
-        raise NotImplemented()
+        raise NotImplementedError()

    def get_operations(self, import_key: str, count: int, **kwargs) -> List[Tuple[str, str]]:
        """
@@ -94,7 +94,7 @@ class Storage:
        :param kwargs: Storage dependant arguments
        :return: A list of tuples (operation, pk) in incoming order.
        """
-        raise NotImplemented()
+        raise NotImplementedError()

    def register_operations(self, import_key: str, operation: str, *pks: Any) -> int:
        """
@@ -135,21 +135,21 @@ class Storage:
        This method is used in tests to drop all storage data
        :return: None
        """
-        raise NotImplemented()
+        raise NotImplementedError()

    def get_last_sync_time(self, import_key: str) -> Optional[datetime.datetime]:
        """
        Gets the last time, sync has been executed
        :return: datetime.datetime if last sync has been. Otherwise - None.
        """
-        raise NotImplemented()
+        raise NotImplementedError()

    def set_last_sync_time(self, import_key: str, dt: datetime.datetime) -> None:
        """
        Sets successful sync time
        :return: None
        """
-        raise NotImplemented()
+        raise NotImplementedError()


class RedisStorage(with_metaclass(SingletonMeta, Storage)):
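A note on the NotImplemented -> NotImplementedError change above: NotImplemented is the sentinel value Python uses for unsupported binary operations, not an exception class, so `raise NotImplemented()` dies with a confusing TypeError instead of telling the caller that a subclass must override the method. A minimal sketch of the intended pattern (the method name is hypothetical; only the base class and RedisStorage appear in the diff):

    class Storage:
        def flush(self) -> None:
            # Concrete backends such as RedisStorage must override this.
            raise NotImplementedError()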

@@ -15,9 +15,9 @@ os.environ.setdefault("DJANGO_SETTINGS_MODULE", 'tests.settings')
django.setup()

# This imports must be after django activation
-from django.db.models import F
-from tests.clickhouse_models import ClickHouseCollapseTestModel
-from tests.models import TestModel
+from django.db.models import F  # noqa: E402
+from tests.clickhouse_models import ClickHouseCollapseTestModel  # noqa: E402
+from tests.models import TestModel  # noqa: E402

logger = logging.getLogger('django-clickhouse')

@@ -1,36 +1,39 @@
"""
This file contains django settings to run tests with runtests.py
"""
+from os import environ

SECRET_KEY = 'fake-key'

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'test',
-        'USER': 'test',
-        'PASSWORD': 'test',
-        'HOST': '127.0.0.1',
-        'PORT': '5432'
+        'USER': environ.get('PGUSER', 'test'),
+        'PASSWORD': environ.get('PGPASS', 'test'),
+        'HOST': environ.get('PGHOST', '127.0.0.1'),
+        'PORT': environ.get('PGPORT', 5432)
    },
    'secondary': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'test2',
-        'USER': 'test',
-        'PASSWORD': 'test',
-        'HOST': '127.0.0.1',
-        'PORT': '5432'
+        'USER': environ.get('PGUSER', 'test'),
+        'PASSWORD': environ.get('PGPASS', 'test'),
+        'HOST': environ.get('PGHOST', '127.0.0.1'),
+        'PORT': environ.get('PGPORT', 5432)
    },
    # I need separate connections for multiprocessing tests
    'test_db': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'test_test',
-        'USER': 'test',
-        'PASSWORD': 'test',
-        'HOST': '127.0.0.1',
-        'PORT': '5432'
+        'USER': environ.get('PGUSER', 'test'),
+        'PASSWORD': environ.get('PGPASS', 'test'),
+        'HOST': environ.get('PGHOST', '127.0.0.1'),
+        'PORT': environ.get('PGPORT', 5432)
    },
}

+DEFAULT_AUTO_FIELD = 'django.db.models.AutoField'
+
LOGGING = {
    'version': 1,
@@ -58,22 +61,26 @@ INSTALLED_APPS = [
CLICKHOUSE_DATABASES = {
    'default': {
+        'db_url': environ.get('CLICK_HOUSE_HOST', 'http://localhost:8123/'),
        'db_name': 'test',
        'username': 'default',
        'password': ''
    },
    'secondary': {
+        'db_url': environ.get('CLICK_HOUSE_HOST', 'http://localhost:8123/'),
        'db_name': 'test_2',
        'username': 'default',
        'password': ''
    },
    'no_migrate': {
+        'db_url': environ.get('CLICK_HOUSE_HOST', 'http://localhost:8123/'),
        'db_name': 'test_3',
        'username': 'default',
        'password': '',
        'migrate': False
    },
    'readonly': {
+        'db_url': environ.get('CLICK_HOUSE_HOST', 'http://localhost:8123/'),
        'db_name': 'test_3',
        'username': 'default',
        'password': '',
@@ -84,8 +91,8 @@ CLICKHOUSE_DATABASES = {
CLICKHOUSE_SYNC_BATCH_SIZE = 5000

CLICKHOUSE_REDIS_CONFIG = {
-    'host': '127.0.0.1',
-    'port': 6379,
+    'host': environ.get('REDIS_HOST', '127.0.0.1'),
+    'port': environ.get('REDIS_PORT', 6379),
    'db': 8,
    'socket_timeout': 10
}
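Because the settings above now read connection details from the environment, a local run outside Docker can point the tests at non-default services. A sketch using the variable names introduced in this commit (each falls back to the default shown above when unset):

    PGHOST=127.0.0.1 PGUSER=test PGPASS=test REDIS_HOST=127.0.0.1 \
    CLICK_HOUSE_HOST=http://localhost:8123/ python3 runtests.py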

@@ -48,5 +48,3 @@ class NamedTupleTest(TestCase):
        t2 = TestTuple(1, 2, 3)
        self.assertEqual(t1, t2)
        self.assertEqual((1, 2, 3), t1)
-
-

@@ -2,7 +2,7 @@ import datetime
from unittest import skipIf

import django
-from django.test import TransactionTestCase, TestCase
+from django.test import TransactionTestCase
from django.utils.timezone import now

from tests.clickhouse_models import ClickHouseTestModel, ClickHouseSecondTestModel, ClickHouseCollapseTestModel, \

@@ -75,11 +75,10 @@ class StorageTest(TestCase):
    def test_locks(self):
        # Test that multiple can acquire locks in parallel
        # And single model can't
-        l = self.storage.get_lock(ClickHouseTestModel.get_import_key())
-        l.acquire()
+        lock = self.storage.get_lock(ClickHouseTestModel.get_import_key())
+        lock.acquire()
        with self.assertRaises(RedisLockTimeoutError):
-            l.acquire()
+            lock.acquire()

-        l2 = self.storage.get_lock(ClickHouseCollapseTestModel.get_import_key())
-        l2.acquire()
+        lock_2 = self.storage.get_lock(ClickHouseCollapseTestModel.get_import_key())
+        lock_2.acquire()

@@ -119,4 +119,3 @@ class TestSingletonMeta(TestCase):
        b = Single()
        self.assertEqual(a, b)
        self.assertEqual(2, b.test)
-