Mirror of https://github.com/carrotquest/django-clickhouse.git
Synced 2025-07-13 17:42:27 +03:00

Compare commits

No commits in common. "master" and "v1.0.1" have entirely different histories.

.gitignore
@@ -1,15 +0,0 @@
-# Docs
-docs/
-
-# Python cache files
-**/__pycache__/
-
-# Private and public keys
-*.key
-*.ppk
-*.pub
-
-# Hidden apps directories
-.github/
-.idea/

.github/workflows/python-publish.yml
@@ -1,31 +0,0 @@
-# This workflow will upload a Python Package using Twine when a release is created
-# For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries
-
-name: Upload Python Package
-
-on:
-  release:
-    types: [created]
-
-jobs:
-  deploy:
-
-    runs-on: ubuntu-latest
-
-    steps:
-    - uses: actions/checkout@v2
-    - name: Set up Python
-      uses: actions/setup-python@v2
-      with:
-        python-version: '3.x'
-    - name: Install dependencies
-      run: |
-        python -m pip install --upgrade pip
-        pip install setuptools wheel twine
-    - name: Build and publish
-      env:
-        TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
-        TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
-      run: |
-        python setup.py sdist bdist_wheel
-        twine upload dist/*

.github/workflows/python-tests.yml
@@ -1,134 +0,0 @@
-name: Python unit tests
-
-on:
-  push:
-    branches: [ master ]
-  pull_request:
-    branches: [ master ]
-
-jobs:
-  build:
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
-        postgres-version: ["9.6", "10", "11", "12", "13", "14", "15", "16"]
-        django-version: ["3.2", "4.0", "4.1", "4.2", "5.0", "5.1"]
-        clickhouse-version: ["latest"]
-        redis-version: ["latest"]
-        exclude:
-          # Django 4.0+ doesn't support PostgreSQL 9.6
-          - django-version: "4.0"
-            postgres-version: "9.6"
-          - django-version: "4.1"
-            postgres-version: "9.6"
-          - django-version: "4.2"
-            postgres-version: "9.6"
-          - django-version: "5.0"
-            postgres-version: "9.6"
-          - django-version: "5.1"
-            postgres-version: "9.6"
-
-          # Django 4.1+ doesn't support PostgreSQL 10
-          - django-version: "4.1"
-            postgres-version: "10"
-          - django-version: "4.2"
-            postgres-version: "10"
-          - django-version: "5.0"
-            postgres-version: "10"
-          - django-version: "5.1"
-            postgres-version: "10"
-
-          # Django 4.2+ doesn't support PostgreSQL 11
-          - django-version: "4.2"
-            postgres-version: "11"
-          - django-version: "5.0"
-            postgres-version: "11"
-          - django-version: "5.1"
-            postgres-version: "11"
-
-          # Django 5.1+ doesn't support PostgreSQL 12
-          - django-version: "5.1"
-            postgres-version: "12"
-
-          # Django 5.0+ does not support Python 3.8, 3.9
-          - django-version: "5.0"
-            python-version: "3.8"
-          - django-version: "5.0"
-            python-version: "3.9"
-          - django-version: "5.1"
-            python-version: "3.8"
-          - django-version: "5.1"
-            python-version: "3.9"
-
-    services:
-      postgres:
-        image: postgres:${{ matrix.postgres-version }}
-        env:
-          POSTGRES_PASSWORD: postgres
-        options: >-
-          --health-cmd pg_isready
-          --health-interval 10s
-          --health-timeout 5s
-          --health-retries 5
-        ports:
-          - 5432:5432
-
-      clickhouse:
-        image: yandex/clickhouse-server:${{ matrix.clickhouse-version }}
-        ports:
-          - 8123:8123
-
-      redis:
-        image: redis:${{ matrix.redis-version }}
-        ports:
-          - 6379:6379
-
-    steps:
-    - uses: actions/checkout@v2
-
-    - name: Set up Python ${{ matrix.python-version }}
-      uses: actions/setup-python@v2
-      with:
-        python-version: ${{ matrix.python-version }}
-
-    - name: Cache pip
-      uses: actions/cache@v2
-      with:
-        # This path is specific to Ubuntu
-        path: ~/.cache/pip
-        # Look to see if there is a cache hit for the corresponding requirements file
-        key: ${{ runner.os }}-pip-${{ hashFiles('requirements-test.txt') }}
-        restore-keys: |
-          ${{ runner.os }}-pip-
-          ${{ runner.os }}-
-
-    - name: Install pip dependencies
-      run: |
-        python -m pip install --upgrade pip wheel setuptools
-        python -m pip install -r requirements-test.txt
-        python -m pip install -U django==${{ matrix.django-version }}.*
-        python setup.py -q install
-
-    - name: Lint with flake8
-      run: |
-        # stop the build if there are Python syntax errors or undefined names
-        flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics --extend-exclude=build/
-
-        # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
-        flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
-    - name: Set up test databases
-      run: |
-        psql -tc 'SHOW server_version' -U postgres -h localhost
-        psql -c 'CREATE ROLE test;' -U postgres -h localhost
-        psql -c 'ALTER ROLE test WITH SUPERUSER;' -U postgres -h localhost
-        psql -c 'ALTER ROLE test WITH LOGIN;' -U postgres -h localhost
-        psql -c "ALTER ROLE test PASSWORD 'test';" -U postgres -h localhost
-        psql -c 'CREATE DATABASE test OWNER test;' -U postgres -h localhost
-        psql -c 'CREATE DATABASE test2 OWNER test;' -U postgres -h localhost
-      env:
-        PGPASSWORD: postgres
-
-    - name: Test with unittest
-      run: |
-        python runtests.py

.travis.yml (new file)
@@ -0,0 +1,97 @@
+dist: xenial
+sudo: required
+language: python
+cache:
+  pip: true
+  apt: true
+
+services:
+  - postgresql
+  - redis-server
+addons:
+  postgresql: "11"
+  apt:
+    sources:
+      - sourceline: "deb http://repo.yandex.ru/clickhouse/deb/stable/ main/"
+      - sourceline: "deb https://packages.erlang-solutions.com/ubuntu xenial contrib"
+        key_url: "https://packages.erlang-solutions.com/ubuntu/erlang_solutions.asc"
+      - sourceline: "deb https://dl.bintray.com/rabbitmq/debian xenial main"
+        key_url: "https://github.com/rabbitmq/signing-keys/releases/download/2.0/rabbitmq-release-signing-key.asc"
+    packages:
+      - dirmngr
+      - apt-transport-https
+      - postgresql-contrib-9.6
+      - postgresql-10
+      - postgresql-contrib-10
+      - postgresql-client-10
+      - postgresql-11
+      - postgresql-contrib-11
+      - postgresql-client-11
+      - postgresql-12
+      - postgresql-contrib-12
+      - postgresql-client-12
+      - unzip
+      - rabbitmq-server
+
+python:
+  - 3.6
+  - 3.7
+  - 3.8
+
+env:
+  - PG=9.6 DJANGO=2.1
+  - PG=10 DJANGO=2.1
+  - PG=11 DJANGO=2.1
+  - PG=12 DJANGO=2.1
+  - PG=9.6 DJANGO=2.2
+  - PG=10 DJANGO=2.2
+  - PG=11 DJANGO=2.2
+  - PG=12 DJANGO=2.2
+  - PG=9.6 DJANGO=3.0
+  - PG=10 DJANGO=3.0
+  - PG=11 DJANGO=3.0
+  - PG=12 DJANGO=3.0
+
+before_install:
+  # Use default PostgreSQL 11 port
+  - sudo sed -i 's/port = 5433/port = 5432/' /etc/postgresql/11/main/postgresql.conf
+  - sudo cp /etc/postgresql/{10,11}/main/pg_hba.conf
+
+  - sudo sed -i 's/port = 5434/port = 5432/' /etc/postgresql/12/main/postgresql.conf
+  - sudo cp /etc/postgresql/{10,12}/main/pg_hba.conf
+
+  # Start PostgreSQL version we need
+  - sudo systemctl stop postgresql
+  - sudo systemctl start postgresql@$PG-main
+
+  # ClickHouse sources
+  - sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv E0C56BD4
+  - sudo apt-get update
+
+install:
+  # Install ClickHouse
+  - sudo apt-get install clickhouse-client clickhouse-server clickhouse-common-static
+  - sudo service clickhouse-server restart
+
+  - pip install -r requirements-test.txt
+  - pip install -q Django==$DJANGO.*
+  - python setup.py -q install
+
+before_script:
+  # Output software versions
+  - erl -eval 'erlang:display(erlang:system_info(otp_release)), halt().' -noshell
+  - rabbitmqctl status | grep "RabbitMQ"
+  - clickhouse-client --query "SELECT version();"
+  - psql -tc 'SHOW server_version' -U postgres
+
+  - psql -tc 'SHOW server_version' -U postgres
+  - psql -c 'CREATE ROLE test;' -U postgres
+  - psql -c 'ALTER ROLE test WITH SUPERUSER;' -U postgres
+  - psql -c 'ALTER ROLE test WITH LOGIN;' -U postgres
+  - psql -c "ALTER ROLE test PASSWORD 'test';" -U postgres
+  - psql -c 'CREATE DATABASE test OWNER test;' -U postgres
+  - psql -c 'CREATE DATABASE test2 OWNER test;' -U postgres
+
+script:
+  python runtests.py

Dockerfile
@@ -1,47 +0,0 @@
-ARG PYTHON_IMAGE_TAG=latest
-
-FROM python:${PYTHON_IMAGE_TAG} AS image_stage
-
-ARG APP_TAG="1.0.3"
-
-LABEL \
-  org.label-schema.build-date=Now \
-  org.label-schema.maintainer="m1ha@carrotquest.io" \
-  org.label-schema.schema-version="1.0.0-rc1" \
-  org.label-schema.vcs-ref="v${APP_TAG}" \
-  org.label-schema.vcs-url="https://github.com/carrotquest/django-clickhouse" \
-  org.label-schema.vendor="Carrot quest" \
-  org.label-schema.version="${APP_TAG}"
-
-ENV APP_UID ${APP_UID:-1000}
-ENV APP_GID ${APP_GID:-1000}
-ENV APP_NAME ${APP_NAME:-"app"}
-
-# Configure utf-8 locales to make sure Python correctly handles unicode filenames
-# Configure pip local path to copy data from pip_stage
-ENV LANG=C.UTF-8 LC_ALL=C.UTF-8 DJANGO_SETTINGS_MODULE=tests.settings PYTHONUSERBASE=/pip PATH=/pip/bin:$PATH
-
-RUN set -eu && \
-  groupadd --gid "${APP_GID}" "app" && \
-  useradd --uid ${APP_UID} --gid ${APP_GID} --create-home --shell /bin/bash -d /app app && \
-  mkdir -p /pip && \
-  chmod 755 /app /pip && \
-  chown -R ${APP_UID}:${APP_GID} /app /pip
-
-WORKDIR /app/src
-
-# Install dependencies
-# set -eu "breaks" pipeline on first error
-COPY ./requirements-test.txt /app/requirements-test.txt
-RUN --mount=type=cache,target=/root/.cache/pip \
-  set -eu && \
-  python3 -m pip install --upgrade pip setuptools wheel && \
-  python3 -m pip install --upgrade --requirement /app/requirements-test.txt
-
-COPY . /app/src
-
-RUN python3 setup.py -q install --user
-
-USER ${APP_UID}
-
-CMD ["python3", "runtests.py"]

LICENSE
@@ -1,21 +0,0 @@
-MIT License
-
-Copyright (c) 2019 Carrot quest
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.

README.md
@@ -1,4 +1,2 @@
-[![Python unit tests](https://github.com/carrotquest/django-clickhouse/actions/workflows/python-tests.yml/badge.svg)](https://github.com/carrotquest/django-clickhouse/actions/workflows/python-tests.yml) [![Upload Python Package](https://github.com/carrotquest/django-clickhouse/actions/workflows/python-publish.yml/badge.svg)](https://github.com/carrotquest/django-clickhouse/actions/workflows/python-publish.yml) [![Downloads](https://pepy.tech/badge/django-clickhouse)](https://pepy.tech/project/django-clickhouse)
-
 # django-clickhouse
 Documentation is [here](docs/index.md)

docker-compose.yml
@@ -1,39 +0,0 @@
----
-version: "3.9"
-services:
-  redis_db:
-    image: redis
-    command: [sh, -c, "redis-server --save '' --appendonly no"]  # disable persistence
-    mem_limit: 512m
-    cpus: 1
-
-  postgres_db:
-    image: postgres
-    environment:
-      - POSTGRES_PASSWORD=postgres
-    mem_limit: 1g
-    cpus: 1
-
-  clickhouse_db:
-    image: yandex/clickhouse-server
-    mem_limit: 1g
-    cpus: 1
-
-  run_tests:
-    image: django-clickhouse
-    build:
-      context: .
-      args:
-        - PYTHON_IMAGE_TAG=latest
-    environment:
-      - REDIS_HOST=redis_db
-      - PGHOST=postgres_db
-      - PGUSER=postgres
-      - PGPASS=postgres
-      - "CLICK_HOUSE_HOST=http://clickhouse_db:8123"
-    depends_on:
-      - redis_db
-      - postgres_db
-      - clickhouse_db
-    mem_limit: 1g
-    cpus: 1

@@ -16,8 +16,9 @@ It is based on [infi.clickhouse-orm](https://github.com/Infinidat/infi.clickhouse_orm)
 * [Django](https://docs.djangoproject.com/) 1.7+
 * [Yandex ClickHouse](https://clickhouse.yandex/)
 * [infi.clickhouse-orm](https://github.com/Infinidat/infi.clickhouse_orm)
-* [pytz](https://pypi.org/project/pytz/) for python before 3.3
-* [typing](https://pypi.org/project/typing/) for python before 3.5
+* [pytz](https://pypi.org/project/pytz/)
+* [six](https://pypi.org/project/six/)
+* [typing](https://pypi.org/project/typing/)
 * [psycopg2](https://www.psycopg.org/)
 * [celery](http://www.celeryproject.org/)
 * [statsd](https://pypi.org/project/statsd/)

docs/development.md
@@ -1,30 +0,0 @@
-# Development
-
-## Basic info
-This is an open-source project developed by the `Carrot quest` team under the MIT license.
-Feel free to create issues and make pull requests.
-The query and database system wraps the [infi.clickhouse_orm](https://github.com/Infinidat/infi.clickhouse_orm) library.
-If you want to develop the QuerySet system, it is better to contribute there.
-
-## General info about testing
-The library's test system is based on [django.test](https://docs.djangoproject.com/en/3.2/topics/testing/overview/).
-You can find the tests in the `tests` directory.
-
-## Tests requirements
-* [Redis](https://redis.io/)
-* [Yandex ClickHouse](https://clickhouse.yandex/)
-* [PostgreSQL](https://www.postgresql.org/)
-* PyPI libraries listed in the `requirements-test.txt` file
-
-## Running tests
-### Running in docker
-1. Install [docker and docker-compose](https://www.docker.com/)
-2. Run `docker-compose run run_tests` in the project directory
-
-### Running in virtual environment
-1. Install all requirements listed above
-2. [Create virtual environment](https://docs.python.org/3/tutorial/venv.html)
-3. Install requirements:
-   `pip3 install -U -r requirements-test.txt`
-4. Start tests:
-   `python3 runtests.py`

@@ -20,4 +20,3 @@
 * [RedisStorage](storages.md#redisstorage)
 * [Monitoring](monitoring.md)
 * [Performance notes](performance.md)
-* [Development](development.md)

@@ -57,30 +57,6 @@ By default migrations are applied to all [CLICKHOUSE_DATABASES](configuration.md)
 Note: migrations are only applied with django `default` database.
 So if you call `python manage.py migrate --database=secondary` they wouldn't be applied.
-
-## Admin migration command
-In order to make migrations separately from django's `manage.py migrate` command,
-this library implements a custom `manage.py` command `clickhouse_migrate`.
-
-Usage:
-```bash
-python manage.py clickhouse_migrate [--help] [--database <db_alias>] [--verbosity {0,1,2,3}] [app_label] [migration_number]
-```
-
-Parameters:
-* `app_label: Optional[str]` - If set, migrates only the given django application
-* `migration_number: Optional[int]` - If set, migrates the django app with `app_label` up to the migration with this number.
-  **Important note**: the library currently does not support unapplying migrations.
-  If an already applied migration is given, it will do nothing.
-* `--database: Optional[str]` - If set, migrates only this database alias from the [CLICKHOUSE_DATABASES config parameter](configuration.md#clickhouse_databases)
-* `--verbosity: Optional[int] = 1` - Level of debug output. See [here](https://docs.djangoproject.com/en/3.2/ref/django-admin/#cmdoption-verbosity) for more details.
-* `--help` - Print help
-
-## Migration operations enhancements
-* `RunSQL`, `RunPython`
-  Can accept a `hints: dict = {}` parameter in order to set the migration database alias (`force_migrate_on_databases: List[str]` key) or model (`model: Union[str, Type[ClickHouseModel]]` key). See the sketch after this hunk.
-
 ## Migration algorithm
 - Get a list of databases from the `CLICKHOUSE_DATABASES` setting. Migrate them one by one.
 - Find all django apps from the `INSTALLED_APPS` setting, which have no `readonly=True` attribute and have `migrate=True` attribute. Migrate them one by one.
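For orientation, a minimal sketch of a migration using the `hints` parameter the master-side docs above describe for `RunSQL`; the SQL string and the `secondary` alias are placeholders, not from the diff:

```python
from django_clickhouse import migrations


class Migration(migrations.Migration):
    operations = [
        # hints limits where this operation runs: 'force_migrate_on_databases'
        # names concrete CLICKHOUSE_DATABASES aliases, as described above
        migrations.RunSQL(
            'DROP TABLE IF EXISTS obsolete_table',  # placeholder SQL
            hints={'force_migrate_on_databases': ['secondary']},
        ),
    ]
```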

@@ -1,19 +1,17 @@
 # Usage overview
 ## Requirements
-At the beginning I expect, that you already have:
+At the begging I expect, that you already have:
 1. [ClickHouse](https://clickhouse.tech/docs/en/) (with [ZooKeeper](https://zookeeper.apache.org/), if you use replication)
 2. Relational database used with [Django](https://www.djangoproject.com/). For instance, [PostgreSQL](https://www.postgresql.org/)
 3. [Django database set up](https://docs.djangoproject.com/en/3.0/ref/databases/)
-4. [Intermediate storage](storages.md) set up. For instance, [Redis](https://redis.io/)
-5. [Celery set up](https://docs.celeryproject.org/en/stable/django/first-steps-with-django.html) in order to sync data automatically.
+4. [Intermediate storage](storages.md) set up. For instance, [Redis](https://redis.io/).

 ## Configuration
 Add required parameters to [Django settings.py](https://docs.djangoproject.com/en/3.0/topics/settings/):
-1. Add `'django_clickhouse'` to `INSTALLED_APPS`
-2. [CLICKHOUSE_DATABASES](configuration.md#clickhouse_databases)
-3. [Intermediate storage](storages.md) configuration. For instance, [RedisStorage](storages.md#redisstorage)
-4. It's recommended to change [CLICKHOUSE_CELERY_QUEUE](configuration.md#clickhouse_celery_queue)
-5. Add sync task to [celerybeat schedule](http://docs.celeryproject.org/en/v2.3.3/userguide/periodic-tasks.html).
+1. [CLICKHOUSE_DATABASES](configuration.md#clickhouse_databases)
+2. [Intermediate storage](storages.md) configuration. For instance, [RedisStorage](storages.md#redisstorage)
+3. It's recommended to change [CLICKHOUSE_CELERY_QUEUE](configuration.md#clickhouse_celery_queue)
+4. Add sync task to [celerybeat schedule](http://docs.celeryproject.org/en/v2.3.3/userguide/periodic-tasks.html).

 Note, that executing planner every 2 seconds doesn't mean sync is executed every 2 seconds.
 Sync time depends on model sync_delay attribute value and [CLICKHOUSE_SYNC_DELAY](configuration.md#clickhouse_sync_delay) configuration parameter.
 You can read more in [sync section](synchronization.md).
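The celerybeat step above typically looks like the following sketch; the 2-second interval matches the planner note in the hunk, the queue name follows the CLICKHOUSE_CELERY_QUEUE recommendation, and the exact option values are illustrative:

```python
from datetime import timedelta

CELERYBEAT_SCHEDULE = {
    'clickhouse_auto_sync': {
        # Planner task shipped with the library; it decides which models
        # actually need syncing based on sync_delay (see the note above)
        'task': 'django_clickhouse.tasks.clickhouse_auto_sync',
        'schedule': timedelta(seconds=2),
        'options': {'expires': 1, 'queue': 'clickhouse'},
    }
}
```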

@@ -22,12 +20,6 @@ You can also change other [configuration parameters](configuration.md) depending
 #### Example
 ```python
-INSTALLED_APPS = (
-    # Your apps may go here
-    'django_clickhouse',
-    # Your apps may go here
-)
-
 # django-clickhouse library setup
 CLICKHOUSE_DATABASES = {
     # Connection name to refer in using(...) method
@@ -85,9 +77,6 @@ from my_app.models import User
 class ClickHouseUser(ClickHouseModel):
     django_model = User
-
-    # Uncomment the line below if you want your models to be synced automatically
-    # sync_enabled = True

     id = fields.UInt32Field()
     first_name = fields.StringField()
     birthday = fields.DateField()
@@ -96,36 +85,23 @@ class ClickHouseUser(ClickHouseModel):
     engine = MergeTree('birthday', ('birthday',))
 ```
-
-**Important note**: the `clickhouse_models.py` file is not imported by django initialization code in any way. So if your models are not used anywhere outside this file, you should import it somewhere in your code if you want synchronization to work correctly. For instance, you can customise [AppConfig](https://docs.djangoproject.com/en/5.0/ref/applications/#django.apps.AppConfig.ready) like:
-
-```python
-from django.apps import AppConfig
-
-
-class MyAppConfig(AppConfig):
-    name = 'my_app'
-
-    def ready(self):
-        from my_app.clickhouse_models import ClickHouseUser
-```
-
 ## Migration to create table in ClickHouse
 1. Read [migrations](migrations.md) section
 2. Create `clickhouse_migrations` package in your django app
 3. Create `0001_initial.py` file inside the created package. Result structure should be:
 ```
 my_app
-| clickhouse_migrations
-|-- __init__.py
-|-- 0001_initial.py
-| clickhouse_models.py
-| models.py
+>> clickhouse_migrations
+>>>> __init__.py
+>>>> 0001_initial.py
+>> clickhouse_models.py
+>> models.py
 ```

 4. Add content to file `0001_initial.py`:
 ```python
 from django_clickhouse import migrations
-from my_app.clickhouse_models import ClickHouseUser
+from my_app.cilckhouse_models import ClickHouseUser

 class Migration(migrations.Migration):
     operations = [

@@ -25,17 +25,12 @@ Router is a class, defining 3 methods:
   Returns `database alias` to use for given `model` for `SELECT` queries.
 * `def db_for_write(self, model: ClickHouseModel, **hints) -> str`
   Returns `database alias` to use for given `model` for `INSERT` queries.
-* `def allow_migrate(self, db_alias: str, app_label: str, operation: Operation, **hints: dict) -> bool`
+* `def allow_migrate(self, db_alias: str, app_label: str, operation: Operation, model: Optional[ClickHouseModel] = None, **hints: dict) -> bool`
   Checks if migration `operation` should be applied in django application `app_label` on database `db_alias`.
-  Optional `hints` help to pass additional info which can be used to test migrations availability on concrete model.
+  Optional `model` field can be used to determine migrations on concrete model.

 By default [CLICKHOUSE_DATABASE_ROUTER](configuration.md#clickhouse_database_router) is used.
 It gets routing information from model fields, described below.
-It also gives ability to use 2 kinds of hints:
-* `force_migrate_on_databases: Iterable[str]` - concrete database aliases where migration should be applied
-* `model: Type[ClickHouseModel]` - a model class, to read routing attributes from.
-  Can be set as class or its string name.
-  If name is set, class is searched in current `<app_label>.<config.MODELS_MODULE>` package.

 ## ClickHouseModel routing attributes
 Default database router reads routing settings from model attributes.
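A custom router implementing the three methods described above could look like the following sketch (master-side signatures; the fixed `'default'` alias is illustrative). Such a class would then be referenced via the CLICKHOUSE_DATABASE_ROUTER setting mentioned in the hunk:

```python
class SingleDatabaseRouter:
    """Routes all reads, writes and migrations to one ClickHouse alias."""

    def db_for_read(self, model, **hints) -> str:
        return 'default'

    def db_for_write(self, model, **hints) -> str:
        return 'default'

    def allow_migrate(self, db_alias: str, app_label: str, operation, **hints) -> bool:
        # Only apply migrations on the alias this router serves
        return db_alias == 'default'
```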

requirements-test.txt
@@ -1,14 +1,11 @@
-celery
 Django (>=1.7)
+pytz
+six
+typing
+psycopg2
 infi.clickhouse-orm
-pytz; python_version < '3.3'
+celery
 statsd
-typing; python_version < '3.5'
-
-psycopg2-binary
 django-pg-returning
 django-pg-bulk-update
 redis
-
-# Linter
-flake8

requirements.txt
@@ -1,6 +1,8 @@
-celery
 Django (>=1.7)
+pytz
+six
+typing
+psycopg2
 infi.clickhouse-orm
-pytz; python_version < '3.3'
+celery
 statsd
-typing; python_version < '3.5'

runtests.py
@@ -18,6 +18,6 @@ if __name__ == "__main__":
    os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
    django.setup()
    TestRunner = get_runner(settings)
-    test_runner = TestRunner(interactive=False)
+    test_runner = TestRunner()
    failures = test_runner.run_tests(["tests"])
    sys.exit(bool(failures))

setup.py
@@ -13,8 +13,8 @@ with open('requirements.txt') as f:

 setup(
     name='django-clickhouse',
-    version='1.2.2',
-    packages=['django_clickhouse', 'django_clickhouse.management.commands'],
+    version='1.0.1',
+    packages=['django_clickhouse'],
     package_dir={'': 'src'},
     url='https://github.com/carrotquest/django-clickhouse',
     license='BSD 3-clause "New" or "Revised" License',

src/django_clickhouse/clickhouse_models.py
@@ -9,9 +9,9 @@ from itertools import chain
 from typing import List, Tuple, Iterable, Set, Any, Optional

 from django.db.models import Model as DjangoModel, QuerySet as DjangoQuerySet
-from django.utils.timezone import now
 from infi.clickhouse_orm.engines import CollapsingMergeTree
 from infi.clickhouse_orm.models import Model as InfiModel, ModelBase as InfiModelBase
+from six import with_metaclass
 from statsd.defaults.django import statsd

 from .compatibility import namedtuple
@@ -41,7 +41,7 @@ class ClickHouseModelMeta(InfiModelBase):
         return res


-class ClickHouseModel(InfiModel, metaclass=ClickHouseModelMeta):
+class ClickHouseModel(with_metaclass(ClickHouseModelMeta, InfiModel)):
     """
     Base model for all other models
     """
@@ -94,7 +94,7 @@ class ClickHouseModel(InfiModel, metaclass=ClickHouseModelMeta):
         return namedtuple("%sTuple" % cls.__name__, field_names, defaults=default_values)

     @classmethod
-    def objects_in(cls, database: Database) -> QuerySet:
+    def objects_in(cls, database: Database)-> QuerySet:
         return QuerySet(cls, database)

     @classmethod
@@ -121,7 +121,7 @@ class ClickHouseModel(InfiModel, metaclass=ClickHouseModelMeta):
         return connections[db_alias]

     @classmethod
-    def get_django_model_serializer(cls, writable: bool = False, defaults: Optional[dict] = None
+    def get_django_model_serializer(cls, writable: bool= False, defaults: Optional[dict] = None
                                     ) -> Django2ClickHouseModelSerializer:
         serializer_cls = lazy_class_import(cls.django_model_serializer)
         return serializer_cls(cls, writable=writable, defaults=defaults)
@@ -236,10 +236,7 @@ class ClickHouseModel(InfiModel, metaclass=ClickHouseModelMeta):

         with statsd.timer(statsd_key.format('steps.get_operations')):
             operations = storage.get_operations(import_key, cls.get_sync_batch_size())
             statsd.incr(statsd_key.format('operations'), len(operations))

-        logger.debug('django-clickhouse: got %d operations from storage (key: %s)'
-                     % (len(operations), import_key))
-
         if operations:
             with statsd.timer(statsd_key.format('steps.get_sync_objects')):
@@ -248,18 +245,13 @@ class ClickHouseModel(InfiModel, metaclass=ClickHouseModelMeta):
                 import_objects = []

         statsd.incr(statsd_key.format('import_objects'), len(import_objects))
-        logger.debug('django-clickhouse: got %d objects to import from database (key: %s)'
-                     % (len(import_objects), import_key))

         if import_objects:
             with statsd.timer(statsd_key.format('steps.get_insert_batch')):
                 # NOTE I don't use generator pattern here, as it move all time into insert.
                 # That makes hard to understand where real problem is in monitoring
                 batch = tuple(cls.get_insert_batch(import_objects))

-            logger.debug('django-clickhouse: formed %d ClickHouse objects to insert (key: %s)'
-                         % (len(batch), import_key))
-
             with statsd.timer(statsd_key.format('steps.insert')):
                 cls.insert_batch(batch)
@@ -291,7 +283,7 @@ class ClickHouseModel(InfiModel, metaclass=ClickHouseModelMeta):
         res = (datetime.datetime.now() - last_sync_time).total_seconds() >= cls.get_sync_delay()
         logger.debug('django-clickhouse: need_sync returned %s for class %s as no last sync found'
                      ' (now: %s, last: %s, delay: %d)'
-                     % (res, cls.__name__, now().isoformat(), last_sync_time.isoformat(),
+                     % (res, cls.__name__, datetime.datetime.now().isoformat(), last_sync_time.isoformat(),
                         cls.get_sync_delay()))

         return res
@@ -320,10 +312,7 @@ class ClickHouseMultiModel(ClickHouseModel):

         with statsd.timer(statsd_key.format('steps.get_operations')):
             operations = storage.get_operations(import_key, cls.get_sync_batch_size())
             statsd.incr(statsd_key.format('operations'), len(operations))

-        logger.debug('django-clickhouse: got %d operations from storage (key: %s)'
-                     % (len(operations), import_key))
-
         if operations:
             with statsd.timer(statsd_key.format('steps.get_sync_objects')):
@@ -332,8 +321,6 @@ class ClickHouseMultiModel(ClickHouseModel):
                 import_objects = []

         statsd.incr(statsd_key.format('import_objects'), len(import_objects))
-        logger.debug('django-clickhouse: got %d objects to import from database (key: %s)'
-                     % (len(import_objects), import_key))

         if import_objects:
             batches = {}
@@ -344,10 +331,7 @@ class ClickHouseMultiModel(ClickHouseModel):
                 # NOTE I don't use generator pattern here, as it move all time into insert.
                 # That makes hard to understand where real problem is in monitoring
                 batch = tuple(model_cls.get_insert_batch(import_objects))
-
-                logger.debug('django-clickhouse: formed %d ClickHouse objects to insert'
-                             ' (model_cls: %s, key: %s)' % (len(batch), model_cls.__name__, import_key))
                 return model_cls, batch

         res = exec_multi_arg_func(_sub_model_func, cls.sub_models, threads_count=len(cls.sub_models))
         batches = dict(res)
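The hunks above all sit inside the model's batch-sync path, which is normally driven by the celery planner. A sketch of driving it by hand — the method names are assumed from the logging in this diff and the library's sync docs, and `ClickHouseUser` is the example model from the docs:

```python
from my_app.clickhouse_models import ClickHouseUser  # hypothetical app

# need_sync() applies the sync_delay check seen in the need_sync hunk;
# sync_batch_from_storage() is assumed to be the entry point that the
# statsd timers and counters above instrument.
if ClickHouseUser.need_sync():
    ClickHouseUser.sync_batch_from_storage()
```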

@@ -28,7 +28,7 @@ def django_pg_returning_available(using: str) -> bool:
     :return: Boolean
     """
     try:
-        import django_pg_returning  # noqa: F401
+        import django_pg_returning
         return connections[using].vendor == 'postgresql'
     except ImportError:
         return False

src/django_clickhouse/database.py
@@ -1,8 +1,9 @@
 import logging
-from typing import Optional, Type, Iterable
+from typing import Generator, Optional, Type, Iterable

 from infi.clickhouse_orm.database import Database as InfiDatabase, DatabaseException
 from infi.clickhouse_orm.utils import parse_tsv
+from six import next
 from io import BytesIO
 from statsd.defaults.django import statsd
@@ -34,8 +35,8 @@ class Database(InfiDatabase):
     def _get_applied_migrations(self, migrations_package_name):
         raise NotImplementedError("This method is not supported by django_clickhouse.")

-    def select_tuples(self, query: str, model_class: Type['ClickHouseModel'],  # noqa: F821
-                      settings: Optional[dict] = None) -> Iterable[tuple]:
+    def select_tuples(self, query: str, model_class: Type['ClickHouseModel'], settings: Optional[dict] = None
+                      ) -> Iterable[tuple]:
         """
         This method selects model_class namedtuples, instead of class instances.
         Less memory consumption, greater speed
@@ -66,7 +67,7 @@ class Database(InfiDatabase):

             yield item

-    def insert_tuples(self, model_class: Type['ClickHouseModel'], model_tuples: Iterable[tuple],  # noqa: F821
+    def insert_tuples(self, model_class: Type['ClickHouseModel'], model_tuples: Iterable[tuple],
                       batch_size: Optional[int] = None, formatted: bool = False) -> None:
         """
         Inserts model_class namedtuples
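Usage of the two methods diffed above is the same on both sides; a rough sketch, where the model class and query are placeholders and `$table` is substituted by the ORM:

```python
from django_clickhouse.database import connections
from my_app.clickhouse_models import ClickHouseUser  # hypothetical model

db = connections['default']

# select_tuples() yields named tuples instead of model instances, which
# the docstring above notes is cheaper in memory and faster
rows = list(db.select_tuples('SELECT * FROM $table LIMIT 10', ClickHouseUser))

# insert_tuples() accepts the same named-tuple format for writes
db.insert_tuples(ClickHouseUser, rows, batch_size=1000)
```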

src/django_clickhouse/engines.py
@@ -2,24 +2,19 @@
 This file contains wrappers for infi.clckhouse_orm engines to use in django-clickhouse
 """
 import datetime
-import logging
-from typing import List, Type, Union, Iterable, Optional, Tuple, NamedTuple
+from typing import List, Type, Union, Iterable, Generator, Optional

 from django.db.models import Model as DjangoModel
 from infi.clickhouse_orm import engines as infi_engines
 from statsd.defaults.django import statsd

-from .clickhouse_models import ClickHouseModel
 from .configuration import config
 from .database import connections
 from .utils import format_datetime

-logger = logging.getLogger('django-clickhouse')
-
-
 class InsertOnlyEngineMixin:
-    def get_insert_batch(self, model_cls: Type[ClickHouseModel], objects: List[DjangoModel]) -> Iterable[tuple]:
+    def get_insert_batch(self, model_cls: Type['ClickHouseModel'], objects: List[DjangoModel]) -> Iterable[tuple]:
         """
         Gets a list of model_cls instances to insert into database
         :param model_cls: ClickHouseModel subclass to import
@@ -49,64 +44,43 @@ class CollapsingMergeTree(InsertOnlyEngineMixin, infi_engines.CollapsingMergeTree):
         self.version_col = kwargs.pop('version_col', None)
         super(CollapsingMergeTree, self).__init__(*args, **kwargs)

-    def _get_final_versions_by_version(self, db_alias: str, model_cls: Type[ClickHouseModel], object_pks: Iterable[str],
-                                       columns: str, date_range_filter: str = '') -> List[NamedTuple]:
-        """
-        Performs request to ClickHouse in order to fetch latest version for each object pk
-        :param db_alias: ClickHouse database alias used
-        :param model_cls: Model class for which data is fetched
-        :param object_pks: Objects primary keys to filter by
-        :param columns: Columns to fetch
-        :param date_range_filter: Optional date_range_filter which speeds up query if date_col is set
-        :return: List of named tuples with requested columns
-        """
-        if date_range_filter:
-            date_range_filter = 'PREWHERE {}'.format(date_range_filter)
-
+    def _get_final_versions_by_version(self, db_alias, model_cls, min_date, max_date, object_pks, date_col, columns):
         query = """
-            SELECT {columns}
-            FROM $table
-            {date_range_filter}
-            WHERE `{pk_column}` IN ({object_pks})
-            ORDER BY `{pk_column}`, `{version_col}` DESC
-            LIMIT 1 BY `{pk_column}`
-        """.format(columns=','.join(columns), version_col=self.version_col, pk_column=self.pk_column,
-                   date_range_filter=date_range_filter, object_pks=','.join(object_pks), sign_col=self.sign_col)
+            SELECT {columns} FROM $table WHERE (`{pk_column}`, `{version_col}`) IN (
+                SELECT `{pk_column}`, MAX(`{version_col}`)
+                FROM $table
+                PREWHERE `{date_col}` >= '{min_date}' AND `{date_col}` <= '{max_date}'
+                    AND `{pk_column}` IN ({object_pks})
+                GROUP BY `{pk_column}`
+            )
+        """.format(columns=','.join(columns), version_col=self.version_col, date_col=date_col, pk_column=self.pk_column,
+                   min_date=min_date, max_date=max_date, object_pks=','.join(object_pks))

         return connections[db_alias].select_tuples(query, model_cls)

-    def _get_final_versions_by_final(self, db_alias: str, model_cls: Type[ClickHouseModel], object_pks: Iterable[str],
-                                     columns: str, date_range_filter: str = '') -> List[NamedTuple]:
-        """
-        Performs request to ClickHouse in order to fetch latest version for each object pk
-        :param db_alias: ClickHouse database alias used
-        :param model_cls: Model class for which data is fetched
-        :param object_pks: Objects primary keys to filter by
-        :param columns: Columns to fetch
-        :param date_range_filter: Optional date_range_filter which speeds up query if date_col is set
-        :return: List of named tuples with requested columns
-        """
-        if date_range_filter:
-            date_range_filter += ' AND'
-
+    def _get_final_versions_by_final(self, db_alias, model_cls, min_date, max_date, object_pks, date_col, columns):
         query = """
             SELECT {columns} FROM $table FINAL
-            WHERE {date_range_filter} `{pk_column}` IN ({object_pks})
+            WHERE `{date_col}` >= '{min_date}' AND `{date_col}` <= '{max_date}'
+                AND `{pk_column}` IN ({object_pks})
         """
-        query = query.format(columns=','.join(columns), pk_column=self.pk_column, date_range_filter=date_range_filter,
-                             object_pks=','.join(object_pks))
+        query = query.format(columns=','.join(columns), date_col=date_col, pk_column=self.pk_column, min_date=min_date,
+                             max_date=max_date, object_pks=','.join(object_pks))
         return connections[db_alias].select_tuples(query, model_cls)

-    def _get_date_rate_filter(self, objects, model_cls: Type[ClickHouseModel], db_alias: str,
-                              date_col: Optional[str]) -> str:
+    def get_final_versions(self, model_cls: Type['ClickHouseModel'], objects: Iterable[DjangoModel],
+                           date_col: Optional[str] = None) -> Iterable[tuple]:
         """
-        Generates datetime filter to speed up final queries, if date_col is present
-        :param objects: Objects, which are inserted
-        :param model_cls: Model class for which data is fetched
-        :param db_alias: ClickHouse database alias used
-        :param date_col: Optional column name, where partition date is hold. Defaults to self.date_col
-        :return: String to add to WHERE or PREWHERE query section
+        Get objects, that are currently stored in ClickHouse.
+        Depending on the partition key this can be different for different models.
+        In common case, this method is optimized for date field that doesn't change.
+        It also supposes primary key to by self.pk_column
+        :param model_cls: ClickHouseModel subclass to import
+        :param objects: Objects for which final versions are searched
+        :param date_col: Optional column name, where partiion date is hold. Defaults to self.date_col
+        :return: A generator of named tuples, representing previous state
         """
-
         def _dt_to_str(dt: Union[datetime.date, datetime.datetime]) -> str:
             if isinstance(dt, datetime.datetime):
                 return format_datetime(dt, 0, db_alias=db_alias)
@@ -115,15 +89,10 @@ class CollapsingMergeTree(InsertOnlyEngineMixin, infi_engines.CollapsingMergeTree):
             else:
                 raise Exception('Invalid date or datetime object: `%s`' % dt)

+        if not objects:
+            raise StopIteration()
+
         date_col = date_col or self.date_col
-        if not date_col:
-            logger.warning('django-clickhouse: date_col is not provided for model %s.'
-                           ' This can cause significant performance problems while fetching data.'
-                           ' It is worth inheriting CollapsingMergeTree engine with custom get_final_versions() method,'
-                           ' based on your partition_key' % model_cls)
-            return ''

         min_date, max_date = None, None
         for obj in objects:
             obj_date = getattr(obj, date_col)
@@ -134,46 +103,26 @@ class CollapsingMergeTree(InsertOnlyEngineMixin, infi_engines.CollapsingMergeTree):
             if max_date is None or max_date < obj_date:
                 max_date = obj_date

-        min_date = _dt_to_str(min_date)
-        max_date = _dt_to_str(max_date)
-
-        return "`{date_col}` >= '{min_date}' AND `{date_col}` <= '{max_date}'".\
-            format(min_date=min_date, max_date=max_date, date_col=date_col)
-
-    def get_final_versions(self, model_cls: Type[ClickHouseModel], objects: Iterable[DjangoModel],
-                           date_col: Optional[str] = None) -> Iterable[tuple]:
-        """
-        Get objects, that are currently stored in ClickHouse.
-        Depending on the partition key this can be different for different models.
-        In common case, this method is optimized for date field that doesn't change.
-        It also supposes primary key to by self.pk_column
-        :param model_cls: ClickHouseModel subclass to import
-        :param objects: Objects for which final versions are searched
-        :param date_col: Optional column name, where partition date is hold. Defaults to self.date_col
-        :return: A generator of named tuples, representing previous state
-        """
-        if not objects:
-            raise StopIteration()
-
         object_pks = [str(getattr(obj, self.pk_column)) for obj in objects]

         db_alias = model_cls.get_database_alias()

-        date_range_filter = self._get_date_rate_filter(objects, model_cls, db_alias, date_col)
+        min_date = _dt_to_str(min_date)
+        max_date = _dt_to_str(max_date)

         # Get fields. Sign is replaced to negative for further processing
         columns = list(model_cls.fields(writable=True).keys())
         columns.remove(self.sign_col)
         columns.append('-1 AS sign')

-        params = (db_alias, model_cls, object_pks, columns, date_range_filter)
+        params = (db_alias, model_cls, min_date, max_date, object_pks, date_col, columns)

         if self.version_col:
             return self._get_final_versions_by_version(*params)
         else:
             return self._get_final_versions_by_final(*params)

-    def get_insert_batch(self, model_cls: Type[ClickHouseModel], objects: List[DjangoModel]) -> Iterable[tuple]:
+    def get_insert_batch(self, model_cls: Type['ClickHouseModel'], objects: List[DjangoModel]) -> Iterable[tuple]:
         """
         Gets a list of model_cls instances to insert into database
         :param model_cls: ClickHouseModel subclass to import
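For orientation, the `version_col` kwarg popped in the constructor hunk above is what selects `_get_final_versions_by_version()` over the FINAL-based query. Declaring it might look like this sketch — the model and field names are illustrative, and the positional engine arguments follow infi.clickhouse_orm's CollapsingMergeTree signature:

```python
from django_clickhouse.clickhouse_models import ClickHouseModel
from django_clickhouse.engines import CollapsingMergeTree
from infi.clickhouse_orm import fields


class MyStat(ClickHouseModel):  # hypothetical model
    date = fields.DateField()
    sign = fields.Int8Field(default=1)
    version = fields.UInt32Field(default=0)

    # With version_col set, get_final_versions() uses the version-based
    # query from the hunks above instead of SELECT ... FROM $table FINAL
    engine = CollapsingMergeTree('date', ('date',), 'sign', version_col='version')
```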

src/django_clickhouse/management/commands/clickhouse_migrate.py
@@ -1,45 +0,0 @@
-"""
-Django command that applies migrations for ClickHouse database
-"""
-import json
-
-from django.apps import apps as django_apps
-from django.core.management import BaseCommand, CommandParser
-
-from ...configuration import config
-from ...migrations import migrate_app
-
-
-class Command(BaseCommand):
-    help = 'Migrates ClickHouse databases'
-    requires_migrations_checks = False
-
-    def add_arguments(self, parser: CommandParser) -> None:
-        parser.add_argument('app_label', nargs='?', type=str,
-                            help='Django App name to migrate. By default all found apps are migrated.')
-
-        parser.add_argument('migration_number', nargs='?', type=int,
-                            help='Migration number in selected django app to migrate to.'
-                                 ' By default all available migrations are applied.'
-                                 ' Note that library currently have no ability rollback migrations')
-
-        parser.add_argument('--database', '-d', nargs='?', type=str, required=False, choices=list(config.DATABASES.keys()),
-                            help='ClickHouse database alias key from CLICKHOUSE_DATABASES django setting.'
-                                 ' By default migrations are applied to all databases.')
-
-    def handle(self, *args, **options) -> None:
-        apps = [options['app_label']] if options['app_label'] else [app.name for app in django_apps.get_app_configs()]
-        databases = [options['database']] if options['database'] else list(config.DATABASES.keys())
-        kwargs = {'up_to': options['migration_number']} if options['migration_number'] else {}
-
-        self.stdout.write(self.style.MIGRATE_HEADING(
-            "Applying ClickHouse migrations for apps %s in databases %s" % (json.dumps(apps), json.dumps(databases))))
-
-        any_migrations_applied = False
-        for app_label in apps:
-            for db_alias in databases:
-                res = migrate_app(app_label, db_alias, verbosity=options['verbosity'], **kwargs)
-                any_migrations_applied = any_migrations_applied or res
-
-        if not any_migrations_applied:
-            self.stdout.write("No migrations to apply")
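The command removed above exists only on the master side. Besides the CLI usage shown in the docs hunk earlier, it can be driven from code — a sketch, where the app label and database alias are placeholders:

```python
from django.core.management import call_command

# Equivalent to: python manage.py clickhouse_migrate my_app --database default
call_command('clickhouse_migrate', 'my_app', database='default')
```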

src/django_clickhouse/migrations.py
@@ -9,8 +9,7 @@ from django.db.models.signals import post_migrate
 from django.dispatch import receiver

 # In order to support all operations import here
-from infi.clickhouse_orm.migrations import *  # noqa F401, F403
-from infi.clickhouse_orm.migrations import RunSQL as LibRunSQL, RunPython as LibRunPython
+from infi.clickhouse_orm.migrations import *

 from infi.clickhouse_orm.database import ServerError, DatabaseException
 from infi.clickhouse_orm.fields import StringField, DateField
@ -40,69 +39,49 @@ class Migration:
|
||||||
database = database or connections[db_alias]
|
database = database or connections[db_alias]
|
||||||
|
|
||||||
for op in self.operations:
|
for op in self.operations:
|
||||||
|
model_class = getattr(op, 'model_class', None)
|
||||||
hints = getattr(op, 'hints', {})
|
hints = getattr(op, 'hints', {})
|
||||||
|
|
||||||
if db_router.allow_migrate(db_alias, self.__module__, op, **hints):
|
if db_router.allow_migrate(db_alias, self.__module__, op, model_class, **hints):
|
||||||
op.apply(database)
|
op.apply(database)
|
||||||
|
|
||||||
|
|
||||||
def migrate_app(app_label: str, db_alias: str, up_to: int = 9999, database: Optional[Database] = None,
|
def migrate_app(app_label: str, db_alias: str, up_to: int = 9999, database: Optional[Database] = None) -> None:
|
||||||
verbosity: int = 1) -> bool:
|
|
||||||
"""
|
"""
|
||||||
Migrates given django app
|
Migrates given django app
|
||||||
:param app_label: App label to migrate
|
:param app_label: App label to migrate
|
||||||
:param db_alias: Database alias to migrate
|
:param db_alias: Database alias to migrate
|
||||||
:param up_to: Migration number to migrate to
|
:param up_to: Migration number to migrate to
|
||||||
:param database: Sometimes I want to pass db object directly for testing purposes
|
:param database: Sometimes I want to pass db object directly for testing purposes
|
||||||
:param verbosity: 0-4, уровень verbosity вывода
|
:return: None
|
||||||
:return: True if any migration has been applied
|
|
||||||
"""
|
"""
|
||||||
# Can't migrate such connection, just skip it
|
# Can't migrate such connection, just skip it
|
||||||
if config.DATABASES[db_alias].get('readonly', False):
|
if config.DATABASES[db_alias].get('readonly', False):
|
||||||
if verbosity > 1:
|
return
|
||||||
print('Skipping database "%s": marked as readonly' % db_alias)
|
|
||||||
return False
|
|
||||||
|
|
||||||
# Ignore force not migrated databases
|
# Ignore force not migrated databases
|
||||||
if not config.DATABASES[db_alias].get('migrate', True):
|
if not config.DATABASES[db_alias].get('migrate', True):
|
||||||
if verbosity > 1:
|
return
|
||||||
print('Skipping database "%s": migrations are restricted in configuration' % db_alias)
|
|
||||||
return False
|
|
||||||
|
|
||||||
migrations_package = "%s.%s" % (app_label, config.MIGRATIONS_PACKAGE)
|
migrations_package = "%s.%s" % (app_label, config.MIGRATIONS_PACKAGE)
|
||||||
|
|
||||||
if not module_exists(migrations_package):
|
if module_exists(migrations_package):
|
||||||
if verbosity > 1:
|
database = database or connections[db_alias]
|
||||||
print('Skipping migrations for app "%s": no migration_package "%s"' % (app_label, migrations_package))
|
migration_history_model = lazy_class_import(config.MIGRATION_HISTORY_MODEL)
|
||||||
return False
|
|
||||||
|
|
||||||
database = database or connections[db_alias]
|
applied_migrations = migration_history_model.get_applied_migrations(db_alias, migrations_package)
|
||||||
migration_history_model = lazy_class_import(config.MIGRATION_HISTORY_MODEL)
|
modules = import_submodules(migrations_package)
|
||||||
|
unapplied_migrations = set(modules.keys()) - applied_migrations
|
||||||
|
|
||||||
applied_migrations = migration_history_model.get_applied_migrations(db_alias, migrations_package)
|
for name in sorted(unapplied_migrations):
|
||||||
modules = import_submodules(migrations_package)
|
|
||||||
unapplied_migrations = set(modules.keys()) - applied_migrations
|
|
||||||
|
|
||||||
any_applied = False
|
|
||||||
for name in sorted(unapplied_migrations):
|
|
||||||
if int(name[:4]) > up_to:
|
|
||||||
break
|
|
||||||
|
|
||||||
if verbosity > 0:
|
|
||||||
print('Applying ClickHouse migration %s for app %s in database %s' % (name, app_label, db_alias))
|
print('Applying ClickHouse migration %s for app %s in database %s' % (name, app_label, db_alias))
|
||||||
|
migration = modules[name].Migration()
|
||||||
|
migration.apply(db_alias, database=database)
|
||||||
|
|
||||||
migration = modules[name].Migration()
|
migration_history_model.set_migration_applied(db_alias, migrations_package, name)
|
||||||
migration.apply(db_alias, database=database)
|
|
||||||
|
|
||||||
migration_history_model.set_migration_applied(db_alias, migrations_package, name)
|
if int(name[:4]) >= up_to:
|
||||||
any_applied = True
|
break
|
||||||
|
|
||||||
if not any_applied:
|
|
||||||
if verbosity > 1:
|
|
||||||
print('No migrations to apply for app "%s" does not exist' % app_label)
|
|
||||||
return False
|
|
||||||
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
@receiver(post_migrate)
|
@receiver(post_migrate)
|
||||||
|
@ -112,19 +91,21 @@ def clickhouse_migrate(sender, **kwargs):
|
||||||
return
|
return
|
||||||
|
|
||||||
if kwargs.get('using', DJANGO_DEFAULT_DB_ALIAS) != DJANGO_DEFAULT_DB_ALIAS:
|
if kwargs.get('using', DJANGO_DEFAULT_DB_ALIAS) != DJANGO_DEFAULT_DB_ALIAS:
|
||||||
# Don't call sync for every database. Just once.
|
# Не надо выполнять синхронизацию для каждого шарда. Только один раз.
|
||||||
return
|
return
|
||||||
|
|
||||||
app_name = kwargs['app_config'].name
|
app_name = kwargs['app_config'].name
|
||||||
|
|
||||||
for db_alias in config.DATABASES:
|
for db_alias in config.DATABASES:
|
||||||
migrate_app(app_name, db_alias, verbosity=kwargs.get('verbosity', 1))
|
migrate_app(app_name, db_alias)
|
||||||
|
|
||||||
|
|
||||||
class MigrationHistory(ClickHouseModel):
|
class MigrationHistory(ClickHouseModel):
|
||||||
"""
|
"""
|
||||||
A model for storing which migrations were already applied to database.
|
A model for storing which migrations were already applied to database.
|
||||||
|
This
|
||||||
"""
|
"""
|
||||||
|
|
||||||
db_alias = StringField()
|
db_alias = StringField()
|
||||||
package_name = StringField()
|
package_name = StringField()
|
||||||
module_name = StringField()
|
module_name = StringField()
|
||||||
|
@ -176,15 +157,3 @@ class MigrationHistory(ClickHouseModel):
|
||||||
@classmethod
|
@classmethod
|
||||||
def table_name(cls):
|
def table_name(cls):
|
||||||
return 'django_clickhouse_migrations'
|
return 'django_clickhouse_migrations'
|
||||||
|
|
||||||
|
|
||||||
class RunSQL(LibRunSQL):
|
|
||||||
def __init__(self, *args, hints: Optional[dict] = None, **kwargs):
|
|
||||||
super().__init__(*args, **kwargs)
|
|
||||||
self.hints = hints or {}
|
|
||||||
|
|
||||||
|
|
||||||
class RunPython(LibRunPython):
|
|
||||||
def __init__(self, *args, hints: Optional[dict] = None, **kwargs):
|
|
||||||
super().__init__(*args, **kwargs)
|
|
||||||
self.hints = hints or {}
|
|
||||||
|
|
|
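A minimal sketch of calling migrate_app() directly, as the tests later in this diff do; the keyword arguments follow the master-side signature removed above:

from django_clickhouse.migrations import migrate_app

# Apply unapplied ClickHouse migrations of app 'tests' on alias 'default',
# stopping after migration number 2. On master it returns True if
# anything was applied; on v1.0.1 it returns None.
applied = migrate_app('tests', 'default', up_to=2, verbosity=2)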

src/django_clickhouse/models.py

@@ -5,6 +5,7 @@ It saves all operations to storage in order to write them to ClickHouse later.
 from typing import Optional, Any, Type, Set
 
+import six
 from django.db import transaction
 from django.db.models import QuerySet as DjangoQuerySet, Model as DjangoModel, Manager as DjangoManager
 from django.db.models.manager import BaseManager
@@ -63,8 +64,8 @@ class ClickHouseSyncBulkUpdateQuerySetMixin(ClickHouseSyncRegisterMixin, BulkUpd
         pk_name = self.model._meta.pk.name
         if returning is None:
             returning = pk_name
-        elif isinstance(returning, str):
-            returning = [pk_name, returning] if returning != '*' else '*'
+        elif isinstance(returning, six.string_types):
+            returning = [pk_name, returning]
         else:
             returning = list(returning) + [pk_name]
@@ -154,7 +155,7 @@ class ClickHouseSyncModel(DjangoModel):
         return storage_cls()
 
     @classmethod
-    def register_clickhouse_sync_model(cls, model_cls: Type['ClickHouseModel']) -> None:  # noqa: F821
+    def register_clickhouse_sync_model(cls, model_cls: Type['ClickHouseModel']) -> None:
         """
         Registers ClickHouse model to listen to this model updates
         :param model_cls: Model class to register
@@ -166,7 +167,7 @@ class ClickHouseSyncModel(DjangoModel):
         cls._clickhouse_sync_models.add(model_cls)
 
     @classmethod
-    def get_clickhouse_sync_models(cls) -> Set['ClickHouseModel']:  # noqa: F821
+    def get_clickhouse_sync_models(cls) -> Set['ClickHouseModel']:
         """
         Returns all clickhouse models, listening to this class
         :return: A set of model classes to sync
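For context, ClickHouseSyncModel is the base class that queues these insert/update operations. A minimal sketch of a Django model using it; the field names are illustrative:

from django.db import models

from django_clickhouse.models import ClickHouseSyncModel


class User(ClickHouseSyncModel):
    # Every save()/bulk operation on this model registers an operation
    # in the configured storage for later sync to ClickHouse.
    first_name = models.CharField(max_length=50)
    birthday = models.DateField()
    visits = models.IntegerField(default=0)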

src/django_clickhouse/routers.py

@@ -4,6 +4,7 @@ This file defines router to find appropriate database
 from typing import Type
 
 import random
+import six
 from infi.clickhouse_orm.migrations import Operation, DropTable, CreateTable
 
 from .clickhouse_models import ClickHouseModel
@@ -30,26 +31,24 @@ class DefaultRouter:
         """
         return random.choice(model.write_db_aliases)
 
-    def allow_migrate(self, db_alias: str, app_label: str, operation: Operation, **hints) -> bool:
+    def allow_migrate(self, db_alias: str, app_label: str, operation: Operation,
+                      model=None, **hints) -> bool:
         """
         Checks if migration can be applied to given database
         :param db_alias: Database alias to check
         :param app_label: App from which migration is got
         :param operation: Operation object to perform
+        :param model: Model migration is applied to
         :param hints: Hints to make correct decision
         :return: boolean
         """
         if hints.get("force_migrate_on_databases", None):
             return db_alias in hints["force_migrate_on_databases"]
 
-        model = hints.get('model') or getattr(operation, 'model_class', None)
-        if model is None:
-            raise ValueError('"model_class" attribute is not defined for operation "%s". '
-                             'Please provide "force_migrate_on_databases" or "model" in hints.'
-                             % operation.__class__.__name__)
-
-        model = '%s.%s.%s' % (app_label, config.MODELS_MODULE, model) \
-            if isinstance(model, str) else model
+        if hints.get('model'):
+            model = '%s.%s.%s' % (app_label, config.MODELS_MODULE, hints['model']) \
+                if isinstance(hints['model'], six.string_types) else hints['model']
 
         model = lazy_class_import(model)
 
         if operation.__class__ not in {CreateTable, DropTable}:
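How the two hint styles reach allow_migrate() in practice: a sketch of a migration module under the master-side API. The SQL statements and the model name are placeholders:

from django_clickhouse import migrations


class Migration(migrations.Migration):
    operations = [
        # Resolved via the 'model' hint: a class object, a dotted path, or a
        # bare class name looked up in <app_label>.<MODELS_MODULE>.
        migrations.RunSQL('SELECT 1', hints={'model': 'ClickHouseTestModel'}),

        # Bypasses model resolution entirely: the operation is applied
        # only on the listed database aliases.
        migrations.RunSQL('SELECT 1', hints={'force_migrate_on_databases': ['secondary']}),
    ]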

src/django_clickhouse/serializers.py

@@ -7,7 +7,7 @@ from .utils import model_to_dict
 
 
 class Django2ClickHouseModelSerializer:
-    def __init__(self, model_cls: Type['ClickHouseModel'], fields: Optional[Iterable[str]] = None,  # noqa: F821
+    def __init__(self, model_cls: Type['ClickHouseModel'], fields: Optional[Iterable[str]] = None,
                  exclude_fields: Optional[Iterable[str]] = None, writable: bool = False,
                  defaults: Optional[dict] = None) -> None:
         """

src/django_clickhouse/storages.py

@@ -11,15 +11,13 @@ import logging
 from typing import Any, Optional, List, Tuple
 
 import os
-from celery.utils.nodenames import gethostname
-from django.utils.timezone import now
+from six import with_metaclass
 from statsd.defaults.django import statsd
 
 from .configuration import config
 from .exceptions import ConfigurationError, RedisLockTimeoutError
 from .redis import redis_zadd
-from .utils import check_pid_exists, get_subclasses, SingletonMeta
+from .utils import check_pid, get_subclasses, SingletonMeta
 
 logger = logging.getLogger('django-clickhouse')
 
@@ -85,7 +83,7 @@ class Storage:
         :param kwargs: Storage dependant arguments
         :return: Number of records in queue
         """
-        raise NotImplementedError()
+        raise NotImplemented()
 
     def get_operations(self, import_key: str, count: int, **kwargs) -> List[Tuple[str, str]]:
         """
@@ -96,7 +94,7 @@ class Storage:
         :param kwargs: Storage dependant arguments
         :return: A list of tuples (operation, pk) in incoming order.
         """
-        raise NotImplementedError()
+        raise NotImplemented()
 
     def register_operations(self, import_key: str, operation: str, *pks: Any) -> int:
         """
@@ -137,24 +135,24 @@ class Storage:
         This method is used in tests to drop all storage data
         :return: None
         """
-        raise NotImplementedError()
+        raise NotImplemented()
 
     def get_last_sync_time(self, import_key: str) -> Optional[datetime.datetime]:
         """
         Gets the last time, sync has been executed
         :return: datetime.datetime if last sync has been. Otherwise - None.
         """
-        raise NotImplementedError()
+        raise NotImplemented()
 
     def set_last_sync_time(self, import_key: str, dt: datetime.datetime) -> None:
         """
         Sets successful sync time
         :return: None
         """
-        raise NotImplementedError()
+        raise NotImplemented()
 
 
-class RedisStorage(Storage, metaclass=SingletonMeta):
+class RedisStorage(with_metaclass(SingletonMeta, Storage)):
     """
     Fast in-memory storage made on bases of redis and redis-py library.
     Requires:
@@ -189,7 +187,8 @@ class RedisStorage(Storage, metaclass=SingletonMeta):
 
     def get_operations(self, import_key, count, **kwargs):
         ops_key = self.REDIS_KEY_OPS_TEMPLATE.format(import_key=import_key)
-        res = self._redis.zrangebyscore(ops_key, '-inf', now().timestamp(), start=0, num=count, withscores=True)
+        res = self._redis.zrangebyscore(ops_key, '-inf', datetime.datetime.now().timestamp(), start=0, num=count,
+                                        withscores=True)
 
         if res:
             ops, scores = zip(*res)
@@ -216,31 +215,19 @@ class RedisStorage(Storage, metaclass=SingletonMeta):
         # Block process to be single threaded. Default sync delay is 10 * default sync delay.
         # It can be changed for model, by passing `lock_timeout` argument to pre_sync
         lock = self.get_lock(import_key, **kwargs)
-        current_host_name = gethostname()
         lock_pid_key = self.REDIS_KEY_LOCK_PID.format(import_key=import_key)
         try:
             lock.acquire()
-            self._redis.set(lock_pid_key, '%s:%s' % (current_host_name, os.getpid()))
+            self._redis.set(lock_pid_key, os.getpid())
         except RedisLockTimeoutError:
             statsd.incr('%s.sync.%s.lock.timeout' % (config.STATSD_PREFIX, import_key))
 
             # Lock is busy. But If the process has been killed, I don't want to wait any more.
-            # I assume that lock has been killed if it works on the same host (other than localhost)
-            # and there is no process alive.
-            # I also assume that there are no hosts with same hostname other than localhost.
-            # Note: previously value contained only pid. Let's support old value for back compatibility
-            active_lock_data = self._redis.get(lock_pid_key).split(b":")
-            active_pid = int(active_lock_data[-1] or 0)
-            active_host_name = active_lock_data[0] \
-                if len(active_lock_data) > 1 and active_lock_data[0] != "localhost" else None
-
-            if (
-                    active_pid and active_host_name
-                    and active_host_name == current_host_name and not check_pid_exists(active_pid)
-            ):
+            # Let's check if pid exists
+            pid = int(self._redis.get(lock_pid_key) or 0)
+            if pid and not check_pid(pid):
                 statsd.incr('%s.sync.%s.lock.hard_release' % (config.STATSD_PREFIX, import_key))
                 logger.warning('django-clickhouse: hard releasing lock "%s" locked by pid %d (process is dead)'
-                               % (import_key, active_pid))
+                               % (import_key, pid))
                 self._redis.delete(lock_pid_key)
                 lock.hard_release()
                 self.pre_sync(import_key, **kwargs)
@@ -264,7 +251,7 @@ class RedisStorage(Storage, metaclass=SingletonMeta):
         self.post_batch_removed(import_key, batch_size)
         self.get_lock(import_key, **kwargs).release()
 
-        logger.info('django-clickhouse: removed %d operations from storage (key: %s)' % (batch_size, import_key))
+        logger.info('django-clickhouse: synced %d items (key: %s)' % (batch_size, import_key))
 
     def post_sync_failed(self, import_key, **kwargs):
         # unblock lock after sync completed
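The back-compatibility note in the hunk above is easiest to see with concrete values. A small illustration (not library code) of parsing both lock value formats, where "worker-1" and 12345 are made-up values:

# Master writes "<hostname>:<pid>"; older values held a bare pid.
# Redis returns bytes, hence the bytes literals here.
for raw in (b"worker-1:12345", b"12345"):
    active_lock_data = raw.split(b":")
    active_pid = int(active_lock_data[-1] or 0)  # 12345 in both cases
    active_host_name = active_lock_data[0] \
        if len(active_lock_data) > 1 and active_lock_data[0] != b"localhost" else None
    print(active_pid, active_host_name)  # (12345, b'worker-1'), then (12345, None)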

src/django_clickhouse/tasks.py

@@ -1,39 +1,35 @@
 import datetime
 import importlib
-from typing import Type, Union
 
 from celery import shared_task
-from django.apps import apps as django_apps
+from django.conf import settings
 from infi.clickhouse_orm.utils import import_submodules
 
 from django_clickhouse.clickhouse_models import ClickHouseModel
 from .configuration import config
-from .utils import get_subclasses, lazy_class_import
+from .utils import get_subclasses
 
 
 @shared_task(queue=config.CELERY_QUEUE)
-def sync_clickhouse_model(model_cls: Union[Type[ClickHouseModel], str]) -> None:
+def sync_clickhouse_model(model_cls) -> None:
     """
     Syncs one batch of given ClickHouseModel
-    :param model_cls: ClickHouseModel subclass or python path to it
+    :param model_cls: ClickHouseModel subclass
     :return: None
     """
-    model_cls = lazy_class_import(model_cls)
-
-    # If sync will not finish it is not fatal to set up sync period here: sync will be executed next time
     model_cls.get_storage().set_last_sync_time(model_cls.get_import_key(), datetime.datetime.now())
     model_cls.sync_batch_from_storage()
 
 
 @shared_task(queue=config.CELERY_QUEUE)
-def clickhouse_auto_sync() -> None:
+def clickhouse_auto_sync():
     """
     Plans syncing models
     :return: None
     """
     # Import all model modules
-    for app in django_apps.get_app_configs():
-        package_name = "%s.%s" % (app.name, config.MODELS_MODULE)
+    for app in settings.INSTALLED_APPS:
+        package_name = "%s.%s" % (app, config.MODELS_MODULE)
         try:
             module = importlib.import_module(package_name)
             if hasattr(module, '__path__'):
@@ -41,8 +37,9 @@ def clickhouse_auto_sync() -> None:
         except ImportError:
             pass
 
+    # Start
     for cls in get_subclasses(ClickHouseModel, recursive=True):
         if cls.need_sync():
-            # I pass class as a string in order to make it JSON serializable
-            cls_path = "%s.%s" % (cls.__module__, cls.__name__)
-            sync_clickhouse_model.delay(cls_path)
+            # Even if the sync suddenly fails to run, it is no problem that we already set the sync period:
+            # it will be executed by the next task after the interval.
+            sync_clickhouse_model.delay(cls)
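clickhouse_auto_sync is meant to be fired periodically by celery beat. A typical schedule entry (a sketch; the two-second interval is an example and should stay below your models' sync_delay):

from datetime import timedelta

CELERYBEAT_SCHEDULE = {
    'clickhouse_auto_sync': {
        'task': 'django_clickhouse.tasks.clickhouse_auto_sync',
        'schedule': timedelta(seconds=2)
    }
}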

src/django_clickhouse/utils.py

@@ -7,6 +7,7 @@ from itertools import chain
 from typing import Union, Any, Optional, TypeVar, Set, Dict, Iterable, Tuple, Iterator, Callable, List
 
 import pytz
+import six
 from importlib import import_module
 from importlib.util import find_spec
 from django.db.models import Model as DjangoModel
@@ -75,7 +76,7 @@ def lazy_class_import(obj: Union[str, Any]) -> Any:
     :param obj: A string class path or object to return
     :return: Imported object
     """
-    if isinstance(obj, str):
+    if isinstance(obj, six.string_types):
         module_name, obj_name = obj.rsplit('.', 1)
         module = import_module(module_name)
 
@@ -127,7 +128,7 @@ def model_to_dict(instance: DjangoModel, fields: Optional[Iterable[str]] = None
     return data
 
 
-def check_pid_exists(pid):
+def check_pid(pid):
     """
     Check For the existence of a unix pid.
     """
@@ -171,28 +172,11 @@ class ExceptionThread(Thread):
         super(ExceptionThread, self).__init__(*args, **kwargs)
         self.exc = None
 
-    def _close_django_db_connections(self):
-        """
-        In Django every thread has its own database connection pool.
-        But django does not close them automatically in child threads.
-        As a result, this can cause database connection leaking.
-        Here we close connections manually when thread execution is finished.
-        """
-        try:
-            from django.db import connections as db_connections
-        except (ModuleNotFoundError, ImportError):
-            db_connections = None
-
-        if db_connections:
-            db_connections.close_all()
-
     def run(self):
         try:
             return super(ExceptionThread, self).run()
         except Exception as e:
             self.exc = e
-        finally:
-            self._close_django_db_connections()
 
     def join(self, timeout=None):
         super(ExceptionThread, self).join(timeout=timeout)
@@ -287,4 +271,4 @@ class SingletonMeta(type):
     def __call__(cls, *args, **kwargs):
         if cls not in cls._instances:
             cls._instances[cls] = super(SingletonMeta, cls).__call__(*args, **kwargs)
         return cls._instances[cls]
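lazy_class_import, touched above, accepts either a class object or its dotted path. A quick usage sketch:

from django_clickhouse.storages import RedisStorage
from django_clickhouse.utils import lazy_class_import

# Both calls return the same class object; strings are resolved via importlib.
assert lazy_class_import('django_clickhouse.storages.RedisStorage') is RedisStorage
assert lazy_class_import(RedisStorage) is RedisStorage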

tests/apps.py

@@ -1,6 +0,0 @@
-from django.apps import AppConfig
-
-
-class UnitTestAppConfig(AppConfig):
-    name = 'tests'
-    verbose_name = "Unit test app"

10 tests/fixtures/test_model.json vendored

@@ -5,7 +5,7 @@
     "fields": {
       "value": 100,
       "created_date": "2018-01-01",
-      "created": "2018-01-01 00:00:00+0000"
+      "created": "2018-01-01 00:00:00"
     }
   },
   {
@@ -14,7 +14,7 @@
     "fields": {
       "value": 200,
       "created_date": "2018-02-01",
-      "created": "2018-02-01 00:00:00+0000"
+      "created": "2018-02-01 00:00:00"
     }
   },
   {
@@ -23,7 +23,7 @@
     "fields": {
       "value": 300,
       "created_date": "2018-03-01",
-      "created": "2018-03-01 00:00:00+0000"
+      "created": "2018-03-01 00:00:00"
     }
   },
   {
@@ -32,7 +32,7 @@
     "fields": {
       "value": 400,
       "created_date": "2018-04-01",
-      "created": "2018-04-01 00:00:00+0000"
+      "created": "2018-04-01 00:00:00"
     }
   },
   {
@@ -41,7 +41,7 @@
     "fields": {
       "value": 500,
       "created_date": "2018-05-01",
-      "created": "2018-05-01 00:00:00+0000"
+      "created": "2018-05-01 00:00:00"
     }
   }
 ]

4 tests/fixtures/test_secondary_model.json vendored

@@ -5,7 +5,7 @@
     "fields": {
       "value": 100,
       "created_date": "2018-01-01",
-      "created": "2018-02-01 00:00:00+0000"
+      "created": "2018-02-01 00:00:00"
     }
   },
   {
@@ -14,7 +14,7 @@
     "fields": {
       "value": 200,
       "created_date": "2018-02-01",
-      "created": "2018-02-01 00:00:00+0000"
+      "created": "2018-02-01 00:00:00"
     }
   }
 ]

@@ -8,7 +8,6 @@ from time import sleep
 
 import datetime
 
-from django.utils.timezone import now
 
 # set Django environment
 sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
@@ -16,9 +15,9 @@ os.environ.setdefault("DJANGO_SETTINGS_MODULE", 'tests.settings')
 django.setup()
 
 # This imports must be after django activation
-from django.db.models import F  # noqa: E402
-from tests.clickhouse_models import ClickHouseCollapseTestModel  # noqa: E402
-from tests.models import TestModel  # noqa: E402
+from django.db.models import F
+from tests.clickhouse_models import ClickHouseCollapseTestModel
+from tests.models import TestModel
 
 logger = logging.getLogger('django-clickhouse')
 
@@ -26,7 +25,7 @@ logger = logging.getLogger('django-clickhouse')
 def create(batch_size=1000, test_time=60, period=1, **kwargs):
     for iteration in range(int(test_time / period)):
         res = TestModel.objects.db_manager('test_db').bulk_create([
-            TestModel(created=now(), created_date='2018-01-01', value=iteration * batch_size + i)
+            TestModel(created=datetime.datetime.now(), created_date='2018-01-01', value=iteration * batch_size + i)
             for i in range(batch_size)
         ])
         logger.info('django-clickhouse: test created %d records' % len(res))
@@ -55,8 +54,8 @@ def sync(period=1, test_time=60, **kwargs):
     if kwargs['once']:
         ClickHouseCollapseTestModel.sync_batch_from_storage()
     else:
-        start = now()
-        while (now() - start).total_seconds() < test_time:
+        start = datetime.datetime.now()
+        while (datetime.datetime.now() - start).total_seconds() < test_time:
             ClickHouseCollapseTestModel.sync_batch_from_storage()
             sleep(period)
 

tests/settings.py

@@ -1,40 +1,36 @@
 """
 This file contains django settings to run tests with runtests.py
 """
-from os import environ
-
 SECRET_KEY = 'fake-key'
-USE_TZ = True
 
 DATABASES = {
     'default': {
         'ENGINE': 'django.db.backends.postgresql_psycopg2',
         'NAME': 'test',
-        'USER': environ.get('PGUSER', 'test'),
-        'PASSWORD': environ.get('PGPASS', 'test'),
-        'HOST': environ.get('PGHOST', '127.0.0.1'),
-        'PORT': environ.get('PGPORT', 5432)
+        'USER': 'test',
+        'PASSWORD': 'test',
+        'HOST': '127.0.0.1',
+        'PORT': '5432'
     },
     'secondary': {
         'ENGINE': 'django.db.backends.postgresql_psycopg2',
         'NAME': 'test2',
-        'USER': environ.get('PGUSER', 'test'),
-        'PASSWORD': environ.get('PGPASS', 'test'),
-        'HOST': environ.get('PGHOST', '127.0.0.1'),
-        'PORT': environ.get('PGPORT', 5432)
+        'USER': 'test',
+        'PASSWORD': 'test',
+        'HOST': '127.0.0.1',
+        'PORT': '5432'
     },
 
     # I need separate connections for multiprocessing tests
     'test_db': {
         'ENGINE': 'django.db.backends.postgresql_psycopg2',
         'NAME': 'test_test',
-        'USER': environ.get('PGUSER', 'test'),
-        'PASSWORD': environ.get('PGPASS', 'test'),
-        'HOST': environ.get('PGHOST', '127.0.0.1'),
-        'PORT': environ.get('PGPORT', 5432)
+        'USER': 'test',
+        'PASSWORD': 'test',
+        'HOST': '127.0.0.1',
+        'PORT': '5432'
     },
 }
-DEFAULT_AUTO_FIELD = 'django.db.models.AutoField'
 
 LOGGING = {
     'version': 1,
@@ -57,33 +53,27 @@ LOGGING = {
 
 INSTALLED_APPS = [
     "src",
-    # This app is included with config in order to test all is working fine here
-    "tests.apps.UnitTestAppConfig"
+    "tests"
 ]
 
 CLICKHOUSE_DATABASES = {
     'default': {
-        'db_url': environ.get('CLICK_HOUSE_HOST', 'http://localhost:8123/'),
         'db_name': 'test',
         'username': 'default',
         'password': ''
     },
     'secondary': {
-        'db_url': environ.get('CLICK_HOUSE_HOST', 'http://localhost:8123/'),
         'db_name': 'test_2',
         'username': 'default',
         'password': ''
    },
     'no_migrate': {
-        'db_url': environ.get('CLICK_HOUSE_HOST', 'http://localhost:8123/'),
         'db_name': 'test_3',
         'username': 'default',
         'password': '',
         'migrate': False
     },
     'readonly': {
-        'db_url': environ.get('CLICK_HOUSE_HOST', 'http://localhost:8123/'),
         'db_name': 'test_3',
         'username': 'default',
         'password': '',
@@ -94,8 +84,8 @@ CLICKHOUSE_DATABASES = {
 CLICKHOUSE_SYNC_BATCH_SIZE = 5000
 
 CLICKHOUSE_REDIS_CONFIG = {
-    'host': environ.get('REDIS_HOST', '127.0.0.1'),
-    'port': environ.get('REDIS_PORT', 6379),
+    'host': '127.0.0.1',
+    'port': 6379,
     'db': 8,
     'socket_timeout': 10
 }

tests/test_clickhouse_models.py

@@ -1,7 +1,6 @@
 import datetime
 
 from django.test import TestCase
-from django.utils.timezone import now
 
 from tests.clickhouse_models import ClickHouseTestModel
 
@@ -21,11 +20,11 @@ class ClickHouseModelTest(TestCase):
         self.assertTrue(ClickHouseTestModel.need_sync())
 
         # Time hasn't passed - no sync
-        self.storage.set_last_sync_time(ClickHouseTestModel.get_import_key(), now())
+        self.storage.set_last_sync_time(ClickHouseTestModel.get_import_key(), datetime.datetime.now())
         self.assertFalse(ClickHouseTestModel.need_sync())
 
         # Time has passed
         sync_delay = ClickHouseTestModel.get_sync_delay()
         self.storage.set_last_sync_time(ClickHouseTestModel.get_import_key(),
-                                        now() - datetime.timedelta(seconds=sync_delay + 1))
+                                        datetime.datetime.now() - datetime.timedelta(seconds=sync_delay + 1))
         self.assertTrue(ClickHouseTestModel.need_sync())
 

tests/test_utils.py

@@ -48,3 +48,5 @@ class NamedTupleTest(TestCase):
         t2 = TestTuple(1, 2, 3)
         self.assertEqual(t1, t2)
         self.assertEqual((1, 2, 3), t1)
+
+

tests/test_engines.py

@@ -88,25 +88,12 @@ class CollapsingMergeTreeTest(TestCase):
                                                                                self.objects, date_col='created')
         self._test_final_versions(final_versions)
 
-    def test_get_final_versions_by_final_no_date_col(self):
-        ClickHouseCollapseTestModel.engine.date_col = None
-        final_versions = ClickHouseCollapseTestModel.engine.get_final_versions(ClickHouseCollapseTestModel,
-                                                                               self.objects)
-        self._test_final_versions(final_versions)
-
     def test_get_final_versions_by_version_datetime(self):
         ClickHouseCollapseTestModel.engine.version_col = 'version'
         final_versions = ClickHouseCollapseTestModel.engine.get_final_versions(ClickHouseCollapseTestModel,
                                                                                self.objects, date_col='created')
         self._test_final_versions(final_versions)
 
-    def test_get_final_versions_by_version_no_date_col(self):
-        ClickHouseCollapseTestModel.engine.version_col = 'version'
-        ClickHouseCollapseTestModel.engine.date_col = None
-        final_versions = ClickHouseCollapseTestModel.engine.get_final_versions(ClickHouseCollapseTestModel,
-                                                                               self.objects)
-        self._test_final_versions(final_versions)
-
     def test_versions(self):
         ClickHouseCollapseTestModel.engine.version_col = 'version'
         batch = ClickHouseCollapseTestModel.get_insert_batch(self.objects)

tests/test_migrations.py

@@ -1,19 +1,14 @@
-from typing import List, Dict, Any
-from unittest import mock
-
-from django.conf import settings
 from django.test import TestCase, override_settings
+from django_clickhouse.migrations import MigrationHistory
 
-from django_clickhouse.configuration import config
 from django_clickhouse.database import connections
-from django_clickhouse.management.commands.clickhouse_migrate import Command
-from django_clickhouse.migrations import MigrationHistory, migrate_app
+from django_clickhouse.migrations import migrate_app
 from django_clickhouse.routers import DefaultRouter
 from tests.clickhouse_models import ClickHouseTestModel
 
 
 class NoMigrateRouter(DefaultRouter):
-    def allow_migrate(self, db_alias, app_label, operation, **hints):
+    def allow_migrate(self, db_alias, app_label, operation, model=None, **hints):
         return False
 
@@ -58,104 +53,3 @@ class MigrateAppTest(TestCase):
     def test_readonly_connections(self):
         migrate_app('tests', 'readonly')
         self.assertFalse(table_exists(connections['readonly'], ClickHouseTestModel))
-
-
-@override_settings(CLICKHOUSE_MIGRATE_WITH_DEFAULT_DB=False)
-@mock.patch('django_clickhouse.management.commands.clickhouse_migrate.migrate_app', return_value=True)
-class MigrateDjangoCommandTest(TestCase):
-    APP_LABELS = ('src', 'tests')
-
-    def setUp(self) -> None:
-        self.cmd = Command()
-
-    def test_handle_all(self, migrate_app_mock):
-        self.cmd.handle(verbosity=3, app_label=None, database=None, migration_number=None)
-
-        self.assertEqual(len(config.DATABASES.keys()) * len(self.APP_LABELS), migrate_app_mock.call_count)
-        for db_alias in config.DATABASES.keys():
-            for app_label in self.APP_LABELS:
-                migrate_app_mock.assert_any_call(app_label, db_alias, verbosity=3)
-
-    def test_handle_app(self, migrate_app_mock):
-        self.cmd.handle(verbosity=3, app_label='tests', database=None, migration_number=None)
-
-        self.assertEqual(len(config.DATABASES.keys()), migrate_app_mock.call_count)
-        for db_alias in config.DATABASES.keys():
-            migrate_app_mock.assert_any_call('tests', db_alias, verbosity=3)
-
-    def test_handle_database(self, migrate_app_mock):
-        self.cmd.handle(verbosity=3, database='default', app_label=None, migration_number=None)
-
-        self.assertEqual(len(settings.INSTALLED_APPS), migrate_app_mock.call_count)
-        for app_label in self.APP_LABELS:
-            migrate_app_mock.assert_any_call(app_label, 'default', verbosity=3)
-
-    def test_handle_app_and_database(self, migrate_app_mock):
-        self.cmd.handle(verbosity=3, app_label='tests', database='default', migration_number=None)
-
-        migrate_app_mock.assert_called_with('tests', 'default', verbosity=3)
-
-    def test_handle_migration_number(self, migrate_app_mock):
-        self.cmd.handle(verbosity=3, database='default', app_label='tests', migration_number=1)
-
-        migrate_app_mock.assert_called_with('tests', 'default', up_to=1, verbosity=3)
-
-    def _test_parser_results(self, argv: List[str], expected: Dict[str, Any]) -> None:
-        """
-        Tests if parser process input correctly.
-        Checks only expected parameters, ignores others.
-        :param argv: List of string arguments from command line
-        :param expected: Dictionary of expected results
-        :return: None
-        :raises AssertionError: If expected result is incorrect
-        """
-        parser = self.cmd.create_parser('./manage.py', 'clickhouse_migrate')
-
-        options = parser.parse_args(argv)
-
-        # Copied from django.core.management.base.BaseCommand.run_from_argv('...')
-        cmd_options = vars(options)
-        cmd_options.pop('args', ())
-
-        self.assertDictEqual(expected, {opt: cmd_options[opt] for opt in expected.keys()})
-
-    def test_parser(self, _):
-        with self.subTest('Simple'):
-            self._test_parser_results([], {
-                'app_label': None,
-                'database': None,
-                'migration_number': None,
-                'verbosity': 1
-            })
-
-        with self.subTest('App label'):
-            self._test_parser_results(['tests'], {
-                'app_label': 'tests',
-                'database': None,
-                'migration_number': None,
-                'verbosity': 1
-            })
-
-        with self.subTest('App label and migration number'):
-            self._test_parser_results(['tests', '123'], {
-                'app_label': 'tests',
-                'database': None,
-                'migration_number': 123,
-                'verbosity': 1
-            })
-
-        with self.subTest('Database'):
-            self._test_parser_results(['--database', 'default'], {
-                'app_label': None,
-                'database': 'default',
-                'migration_number': None,
-                'verbosity': 1
-            })
-
-        with self.subTest('Verbosity'):
-            self._test_parser_results(['--verbosity', '2'], {
-                'app_label': None,
-                'database': None,
-                'migration_number': None,
-                'verbosity': 2
-            })

tests/test_models.py

@@ -25,8 +25,6 @@ class TestOperations(TransactionTestCase):
     fixtures = ['test_model']
     django_model = TestModel
     clickhouse_model = ClickHouseTestModel
 
-    databases = ['default', 'secondary']
     db_alias = 'default'
     multi_db = True
 
@@ -40,7 +38,7 @@ class TestOperations(TransactionTestCase):
 
     def test_save(self):
         # INSERT operation
-        instance = self.django_model(created_date=datetime.date.today(), created=now(), value=2)
+        instance = self.django_model(created_date=datetime.date.today(), created=datetime.datetime.now(), value=2)
         instance.save()
         self.assertListEqual([('insert', "%s.%d" % (self.db_alias, instance.pk))],
                              self.storage.get_operations(self.clickhouse_model.get_import_key(), 10))
@@ -52,13 +50,13 @@ class TestOperations(TransactionTestCase):
                              self.storage.get_operations(self.clickhouse_model.get_import_key(), 10))
 
     def test_create(self):
-        instance = self.django_model.objects.create(pk=100555, created_date=datetime.date.today(), created=now(),
-                                                    value=2)
+        instance = self.django_model.objects.create(pk=100555, created_date=datetime.date.today(),
+                                                    created=datetime.datetime.now(), value=2)
         self.assertListEqual([('insert', "%s.%d" % (self.db_alias, instance.pk))],
                              self.storage.get_operations(self.clickhouse_model.get_import_key(), 10))
 
     def test_bulk_create(self):
-        items = [self.django_model(created_date=datetime.date.today(), created=now(), value=i)
+        items = [self.django_model(created_date=datetime.date.today(), created=datetime.datetime.now(), value=i)
                  for i in range(5)]
         items = self.django_model.objects.bulk_create(items)
         self.assertEqual(5, len(items))
@@ -99,22 +97,6 @@ class TestOperations(TransactionTestCase):
         self.assertSetEqual({('insert', "%s.%d" % (self.db_alias, instance.pk)) for instance in items},
                             set(self.storage.get_operations(self.clickhouse_model.get_import_key(), 10)))
 
-    def test_pg_bulk_create_returning(self):
-        now_dt = now()
-        res = self.django_model.objects.pg_bulk_create([
-            {'value': i, 'created': now_dt, 'created_date': now_dt.date()}
-            for i in range(5)
-        ], returning='*')
-
-        self.assertEqual(5, len(res))
-        for i, instance in enumerate(res):
-            self.assertEqual(instance.created, now_dt)
-            self.assertEqual(instance.created_date, now_dt.date())
-            self.assertEqual(i, instance.value)
-
-        self.assertSetEqual({('insert', "%s.%d" % (self.db_alias, instance.pk)) for instance in res},
-                            set(self.storage.get_operations(self.clickhouse_model.get_import_key(), 10)))
-
     def test_pg_bulk_update(self):
         items = list(self.django_model.objects.filter(pk__in={1, 2}))
 
@@ -131,21 +113,6 @@ class TestOperations(TransactionTestCase):
         self.assertSetEqual({('update', "%s.%d" % (self.db_alias, instance.pk)) for instance in items},
                             set(self.storage.get_operations(self.clickhouse_model.get_import_key(), 10)))
 
-    def test_pg_bulk_update_returning(self):
-        items = list(self.django_model.objects.filter(pk__in={1, 2}))
-
-        res = self.django_model.objects.pg_bulk_update([
-            {'id': instance.pk, 'value': instance.pk * 10}
-            for instance in items
-        ], returning='*')
-
-        self.assertEqual(2, len(res))
-        for instance in res:
-            self.assertEqual(instance.value, instance.pk * 10)
-
-        self.assertSetEqual({('update', "%s.%d" % (self.db_alias, instance.pk)) for instance in items},
-                            set(self.storage.get_operations(self.clickhouse_model.get_import_key(), 10)))
-
     def test_pg_bulk_update_or_create(self):
         items = list(self.django_model.objects.filter(pk__in={1, 2}))
 
@@ -166,28 +133,9 @@ class TestOperations(TransactionTestCase):
         self.assertSetEqual({('update', "%s.%d" % (self.db_alias, instance.pk)) for instance in items},
                             set(self.storage.get_operations(self.clickhouse_model.get_import_key(), 10)))
 
-    def test_pg_bulk_update_or_create_returning(self):
-        items = list(self.django_model.objects.filter(pk__in={1, 2}))
-
-        data = [{
-            'id': instance.pk,
-            'value': instance.pk * 10,
-            'created_date': instance.created_date,
-            'created': instance.created
-        } for instance in items] + [{'id': 11, 'value': 110, 'created_date': datetime.date.today(), 'created': now()}]
-
-        res = self.django_model.objects.pg_bulk_update_or_create(data, returning='*')
-
-        self.assertEqual(3, len(res))
-        for instance in res:
-            self.assertEqual(instance.value, instance.pk * 10)
-
-        self.assertSetEqual({('update', "%s.%d" % (self.db_alias, instance.pk)) for instance in res},
-                            set(self.storage.get_operations(self.clickhouse_model.get_import_key(), 10)))
-
     def test_get_or_create(self):
         instance, created = self.django_model.objects. \
-            get_or_create(pk=100, defaults={'created_date': datetime.date.today(), 'created': now(),
+            get_or_create(pk=100, defaults={'created_date': datetime.date.today(), 'created': datetime.datetime.now(),
                           'value': 2})
 
         self.assertTrue(created)
@@ -203,7 +151,8 @@ class TestOperations(TransactionTestCase):
 
     def test_update_or_create(self):
         instance, created = self.django_model.objects. \
-            update_or_create(pk=100, defaults={'created_date': datetime.date.today(), 'created': now(), 'value': 2})
+            update_or_create(pk=100, defaults={'created_date': datetime.date.today(),
+                                               'created': datetime.datetime.now(), 'value': 2})
         self.assertTrue(created)
         self.assertListEqual([('insert', "%s.%d" % (self.db_alias, instance.pk))],
                              self.storage.get_operations(self.clickhouse_model.get_import_key(), 10))
@@ -228,7 +177,7 @@ class TestOperations(TransactionTestCase):
 
     def test_bulk_create_returning(self):
         items = [
-            self.django_model(created_date=datetime.date.today(), created=now(), value=i)
+            self.django_model(created_date=datetime.date.today(), created=datetime.datetime.now(), value=i)
             for i in range(5)
         ]
         items = self.django_model.objects.bulk_create_returning(items)
@@ -259,7 +208,7 @@ class TestOperations(TransactionTestCase):
 
     def test_save_returning(self):
         # INSERT operation
-        instance = self.django_model(created_date=datetime.date.today(), created=now(), value=2)
+        instance = self.django_model(created_date=datetime.date.today(), created=datetime.datetime.now(), value=2)
         instance.save_returning()
         self.assertListEqual([('insert', "%s.%d" % (self.db_alias, instance.pk))],
                              self.storage.get_operations(self.clickhouse_model.get_import_key(), 10))
@@ -283,7 +232,6 @@ class TestOperations(TransactionTestCase):
 
 
 class TestSecondaryOperations(TestOperations):
-    # from django.db.models.fields import *
     fixtures = ['test_secondary_model']
     django_model = SecondaryTestModel
     clickhouse_model = ClickHouseSecondTestModel

tests/test_routers.py

@@ -1,59 +0,0 @@
-from django.test import SimpleTestCase
-
-from django_clickhouse.migrations import RunSQL, CreateTable
-from django_clickhouse.routers import DefaultRouter
-from tests.clickhouse_models import ClickHouseTestModel
-
-
-class DefaultRouterAllowMigrateTest(SimpleTestCase):
-    def setUp(self):
-        self.router = DefaultRouter()
-        self.operation = RunSQL('SELECT 1')
-
-    def test_hints_model_class(self):
-        hints = {'model': ClickHouseTestModel}
-
-        with self.subTest('Allow migrate'):
-            res = self.router.allow_migrate('default', 'tests', self.operation, **hints)
-            self.assertTrue(res)
-
-        with self.subTest('Reject migrate'):
-            res = self.router.allow_migrate('other', 'tests', self.operation, **hints)
-            self.assertFalse(res)
-
-    def test_hints_model_name(self):
-        hints = {'model': 'ClickHouseTestModel'}
-
-        with self.subTest('Allow migrate'):
-            res = self.router.allow_migrate('default', 'tests', self.operation, **hints)
-            self.assertTrue(res)
-
-        with self.subTest('Reject migrate'):
-            res = self.router.allow_migrate('other', 'tests', self.operation, **hints)
-            self.assertFalse(res)
-
-    def test_hints_force_migrate_on_databases(self):
-        hints = {'force_migrate_on_databases': ['secondary']}
-
-        with self.subTest('Allow migrate'):
-            res = self.router.allow_migrate('secondary', 'apps', self.operation, **hints)
-            self.assertTrue(res)
-
-        with self.subTest('Reject migrate'):
-            res = self.router.allow_migrate('default', 'apps', self.operation, **hints)
-            self.assertFalse(res)
-
-    def test_model_operation(self):
-        with self.subTest('Allow migrate'):
-            operation = CreateTable(ClickHouseTestModel)
-            res = self.router.allow_migrate('default', 'apps', operation)
-            self.assertTrue(res)
-
-        with self.subTest('Reject migrate'):
-            operation = CreateTable(ClickHouseTestModel)
-            res = self.router.allow_migrate('other', 'apps', operation)
-            self.assertFalse(res)
-
-    def test_no_model(self):
-        with self.assertRaises(ValueError):
-            self.router.allow_migrate('default', 'apps', self.operation)
|
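The file deleted above is the master-only test suite for DefaultRouter.allow_migrate. The contract it pins down: a database may run a ClickHouse migration operation if the hints name it via force_migrate_on_databases, or if the model (a class, a name in hints, or carried by the operation itself) routes to it; with no model at all, a ValueError is raised. A rough sketch of that contract follows — not the shipped DefaultRouter; the migrate_databases attribute and the name registry are assumptions made for the illustration.

# Hedged sketch of the allow_migrate contract the deleted tests exercise.
MODEL_REGISTRY = {}  # name -> model class; an illustrative stand-in


class SketchModel:
    migrate_databases = ('default',)


MODEL_REGISTRY['SketchModel'] = SketchModel


class SketchRouter:
    def allow_migrate(self, db_alias, app_label, operation=None, **hints):
        if 'force_migrate_on_databases' in hints:
            # An explicit database list in hints wins regardless of the model
            return db_alias in hints['force_migrate_on_databases']

        model = hints.get('model') or getattr(operation, 'model', None)
        if model is None:
            raise ValueError('Migration model not provided')
        if isinstance(model, str):
            model = MODEL_REGISTRY[model]  # resolve a model named as a string

        return db_alias in model.migrate_databases


router = SketchRouter()
assert router.allow_migrate('default', 'tests', model=SketchModel) is True
assert router.allow_migrate('other', 'tests', model='SketchModel') is False
assert router.allow_migrate('secondary', 'apps', None,
                            force_migrate_on_databases=['secondary']) is True
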
@@ -12,7 +12,7 @@ class StorageTest(TestCase):

     def setUp(self):
         self.storage.flush()

     def tearDown(self):
         self.storage.flush()

@@ -75,10 +75,11 @@ class StorageTest(TestCase):
     def test_locks(self):
         # Test that multiple models can acquire locks in parallel
        # and a single model can't
-        lock = self.storage.get_lock(ClickHouseTestModel.get_import_key())
-        lock.acquire()
+        l = self.storage.get_lock(ClickHouseTestModel.get_import_key())
+        l.acquire()
        with self.assertRaises(RedisLockTimeoutError):
-            lock.acquire()
+            l.acquire()

-        lock_2 = self.storage.get_lock(ClickHouseCollapseTestModel.get_import_key())
-        lock_2.acquire()
+        l2 = self.storage.get_lock(ClickHouseCollapseTestModel.get_import_key())
+        l2.acquire()
+
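Renamed variables aside, the behaviour under test here is per-import-key mutual exclusion: a second acquire on the same key times out, while a lock for a different model's key is independent. A stdlib-only sketch of that semantics (an illustration, not the Redis-backed implementation; all names below are invented for the example):

import threading


class LockTimeoutError(Exception):
    """Stands in for RedisLockTimeoutError in this illustration."""


class KeyedLocks:
    """One non-reentrant lock per key, acquired with a short timeout."""

    def __init__(self, timeout=0.1):
        self._locks = {}
        self._timeout = timeout

    def get_lock(self, key):
        return _TimeoutLock(self._locks.setdefault(key, threading.Lock()), self._timeout)


class _TimeoutLock:
    def __init__(self, lock, timeout):
        self._lock, self._timeout = lock, timeout

    def acquire(self):
        if not self._lock.acquire(timeout=self._timeout):
            raise LockTimeoutError()


locks = KeyedLocks()
locks.get_lock('ClickHouseTestModel').acquire()           # first acquire succeeds
locks.get_lock('ClickHouseCollapseTestModel').acquire()   # another key is independent
try:
    locks.get_lock('ClickHouseTestModel').acquire()       # same key: times out
except LockTimeoutError:
    print('second acquire on the same key timed out, as the test expects')
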
@@ -2,18 +2,15 @@ import datetime
 import logging
 from subprocess import Popen
 from time import sleep
-from unittest import expectedFailure, skip, mock
+from unittest import expectedFailure, skip

 import os
 from django.test import TransactionTestCase
-from django.test.testcases import TestCase
 from django.utils.timezone import now
 from random import randint

 from django_clickhouse.database import connections
 from django_clickhouse.migrations import migrate_app
-from django_clickhouse.storages import RedisStorage
-from django_clickhouse.tasks import sync_clickhouse_model, clickhouse_auto_sync
 from django_clickhouse.utils import int_ranges
 from tests.clickhouse_models import ClickHouseTestModel, ClickHouseCollapseTestModel, ClickHouseMultiTestModel
 from tests.models import TestModel
@@ -30,7 +27,7 @@ class SyncTest(TransactionTestCase):
         ClickHouseTestModel.get_storage().flush()

     def test_simple(self):
-        obj = TestModel.objects.create(value=1, created=now(), created_date=datetime.date.today())
+        obj = TestModel.objects.create(value=1, created=datetime.datetime.now(), created_date=datetime.date.today())
         ClickHouseTestModel.sync_batch_from_storage()

         synced_data = list(ClickHouseTestModel.objects.all())
@@ -40,7 +37,7 @@
         self.assertEqual(obj.id, synced_data[0].id)

     def test_collapsing_update_by_final(self):
-        obj = TestModel.objects.create(value=1, created=now(), created_date=datetime.date.today())
+        obj = TestModel.objects.create(value=1, created=datetime.datetime.now(), created_date=datetime.date.today())
         obj.value = 2
         obj.save()
         ClickHouseCollapseTestModel.sync_batch_from_storage()
@@ -63,7 +60,7 @@
     def test_collapsing_update_by_version(self):
         ClickHouseCollapseTestModel.engine.version_col = 'version'

-        obj = TestModel.objects.create(value=1, created=now(), created_date=datetime.date.today())
+        obj = TestModel.objects.create(value=1, created=datetime.datetime.now(), created_date=datetime.date.today())
         obj.value = 2
         obj.save()
         ClickHouseCollapseTestModel.sync_batch_from_storage()
@@ -97,7 +94,7 @@
         self.assertEqual(0, len(synced_data))

     def test_multi_model(self):
-        obj = TestModel.objects.create(value=1, created=now(), created_date=datetime.date.today())
+        obj = TestModel.objects.create(value=1, created=datetime.datetime.now(), created_date=datetime.date.today())
         obj.value = 2
         obj.save()
         ClickHouseMultiTestModel.sync_batch_from_storage()
@@ -210,48 +207,6 @@ class KillTest(TransactionTestCase):
         self._check_data()


-@mock.patch.object(ClickHouseTestModel, 'sync_batch_from_storage')
-class SyncClickHouseModelTest(TestCase):
-    def test_model_as_class(self, sync_mock):
-        sync_clickhouse_model(ClickHouseTestModel)
-        sync_mock.assert_called()
-
-    def test_model_as_string(self, sync_mock):
-        sync_clickhouse_model('tests.clickhouse_models.ClickHouseTestModel')
-        sync_mock.assert_called()
-
-    @mock.patch.object(RedisStorage, 'set_last_sync_time')
-    def test_last_sync_time_called(self, storage_mock, _):
-        sync_clickhouse_model(ClickHouseTestModel)
-        storage_mock.assert_called()
-        self.assertEqual(2, len(storage_mock.call_args))
-        self.assertEqual(storage_mock.call_args[0][0], 'ClickHouseTestModel')
-        self.assertIsInstance(storage_mock.call_args[0][1], datetime.datetime)
-
-
-@mock.patch.object(sync_clickhouse_model, 'delay')
-class ClickHouseAutoSyncTest(TestCase):
-    @mock.patch('django_clickhouse.tasks.get_subclasses', return_value=[ClickHouseTestModel])
-    @mock.patch.object(ClickHouseTestModel, 'need_sync', return_value=True)
-    def test_needs_sync_enabled(self, need_sync_mock, get_subclasses_mock, sync_delay_mock):
-        clickhouse_auto_sync()
-        sync_delay_mock.assert_called_with('tests.clickhouse_models.ClickHouseTestModel')
-
-    @mock.patch('django_clickhouse.tasks.get_subclasses', return_value=[ClickHouseTestModel])
-    @mock.patch.object(ClickHouseTestModel, 'need_sync', return_value=False)
-    def test_does_not_need_sync(self, need_sync_mock, get_subclasses_mock, sync_delay_mock):
-        clickhouse_auto_sync()
-        sync_delay_mock.assert_not_called()
-
-    @mock.patch('django_clickhouse.tasks.get_subclasses',
-                return_value=[ClickHouseTestModel, ClickHouseCollapseTestModel])
-    @mock.patch.object(ClickHouseTestModel, 'need_sync', return_value=True)
-    @mock.patch.object(ClickHouseCollapseTestModel, 'need_sync', return_value=True)
-    def test_multiple_models(self, need_sync_1_mock, need_sync_2_mock, get_subclasses_mock, sync_delay_mock):
-        clickhouse_auto_sync()
-        self.assertEqual(2, sync_delay_mock.call_count)
-
-
 # Used to profile sync execution time. Disabled by default
 @skip
 class ProfileTest(TransactionTestCase):
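The removed SyncClickHouseModelTest covered the task's "model as string" dispatch: sync_clickhouse_model accepts either a model class or its dotted path. A minimal sketch of that resolution step, assuming an importlib-based approach — the project ships its own lazy_class_import helper, and the stand-in below only illustrates the idea:

from importlib import import_module


def lazy_class_import(obj):
    """Return obj if it is already a class; import it when given a dotted path."""
    if not isinstance(obj, str):
        return obj
    module_path, class_name = obj.rsplit('.', 1)
    return getattr(import_module(module_path), class_name)


# Both forms resolve to the same class before the sync runs, e.g.:
#   model = lazy_class_import('tests.clickhouse_models.ClickHouseTestModel')
#   model.sync_batch_from_storage()
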
@@ -268,7 +223,7 @@ class ProfileTest(TransactionTestCase):
         ClickHouseTestModel.sync_enabled = False

         TestModel.objects.bulk_create([
-            TestModel(created=now(), created_date='2018-01-01', value=i)
+            TestModel(created=datetime.datetime.now(), created_date='2018-01-01', value=i)
             for i in range(self.BATCH_SIZE)
         ])

@@ -3,6 +3,7 @@ from queue import Queue

 import pytz
 from django.test import TestCase
+from six import with_metaclass

 from django_clickhouse.models import ClickHouseSyncModel
 from django_clickhouse.utils import get_tz_offset, format_datetime, lazy_class_import, int_ranges, exec_in_parallel, \
@@ -109,7 +110,7 @@ class TestExecInParallel(TestCase):

 class TestSingletonMeta(TestCase):
     def test_singleton(self):
-        class Single(metaclass=SingletonMeta):
+        class Single(with_metaclass(SingletonMeta)):
             def __init__(self):
                 self.test = 1

@@ -118,3 +119,4 @@
         b = Single()
         self.assertEqual(a, b)
         self.assertEqual(2, b.test)
+
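To close the section: the metaclass change above is purely a Python 2 compatibility move — six.with_metaclass(SingletonMeta) and metaclass=SingletonMeta produce the same class on Python 3. A self-contained sketch of the singleton behaviour the test asserts (the real SingletonMeta lives in django_clickhouse.utils; the body below is an assumption about its shape, not the shipped code):

class SingletonMeta(type):
    """Cache the first instance of each class and return it on every call."""
    _instances = {}

    def __call__(cls, *args, **kwargs):
        if cls not in cls._instances:
            cls._instances[cls] = super().__call__(*args, **kwargs)
        return cls._instances[cls]


class Single(metaclass=SingletonMeta):  # six.with_metaclass(SingletonMeta) on Py2
    def __init__(self):
        self.test = 1


a = Single()
a.test = 2          # mutate the shared instance
b = Single()
assert a is b and b.test == 2   # the same object comes back, as the test asserts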