Merge remote-tracking branch 'origin/master'

# Conflicts:
#	requirements-test.txt
M1ha 2021-07-08 12:29:25 +05:00
commit 8d8a051636
24 changed files with 282 additions and 154 deletions

.dockerignore Normal file
@@ -0,0 +1,15 @@
# Docs
docs/
# Python cache files
**/__pycache__/
# Private and public keys
*.key
*.ppk
*.pub
# Hidden apps directories
.github/
.idea/
.gitignore

.github/workflows/python-tests.yml vendored Normal file
@@ -0,0 +1,90 @@
name: Python unit tests
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
jobs:
build:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ["3.6", "3.7", "3.8", "3.9"]
postgres-version: ["9.6", "10", "11", "12"]
django-version: ["2.1", "2.2", "3.0", "3.1", "3.2"]
clickhouse-version: ["latest"]
redis-version: ["latest"]
services:
postgres:
image: postgres:${{ matrix.postgres-version }}
env:
POSTGRES_PASSWORD: postgres
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 5432:5432
clickhouse:
image: yandex/clickhouse-server:${{ matrix.clickhouse-version }}
ports:
- 8123:8123
redis:
image: redis:${{ matrix.redis-version }}
ports:
- 6379:6379
steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Cache pip
uses: actions/cache@v2
with:
# This path is specific to Ubuntu
path: ~/.cache/pip
# Look to see if there is a cache hit for the corresponding requirements file
key: ${{ runner.os }}-pip-${{ hashFiles('requirements-test.txt') }}
restore-keys: |
${{ runner.os }}-pip-
${{ runner.os }}-
- name: Install pip dependencies
run: |
python -m pip install --upgrade pip wheel setuptools
python -m pip install -r requirements-test.txt
python -m pip install -U django==${{ matrix.django-version }}.*
python setup.py -q install
- name: Lint with flake8
run: |
# stop the build if there are Python syntax errors or undefined names
flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics --extend-exclude=build/
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
- name: Set up test databases
run: |
psql -tc 'SHOW server_version' -U postgres -h localhost
psql -c 'CREATE ROLE test;' -U postgres -h localhost
psql -c 'ALTER ROLE test WITH SUPERUSER;' -U postgres -h localhost
psql -c 'ALTER ROLE test WITH LOGIN;' -U postgres -h localhost
psql -c "ALTER ROLE test PASSWORD 'test';" -U postgres -h localhost
psql -c 'CREATE DATABASE test OWNER test;' -U postgres -h localhost
psql -c 'CREATE DATABASE test2 OWNER test;' -U postgres -h localhost
env:
PGPASSWORD: postgres
- name: Test with unittest
run: |
python runtests.py

@@ -1,101 +0,0 @@
dist: xenial
sudo: required
language: python
cache:
pip: true
apt: true
services:
- postgresql
- redis-server
addons:
postgresql: "11"
apt:
sources:
- sourceline: "deb http://repo.yandex.ru/clickhouse/deb/stable/ main/"
- sourceline: "deb https://packages.erlang-solutions.com/ubuntu xenial contrib"
key_url: "https://packages.erlang-solutions.com/ubuntu/erlang_solutions.asc"
- sourceline: "deb https://dl.bintray.com/rabbitmq/debian xenial main"
key_url: "https://github.com/rabbitmq/signing-keys/releases/download/2.0/rabbitmq-release-signing-key.asc"
packages:
- dirmngr
- apt-transport-https
- postgresql-contrib-9.6
- postgresql-10
- postgresql-contrib-10
- postgresql-client-10
- postgresql-11
- postgresql-contrib-11
- postgresql-client-11
- postgresql-12
- postgresql-contrib-12
- postgresql-client-12
- unzip
- rabbitmq-server
python:
- 3.6
- 3.7
- 3.8
env:
- PG=9.6 DJANGO=2.1
- PG=10 DJANGO=2.1
- PG=11 DJANGO=2.1
- PG=12 DJANGO=2.1
- PG=9.6 DJANGO=2.2
- PG=10 DJANGO=2.2
- PG=11 DJANGO=2.2
- PG=12 DJANGO=2.2
- PG=9.6 DJANGO=3.0
- PG=10 DJANGO=3.0
- PG=11 DJANGO=3.0
- PG=12 DJANGO=3.0
- PG=9.6 DJANGO=3.1
- PG=10 DJANGO=3.1
- PG=11 DJANGO=3.1
- PG=12 DJANGO=3.1
before_install:
# Use default PostgreSQL 11 port
- sudo sed -i 's/port = 5433/port = 5432/' /etc/postgresql/11/main/postgresql.conf
- sudo cp /etc/postgresql/{10,11}/main/pg_hba.conf
- sudo sed -i 's/port = 5434/port = 5432/' /etc/postgresql/12/main/postgresql.conf
- sudo cp /etc/postgresql/{10,12}/main/pg_hba.conf
# Start PostgreSQL version we need
- sudo systemctl stop postgresql
- sudo systemctl start postgresql@$PG-main
# ClickHouse sources
- sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv E0C56BD4
- sudo apt-get update
install:
# Install ClickHouse
- sudo apt-get install clickhouse-client clickhouse-server clickhouse-common-static
- sudo service clickhouse-server restart
- pip install -r requirements-test.txt
- pip install -q Django==$DJANGO.*
- python setup.py -q install
before_script:
# Output software versions
- erl -eval 'erlang:display(erlang:system_info(otp_release)), halt().' -noshell
- rabbitmqctl status | grep "RabbitMQ"
- clickhouse-client --query "SELECT version();"
- psql -tc 'SHOW server_version' -U postgres
- psql -tc 'SHOW server_version' -U postgres
- psql -c 'CREATE ROLE test;' -U postgres
- psql -c 'ALTER ROLE test WITH SUPERUSER;' -U postgres
- psql -c 'ALTER ROLE test WITH LOGIN;' -U postgres
- psql -c "ALTER ROLE test PASSWORD 'test';" -U postgres
- psql -c 'CREATE DATABASE test OWNER test;' -U postgres
- psql -c 'CREATE DATABASE test2 OWNER test;' -U postgres
script:
python runtests.py

Dockerfile Normal file
@@ -0,0 +1,47 @@
ARG PYTHON_IMAGE_TAG=latest
FROM python:${PYTHON_IMAGE_TAG} AS image_stage
ARG APP_TAG="1.0.3"
LABEL \
org.label-schema.build-date=Now \
org.label-schema.maintainer="m1ha@carrotquest.io" \
org.label-schema.schema-version="1.0.0-rc1" \
org.label-schema.vcs-ref="v${APP_TAG}" \
org.label-schema.vcs-url="https://github.com/carrotquest/django-clickhouse" \
org.label-schema.vendor="Carrot quest" \
org.label-schema.version="${APP_TAG}"
ENV APP_UID ${APP_UID:-1000}
ENV APP_GID ${APP_GID:-1000}
ENV APP_NAME ${APP_NAME:-"app"}
# Configure utf-8 locales to make sure Python correctly handles unicode filenames
# Configure pip local path to copy data from pip_stage
ENV LANG=C.UTF-8 LC_ALL=C.UTF-8 DJANGO_SETTINGS_MODULE=tests.settings PYTHONUSERBASE=/pip PATH=/pip/bin:$PATH
RUN set -eu && \
groupadd --gid "${APP_GID}" "app" && \
useradd --uid ${APP_UID} --gid ${APP_GID} --create-home --shell /bin/bash -d /app app && \
mkdir -p /pip && \
chmod 755 /app /pip && \
chown -R ${APP_UID}:${APP_GID} /app /pip
WORKDIR /app/src
# Install dependencies
# set -eu "breaks" pipeline on first error
COPY ./requirements-test.txt /app/requirements-test.txt
RUN --mount=type=cache,target=/root/.cache/pip \
set -eu && \
python3 -m pip install --upgrade pip setuptools wheel && \
python3 -m pip install --upgrade --requirement /app/requirements-test.txt
COPY . /app/src
RUN python3 setup.py -q install --user
USER ${APP_UID}
CMD ["python3", "runtests.py"]

docker-compose.yml Normal file
@@ -0,0 +1,39 @@
---
version: "3.9"
services:
redis_db:
image: redis
command: [sh, -c, "redis-server --save '' --appendonly no"] # disable persistence
mem_limit: 512m
cpus: 1
postgres_db:
image: postgres
environment:
- POSTGRES_PASSWORD=postgres
mem_limit: 1g
cpus: 1
clickhouse_db:
image: yandex/clickhouse-server
mem_limit: 1g
cpus: 1
run_tests:
image: django-clickhouse
build:
context: .
args:
- PYTHON_IMAGE_TAG=latest  # must match the ARG name declared in the Dockerfile
environment:
- REDIS_HOST=redis_db
- PGHOST=postgres_db
- PGUSER=postgres
- PGPASS=postgres
- "CLICK_HOUSE_HOST=http://clickhouse_db:8123"
depends_on:
- redis_db
- postgres_db
- clickhouse_db
mem_limit: 1g
cpus: 1

docs/development.md Normal file
@@ -0,0 +1,30 @@
# Development
## Basic info
This is an open-source project developed by the `Carrot quest` team under the MIT license.
Feel free to create issues and make pull requests.
The query and database system wraps the [infi.clickhouse_orm](https://github.com/Infinidat/infi.clickhouse_orm) library.
If you want to develop the QuerySet system, it is better to contribute there.
## General info about testing
The library's tests are based on [django.test](https://docs.djangoproject.com/en/3.2/topics/testing/overview/).
You can find them in the `tests` directory.
## Tests requirements
* [Redis](https://redis.io/)
* [Yandex ClickHouse](https://clickhouse.yandex/)
* [PostgreSQL](https://www.postgresql.org/)
* PyPI libraries listed in the `requirements-test.txt` file
## Running tests
### Running in docker
1. Install [docker and docker-compose](https://www.docker.com/)
2. Run `docker-compose run run_tests` in the project directory
### Running in virtual environment
1. Install all requirements listed above
2. [Create virtual environment](https://docs.python.org/3/tutorial/venv.html)
3. Install requirements
`pip3 install -U -r requirements-test.txt`
4. Start tests
`python3 runtests.py`

@@ -20,3 +20,4 @@
* [RedisStorage](storages.md#redisstorage)
* [Monitoring](monitoring.md)
* [Performance notes](performance.md)
* [Development](development.md)

@@ -9,3 +9,6 @@ psycopg2-binary
django-pg-returning
django-pg-bulk-update
redis
# Linter
flake8

@@ -18,6 +18,6 @@ if __name__ == "__main__":
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
django.setup()
TestRunner = get_runner(settings)
- test_runner = TestRunner()
+ test_runner = TestRunner(interactive=False)
failures = test_runner.run_tests(["tests"])
sys.exit(bool(failures))
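The `TestRunner(interactive=False)` change above matters for the Docker and GitHub Actions runs added in this commit: Django's `DiscoverRunner` normally asks for confirmation before destroying a leftover test database, and there is no TTY to answer that prompt inside a container or CI job. A minimal sketch of the pattern, assuming the stock `DiscoverRunner` and the `tests.settings` module used throughout this diff:

    import os

    import django
    from django.conf import settings
    from django.test.utils import get_runner

    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "tests.settings")
    django.setup()

    TestRunner = get_runner(settings)            # DiscoverRunner unless settings.TEST_RUNNER overrides it
    test_runner = TestRunner(interactive=False)  # never prompt before replacing a stale test database
    failures = test_runner.run_tests(["tests"])
    raise SystemExit(bool(failures))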

@@ -93,7 +93,7 @@ class ClickHouseModel(InfiModel, metaclass=ClickHouseModelMeta):
return namedtuple("%sTuple" % cls.__name__, field_names, defaults=default_values)
@classmethod
- def objects_in(cls, database: Database)-> QuerySet:
+ def objects_in(cls, database: Database) -> QuerySet:
return QuerySet(cls, database)
@classmethod
@@ -120,7 +120,7 @@ class ClickHouseModel(InfiModel, metaclass=ClickHouseModelMeta):
return connections[db_alias]
@classmethod
- def get_django_model_serializer(cls, writable: bool= False, defaults: Optional[dict] = None
+ def get_django_model_serializer(cls, writable: bool = False, defaults: Optional[dict] = None
) -> Django2ClickHouseModelSerializer:
serializer_cls = lazy_class_import(cls.django_model_serializer)
return serializer_cls(cls, writable=writable, defaults=defaults)

@@ -28,7 +28,7 @@ def django_pg_returning_available(using: str) -> bool:
:return: Boolean
"""
try:
- import django_pg_returning
+ import django_pg_returning  # noqa: F401
return connections[using].vendor == 'postgresql'
except ImportError:
return False

@@ -1,5 +1,5 @@
import logging
- from typing import Generator, Optional, Type, Iterable
+ from typing import Optional, Type, Iterable
from infi.clickhouse_orm.database import Database as InfiDatabase, DatabaseException
from infi.clickhouse_orm.utils import parse_tsv
@@ -34,8 +34,8 @@ class Database(InfiDatabase):
def _get_applied_migrations(self, migrations_package_name):
raise NotImplementedError("This method is not supported by django_clickhouse.")
- def select_tuples(self, query: str, model_class: Type['ClickHouseModel'], settings: Optional[dict] = None
- ) -> Iterable[tuple]:
+ def select_tuples(self, query: str, model_class: Type['ClickHouseModel'],  # noqa: F821
+ settings: Optional[dict] = None) -> Iterable[tuple]:
"""
This method selects model_class namedtuples, instead of class instances.
Less memory consumption, greater speed
@@ -66,7 +66,7 @@ class Database(InfiDatabase):
yield item
- def insert_tuples(self, model_class: Type['ClickHouseModel'], model_tuples: Iterable[tuple],
+ def insert_tuples(self, model_class: Type['ClickHouseModel'], model_tuples: Iterable[tuple],  # noqa: F821
batch_size: Optional[int] = None, formatted: bool = False) -> None:
"""
Inserts model_class namedtuples

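As the docstring above explains, `select_tuples` yields named tuples instead of model instances, which keeps memory use low and speed high on large result sets. A hedged usage sketch: the `connections` mapping and the `select_tuples` call are the ones shown in this hunk, while `MyClickHouseModel`, the table and column names, and `handle()` are placeholders invented for illustration:

    from django_clickhouse.database import connections

    db = connections['default']

    # Rows arrive as namedtuples; no ClickHouseModel instances are allocated on the way.
    for row in db.select_tuples("SELECT id, value FROM my_table", MyClickHouseModel):
        handle(row.id, row.value)   # placeholder for real processing
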
@@ -2,19 +2,20 @@
This file contains wrappers for infi.clckhouse_orm engines to use in django-clickhouse
"""
import datetime
- from typing import List, Type, Union, Iterable, Generator, Optional
+ from typing import List, Type, Union, Iterable, Optional
from django.db.models import Model as DjangoModel
from infi.clickhouse_orm import engines as infi_engines
from statsd.defaults.django import statsd
from .clickhouse_models import ClickHouseModel
from .configuration import config
from .database import connections
from .utils import format_datetime
class InsertOnlyEngineMixin:
- def get_insert_batch(self, model_cls: Type['ClickHouseModel'], objects: List[DjangoModel]) -> Iterable[tuple]:
+ def get_insert_batch(self, model_cls: Type[ClickHouseModel], objects: List[DjangoModel]) -> Iterable[tuple]:
"""
Gets a list of model_cls instances to insert into database
:param model_cls: ClickHouseModel subclass to import
@@ -47,9 +48,9 @@ class CollapsingMergeTree(InsertOnlyEngineMixin, infi_engines.CollapsingMergeTree):
def _get_final_versions_by_version(self, db_alias, model_cls, min_date, max_date, object_pks, date_col, columns):
query = """
SELECT {columns} FROM $table WHERE (`{pk_column}`, `{version_col}`) IN (
- SELECT `{pk_column}`, MAX(`{version_col}`)
- FROM $table
- PREWHERE `{date_col}` >= '{min_date}' AND `{date_col}` <= '{max_date}'
+ SELECT `{pk_column}`, MAX(`{version_col}`)
+ FROM $table
+ PREWHERE `{date_col}` >= '{min_date}' AND `{date_col}` <= '{max_date}'
AND `{pk_column}` IN ({object_pks})
GROUP BY `{pk_column}`
)
@@ -68,7 +69,7 @@ class CollapsingMergeTree(InsertOnlyEngineMixin, infi_engines.CollapsingMergeTree):
max_date=max_date, object_pks=','.join(object_pks))
return connections[db_alias].select_tuples(query, model_cls)
- def get_final_versions(self, model_cls: Type['ClickHouseModel'], objects: Iterable[DjangoModel],
+ def get_final_versions(self, model_cls: Type[ClickHouseModel], objects: Iterable[DjangoModel],
date_col: Optional[str] = None) -> Iterable[tuple]:
"""
Get objects, that are currently stored in ClickHouse.
@@ -122,7 +123,7 @@ class CollapsingMergeTree(InsertOnlyEngineMixin, infi_engines.CollapsingMergeTree):
else:
return self._get_final_versions_by_final(*params)
- def get_insert_batch(self, model_cls: Type['ClickHouseModel'], objects: List[DjangoModel]) -> Iterable[tuple]:
+ def get_insert_batch(self, model_cls: Type[ClickHouseModel], objects: List[DjangoModel]) -> Iterable[tuple]:
"""
Gets a list of model_cls instances to insert into database
:param model_cls: ClickHouseModel subclass to import

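The `get_final_versions` / `get_insert_batch` pair above reflects how ClickHouse's CollapsingMergeTree family works: every update is written as a new row and rows with opposite `sign` values are collapsed in the background, so the engine first fetches the latest stored version of each object (the `MAX(version)` query above) before writing a cancelling row plus the new state. A library-independent illustration of that idea, with made-up field values:

    # Conceptually what get_final_versions fetches: the row currently stored in ClickHouse.
    stored = {"id": 1, "value": 10, "sign": 1, "version": 3}

    # An update becomes a pair of rows in the insert batch: a cancelling copy of the old
    # state and the new state. ClickHouse later collapses matching (+1, -1) pairs.
    cancel = {**stored, "sign": -1}
    updated = {"id": 1, "value": 11, "sign": 1, "version": 4}
    insert_batch = [cancel, updated]
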
@@ -9,7 +9,7 @@ from django.db.models.signals import post_migrate
from django.dispatch import receiver
# In order to support all operations import here
- from infi.clickhouse_orm.migrations import *
+ from infi.clickhouse_orm.migrations import *  # noqa F401, F403
from infi.clickhouse_orm.database import ServerError, DatabaseException
from infi.clickhouse_orm.fields import StringField, DateField

@@ -154,7 +154,7 @@ class ClickHouseSyncModel(DjangoModel):
return storage_cls()
@classmethod
- def register_clickhouse_sync_model(cls, model_cls: Type['ClickHouseModel']) -> None:
+ def register_clickhouse_sync_model(cls, model_cls: Type['ClickHouseModel']) -> None:  # noqa: F821
"""
Registers ClickHouse model to listen to this model updates
:param model_cls: Model class to register
@@ -166,7 +166,7 @@ class ClickHouseSyncModel(DjangoModel):
cls._clickhouse_sync_models.add(model_cls)
@classmethod
- def get_clickhouse_sync_models(cls) -> Set['ClickHouseModel']:
+ def get_clickhouse_sync_models(cls) -> Set['ClickHouseModel']:  # noqa: F821
"""
Returns all clickhouse models, listening to this class
:return: A set of model classes to sync

@@ -7,7 +7,7 @@ from .utils import model_to_dict
class Django2ClickHouseModelSerializer:
- def __init__(self, model_cls: Type['ClickHouseModel'], fields: Optional[Iterable[str]] = None,
+ def __init__(self, model_cls: Type['ClickHouseModel'], fields: Optional[Iterable[str]] = None,  # noqa: F821
exclude_fields: Optional[Iterable[str]] = None, writable: bool = False,
defaults: Optional[dict] = None) -> None:
"""

@@ -82,7 +82,7 @@ class Storage:
:param kwargs: Storage dependant arguments
:return: Number of records in queue
"""
- raise NotImplemented()
+ raise NotImplementedError()
def get_operations(self, import_key: str, count: int, **kwargs) -> List[Tuple[str, str]]:
"""
@@ -93,7 +93,7 @@ class Storage:
:param kwargs: Storage dependant arguments
:return: A list of tuples (operation, pk) in incoming order.
"""
- raise NotImplemented()
+ raise NotImplementedError()
def register_operations(self, import_key: str, operation: str, *pks: Any) -> int:
"""
@@ -134,21 +134,21 @@ class Storage:
This method is used in tests to drop all storage data
:return: None
"""
- raise NotImplemented()
+ raise NotImplementedError()
def get_last_sync_time(self, import_key: str) -> Optional[datetime.datetime]:
"""
Gets the last time, sync has been executed
:return: datetime.datetime if last sync has been. Otherwise - None.
"""
- raise NotImplemented()
+ raise NotImplementedError()
def set_last_sync_time(self, import_key: str, dt: datetime.datetime) -> None:
"""
Sets successful sync time
:return: None
"""
- raise NotImplemented()
+ raise NotImplementedError()
class RedisStorage(Storage, metaclass=SingletonMeta):

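The `NotImplemented()` to `NotImplementedError()` replacements above fix a genuine bug, not just style: `NotImplemented` is a constant meant to be returned from binary special methods, not an exception type, so calling and raising it blows up with a `TypeError` the moment an abstract method is hit. A condensed sketch of both sides of the distinction (class names here are illustrative):

    class BaseStorage:
        def flush(self):
            # Correct placeholder for an abstract method: callers get a clear exception.
            raise NotImplementedError()

    # The old spelling fails as soon as the line executes:
    #   raise NotImplemented()  ->  TypeError: 'NotImplementedType' object is not callable

    class Version:
        def __init__(self, n):
            self.n = n

        def __eq__(self, other):
            if not isinstance(other, Version):
                return NotImplemented   # correct use: let Python try the reflected comparison
            return self.n == other.n
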
@@ -287,4 +287,4 @@ class SingletonMeta(type):
def __call__(cls, *args, **kwargs):
if cls not in cls._instances:
cls._instances[cls] = super(SingletonMeta, cls).__call__(*args, **kwargs)
- return cls._instances[cls]
+ return cls._instances[cls]

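For context, the hunk above appears to only add the missing newline at the end of the file (the two `return` lines are the before/after of that change); the surrounding `SingletonMeta.__call__` is the standard singleton metaclass used by `RedisStorage` earlier in this diff: the first instantiation of each class is cached and returned on every later call. A self-contained sketch of that behaviour (`AppConfigHolder` is a made-up class):

    class SingletonMeta(type):
        _instances = {}

        def __call__(cls, *args, **kwargs):
            if cls not in cls._instances:
                cls._instances[cls] = super(SingletonMeta, cls).__call__(*args, **kwargs)
            return cls._instances[cls]


    class AppConfigHolder(metaclass=SingletonMeta):
        def __init__(self):
            self.loaded = True


    assert AppConfigHolder() is AppConfigHolder()   # every call returns the single cached instance
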
@@ -15,9 +15,9 @@ os.environ.setdefault("DJANGO_SETTINGS_MODULE", 'tests.settings')
django.setup()
# This imports must be after django activation
- from django.db.models import F
- from tests.clickhouse_models import ClickHouseCollapseTestModel
- from tests.models import TestModel
+ from django.db.models import F  # noqa: E402
+ from tests.clickhouse_models import ClickHouseCollapseTestModel  # noqa: E402
+ from tests.models import TestModel  # noqa: E402
logger = logging.getLogger('django-clickhouse')

@@ -1,36 +1,39 @@
"""
This file contains django settings to run tests with runtests.py
"""
from os import environ
SECRET_KEY = 'fake-key'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'test',
- 'USER': 'test',
- 'PASSWORD': 'test',
- 'HOST': '127.0.0.1',
- 'PORT': '5432'
+ 'USER': environ.get('PGUSER', 'test'),
+ 'PASSWORD': environ.get('PGPASS', 'test'),
+ 'HOST': environ.get('PGHOST', '127.0.0.1'),
+ 'PORT': environ.get('PGPORT', 5432)
},
'secondary': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'test2',
- 'USER': 'test',
- 'PASSWORD': 'test',
- 'HOST': '127.0.0.1',
- 'PORT': '5432'
+ 'USER': environ.get('PGUSER', 'test'),
+ 'PASSWORD': environ.get('PGPASS', 'test'),
+ 'HOST': environ.get('PGHOST', '127.0.0.1'),
+ 'PORT': environ.get('PGPORT', 5432)
},
# I need separate connections for multiprocessing tests
'test_db': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'test_test',
- 'USER': 'test',
- 'PASSWORD': 'test',
- 'HOST': '127.0.0.1',
- 'PORT': '5432'
+ 'USER': environ.get('PGUSER', 'test'),
+ 'PASSWORD': environ.get('PGPASS', 'test'),
+ 'HOST': environ.get('PGHOST', '127.0.0.1'),
+ 'PORT': environ.get('PGPORT', 5432)
},
}
DEFAULT_AUTO_FIELD = 'django.db.models.AutoField'
LOGGING = {
'version': 1,
@@ -58,22 +61,26 @@ INSTALLED_APPS = [
CLICKHOUSE_DATABASES = {
'default': {
'db_url': environ.get('CLICK_HOUSE_HOST', 'http://localhost:8123/'),
'db_name': 'test',
'username': 'default',
'password': ''
},
'secondary': {
'db_url': environ.get('CLICK_HOUSE_HOST', 'http://localhost:8123/'),
'db_name': 'test_2',
'username': 'default',
'password': ''
},
'no_migrate': {
'db_url': environ.get('CLICK_HOUSE_HOST', 'http://localhost:8123/'),
'db_name': 'test_3',
'username': 'default',
'password': '',
'migrate': False
},
'readonly': {
'db_url': environ.get('CLICK_HOUSE_HOST', 'http://localhost:8123/'),
'db_name': 'test_3',
'username': 'default',
'password': '',
@@ -84,8 +91,8 @@ CLICKHOUSE_DATABASES = {
CLICKHOUSE_SYNC_BATCH_SIZE = 5000
CLICKHOUSE_REDIS_CONFIG = {
- 'host': '127.0.0.1',
- 'port': 6379,
+ 'host': environ.get('REDIS_HOST', '127.0.0.1'),
+ 'port': environ.get('REDIS_PORT', 6379),
'db': 8,
'socket_timeout': 10
}

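These `environ.get(...)` defaults are what let the same settings file serve both a local checkout and the containers defined earlier: with nothing exported everything points at 127.0.0.1, while docker-compose and the GitHub Actions job only need to export the service hostnames. A minimal sketch of the override path, exporting the docker-compose service names by hand to show the effect (variable names are the ones used in this diff):

    import os

    # docker-compose sets these to the service names; unset variables fall back to localhost.
    os.environ.setdefault("PGHOST", "postgres_db")
    os.environ.setdefault("REDIS_HOST", "redis_db")
    os.environ.setdefault("CLICK_HOUSE_HOST", "http://clickhouse_db:8123")
    os.environ["DJANGO_SETTINGS_MODULE"] = "tests.settings"

    import django

    django.setup()

    from django.conf import settings

    assert settings.DATABASES["default"]["HOST"] == "postgres_db"
    assert settings.CLICKHOUSE_REDIS_CONFIG["host"] == "redis_db"
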
@@ -48,5 +48,3 @@ class NamedTupleTest(TestCase):
t2 = TestTuple(1, 2, 3)
self.assertEqual(t1, t2)
self.assertEqual((1, 2, 3), t1)

@@ -2,7 +2,7 @@ import datetime
from unittest import skipIf
import django
- from django.test import TransactionTestCase, TestCase
+ from django.test import TransactionTestCase
from django.utils.timezone import now
from tests.clickhouse_models import ClickHouseTestModel, ClickHouseSecondTestModel, ClickHouseCollapseTestModel, \

@@ -12,7 +12,7 @@ class StorageTest(TestCase):
def setUp(self):
self.storage.flush()
def tearDown(self):
self.storage.flush()
@@ -75,11 +75,10 @@ class StorageTest(TestCase):
def test_locks(self):
# Test that multiple can acquire locks in parallel
# And single model can't
- l = self.storage.get_lock(ClickHouseTestModel.get_import_key())
- l.acquire()
+ lock = self.storage.get_lock(ClickHouseTestModel.get_import_key())
+ lock.acquire()
with self.assertRaises(RedisLockTimeoutError):
- l.acquire()
- l2 = self.storage.get_lock(ClickHouseCollapseTestModel.get_import_key())
- l2.acquire()
+ lock.acquire()
+ lock_2 = self.storage.get_lock(ClickHouseCollapseTestModel.get_import_key())
+ lock_2.acquire()

@@ -118,4 +118,3 @@ class TestSingletonMeta(TestCase):
b = Single()
self.assertEqual(a, b)
self.assertEqual(2, b.test)