Initial commit

This commit is contained in:
Alexander Karpov 2022-12-07 20:16:44 +03:00
commit d95e557ad9
51 changed files with 4056 additions and 0 deletions

144
.dockerignore Normal file
View File

@ -0,0 +1,144 @@
### Python template
deploy/
.idea/
.vscode/
.git/
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/

31
.editorconfig Normal file
View File

@ -0,0 +1,31 @@
root = true
[*]
tab_width = 4
end_of_line = lf
max_line_length = 88
ij_visual_guides = 88
insert_final_newline = true
trim_trailing_whitespace = true
[*.{js,py,html}]
charset = utf-8
[*.md]
trim_trailing_whitespace = false
[*.{yml,yaml}]
indent_style = space
indent_size = 2
[Makefile]
indent_style = tab
[.flake8]
indent_style = space
indent_size = 2
[*.py]
indent_style = space
indent_size = 4
ij_python_from_import_parentheses_force_if_multiline = true

113
.flake8 Normal file
View File

@ -0,0 +1,113 @@
[flake8]
max-complexity = 6
inline-quotes = double
max-line-length = 88
extend-ignore = E203
docstring_style=sphinx
ignore =
; Found `f` string
WPS305,
; Missing docstring in public module
D100,
; Missing docstring in magic method
D105,
; Missing docstring in __init__
D107,
; Found `__init__.py` module with logic
WPS412,
; Found class without a base class
WPS306,
; Missing docstring in public nested class
D106,
; First line should be in imperative mood
D401,
; Found wrong variable name
WPS110,
; Found `__init__.py` module with logic
WPS326,
; Found string constant over-use
WPS226,
; Found upper-case constant in a class
WPS115,
; Found nested function
WPS602,
; Found method without arguments
WPS605,
; Found overused expression
WPS204,
; Found too many module members
WPS202,
; Found too high module cognitive complexity
WPS232,
; line break before binary operator
W503,
; Found module with too many imports
WPS201,
; Inline strong start-string without end-string.
RST210,
; Found nested class
WPS431,
; Found wrong module name
WPS100,
; Found too many methods
WPS214,
; Found too long ``try`` body
WPS229,
; Found unpythonic getter or setter
WPS615,
; Found a line that starts with a dot
WPS348,
; Found complex default value (for dependency injection)
WPS404,
; not perform function calls in argument defaults (for dependency injection)
B008,
; Model should define verbose_name in its Meta inner class
DJ10,
; Model should define verbose_name_plural in its Meta inner class
DJ11,
; Found mutable module constant.
WPS407,
; Found too many empty lines in `def`
WPS473,
per-file-ignores =
; all tests
test_*.py,tests.py,tests_*.py,*/tests/*,conftest.py:
; Use of assert detected
S101,
; Found outer scope names shadowing
WPS442,
; Found too many local variables
WPS210,
; Found magic number
WPS432,
; Missing parameter(s) in Docstring
DAR101,
; Found too many arguments
WPS211,
; all init files
__init__.py:
; ignore not used imports
F401,
; ignore import with wildcard
F403,
; Found wrong metadata variable
WPS410,
exclude =
./.cache,
./.git,
./.idea,
./.mypy_cache,
./.pytest_cache,
./.venv,
./venv,
./env,
./cached_venv,
./docs,
./deploy,
./var,
./.vscode,
*migrations*,

142
.gitignore vendored Normal file
View File

@ -0,0 +1,142 @@
### Python template
.idea/
.vscode/
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/

62
.pre-commit-config.yaml Normal file
View File

@ -0,0 +1,62 @@
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v2.4.0
hooks:
- id: check-ast
- id: trailing-whitespace
- id: check-toml
- id: end-of-file-fixer
- repo: https://github.com/asottile/add-trailing-comma
rev: v2.1.0
hooks:
- id: add-trailing-comma
- repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
rev: v2.1.0
hooks:
- id: pretty-format-yaml
args:
- --autofix
- --preserve-quotes
- --indent=2
- repo: local
hooks:
- id: black
name: Format with Black
entry: poetry run black
language: system
types: [python]
- id: autoflake
name: autoflake
entry: poetry run autoflake
language: system
types: [python]
args: [--in-place, --remove-all-unused-imports, --remove-duplicate-keys]
- id: isort
name: isort
entry: poetry run isort
language: system
types: [python]
- id: flake8
name: Check with Flake8
entry: poetry run flake8
language: system
pass_filenames: false
types: [python]
args: [--count, .]
- id: mypy
name: Validate types with MyPy
entry: poetry run mypy
language: system
types: [python]
pass_filenames: false
args:
- "med_backend"

132
README.md Normal file
View File

@ -0,0 +1,132 @@
# med_backend
This project was generated using fastapi_template.
## Poetry
This project uses poetry. It's a modern dependency management
tool.
To run the project use this set of commands:
```bash
poetry install
poetry run python -m med_backend
```
This will start the server on the configured host.
You can find swagger documentation at `/api/docs`.
You can read more about poetry here: https://python-poetry.org/
## Docker
You can start the project with docker using this command:
```bash
docker-compose -f deploy/docker-compose.yml --project-directory . up --build
```
If you want to develop in docker with autoreload add `-f deploy/docker-compose.dev.yml` to your docker command.
Like this:
```bash
docker-compose -f deploy/docker-compose.yml -f deploy/docker-compose.dev.yml --project-directory . up
```
This command exposes the web application on port 8000, mounts current directory and enables autoreload.
But you have to rebuild image every time you modify `poetry.lock` or `pyproject.toml` with this command:
```bash
docker-compose -f deploy/docker-compose.yml --project-directory . build
```
## Project structure
```bash
$ tree "med_backend"
med_backend
├── conftest.py # Fixtures for all tests.
├── db # module contains db configurations
│   ├── dao # Data Access Objects. Contains different classes to interact with database.
│   └── models # Package contains different models for ORMs.
├── __main__.py # Startup script. Starts uvicorn.
├── services # Package for different external services such as rabbit or redis etc.
├── settings.py # Main configuration settings for project.
├── static # Static content.
├── tests # Tests for project.
└── web # Package contains web server. Handlers, startup config.
├── api # Package with all handlers.
│   └── router.py # Main router.
├── application.py # FastAPI application configuration.
└── lifetime.py # Contains actions to perform on startup and shutdown.
```
## Configuration
This application can be configured with environment variables.
You can create `.env` file in the root directory and place all
environment variables here.
All environment variables should start with the "MED_BACKEND_" prefix.
For example if you see in your "med_backend/settings.py" a variable named like
`random_parameter`, you should provide the "MED_BACKEND_RANDOM_PARAMETER"
variable to configure the value. This behaviour can be changed by overriding `env_prefix` property
in `med_backend.settings.Settings.Config`.
An example of a .env file:
```bash
MED_BACKEND_RELOAD="True"
MED_BACKEND_PORT="8000"
MED_BACKEND_ENVIRONMENT="dev"
```
You can read more about BaseSettings class here: https://pydantic-docs.helpmanual.io/usage/settings/
## Pre-commit
To install pre-commit simply run inside the shell:
```bash
pre-commit install
```
pre-commit is very useful to check your code before publishing it.
It's configured using .pre-commit-config.yaml file.
By default it runs:
* black (formats your code);
* mypy (validates types);
* isort (sorts imports in all files);
* flake8 (spots possible bugs);
* yesqa (removes useless `# noqa` comments).
You can read more about pre-commit here: https://pre-commit.com/
## Running tests
If you want to run it in docker, simply run:
```bash
docker-compose -f deploy/docker-compose.yml --project-directory . run --rm api pytest -vv .
docker-compose -f deploy/docker-compose.yml --project-directory . down
```
For running tests on your local machine.
1. you need to start a database.
I prefer doing it with docker:
```
docker run -p "5432:5432" -e "POSTGRES_PASSWORD=med_backend" -e "POSTGRES_USER=med_backend" -e "POSTGRES_DB=med_backend" postgres:13.8-bullseye
```
2. Run the pytest.
```bash
pytest -vv .
```

27
deploy/Dockerfile Normal file
View File

@ -0,0 +1,27 @@
FROM python:3.9.6-slim-buster

# gcc is needed to build wheels for dependencies without prebuilt binaries.
RUN apt-get update && apt-get install -y \
  gcc \
  && rm -rf /var/lib/apt/lists/*

RUN pip install poetry==1.2.2

# Configuring poetry: install into the system interpreter (no venv inside the container).
RUN poetry config virtualenvs.create false

# Copying requirements of a project first so this layer is cached
# unless pyproject.toml/poetry.lock change.
COPY pyproject.toml poetry.lock /app/src/
WORKDIR /app/src

# Installing requirements
RUN poetry install

# Removing gcc to keep the final image slim.
RUN apt-get purge -y \
  gcc \
  && rm -rf /var/lib/apt/lists/*

# Copying actual application
COPY . /app/src/

# Second install registers the project package itself now that sources are present.
RUN poetry install

CMD ["/usr/local/bin/python", "-m", "med_backend"]

View File

@ -0,0 +1,13 @@
version: '3.9'
services:
api:
ports:
# Exposes application port.
- "8000:8000"
volumes:
# Adds current directory as volume.
- .:/app/src/
environment:
# Enables autoreload.
MED_BACKEND_RELOAD: "True"

57
deploy/docker-compose.yml Normal file
View File

@ -0,0 +1,57 @@
version: '3.9'
services:
api:
build:
context: .
dockerfile: ./deploy/Dockerfile
image: med_backend:${MED_BACKEND_VERSION:-latest}
restart: always
env_file:
- .env
depends_on:
db:
condition: service_healthy
redis:
condition: service_healthy
environment:
MED_BACKEND_HOST: 0.0.0.0
MED_BACKEND_DB_HOST: med_backend-db
MED_BACKEND_DB_PORT: 5432
MED_BACKEND_DB_USER: med_backend
MED_BACKEND_DB_PASS: med_backend
MED_BACKEND_DB_BASE: med_backend
db:
image: postgres:13.8-bullseye
hostname: med_backend-db
environment:
POSTGRES_PASSWORD: "med_backend"
POSTGRES_USER: "med_backend"
POSTGRES_DB: "med_backend"
volumes:
- med_backend-db-data:/var/lib/postgresql/data
restart: always
healthcheck:
test: pg_isready -U med_backend
interval: 2s
timeout: 3s
retries: 40
redis:
image: bitnami/redis:6.2.5
hostname: "med_backend-redis"
restart: always
environment:
ALLOW_EMPTY_PASSWORD: "yes"
healthcheck:
test: redis-cli ping
interval: 1s
timeout: 3s
retries: 50
volumes:
med_backend-db-data:
name: med_backend-db-data

1
med_backend/__init__.py Normal file
View File

@ -0,0 +1 @@
"""med_backend package."""

20
med_backend/__main__.py Normal file
View File

@ -0,0 +1,20 @@
import uvicorn
from med_backend.settings import settings
def main() -> None:
    """Entrypoint of the application.

    Starts a uvicorn server configured from the application settings
    (host, port, worker count, autoreload flag and log level).
    """
    uvicorn.run(
        "med_backend.web.application:get_app",
        workers=settings.workers_count,
        host=settings.host,
        port=settings.port,
        reload=settings.reload,
        log_level=settings.log_level.value.lower(),
        # The target string points at a factory function, not an app instance.
        factory=True,
    )


if __name__ == "__main__":
    main()

129
med_backend/conftest.py Normal file
View File

@ -0,0 +1,129 @@
from typing import Any, AsyncGenerator
import pytest
from fakeredis import FakeServer
from fakeredis.aioredis import FakeConnection
from fastapi import FastAPI
from httpx import AsyncClient
from redis.asyncio import ConnectionPool
from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession, create_async_engine
from sqlalchemy.orm import sessionmaker
from med_backend.db.dependencies import get_db_session
from med_backend.db.utils import create_database, drop_database
from med_backend.services.redis.dependency import get_redis_pool
from med_backend.settings import settings
from med_backend.web.application import get_app
@pytest.fixture(scope="session")
def anyio_backend() -> str:
"""
Backend for anyio pytest plugin.
:return: backend name.
"""
return "asyncio"
@pytest.fixture(scope="session")
async def _engine() -> AsyncGenerator[AsyncEngine, None]:
"""
Create engine and databases.
:yield: new engine.
"""
from med_backend.db.meta import meta # noqa: WPS433
from med_backend.db.models import load_all_models # noqa: WPS433
load_all_models()
await create_database()
engine = create_async_engine(str(settings.db_url))
async with engine.begin() as conn:
await conn.run_sync(meta.create_all)
try:
yield engine
finally:
await engine.dispose()
await drop_database()
@pytest.fixture
async def dbsession(
_engine: AsyncEngine,
) -> AsyncGenerator[AsyncSession, None]:
"""
Get session to database.
Fixture that returns a SQLAlchemy session with a SAVEPOINT, and the rollback to it
after the test completes.
:param _engine: current engine.
:yields: async session.
"""
connection = await _engine.connect()
trans = await connection.begin()
session_maker = sessionmaker(
connection,
expire_on_commit=False,
class_=AsyncSession,
)
session = session_maker()
try:
yield session
finally:
await session.close()
await trans.rollback()
await connection.close()
@pytest.fixture
async def fake_redis_pool() -> AsyncGenerator[ConnectionPool, None]:
"""
Get instance of a fake redis.
:yield: FakeRedis instance.
"""
server = FakeServer()
server.connected = True
pool = ConnectionPool(connection_class=FakeConnection, server=server)
yield pool
await pool.disconnect()
@pytest.fixture
def fastapi_app(
dbsession: AsyncSession,
fake_redis_pool: ConnectionPool,
) -> FastAPI:
"""
Fixture for creating FastAPI app.
:return: fastapi app with mocked dependencies.
"""
application = get_app()
application.dependency_overrides[get_db_session] = lambda: dbsession
application.dependency_overrides[get_redis_pool] = lambda: fake_redis_pool
return application # noqa: WPS331
@pytest.fixture
async def client(
fastapi_app: FastAPI,
anyio_backend: Any,
) -> AsyncGenerator[AsyncClient, None]:
"""
Fixture that creates client for requesting server.
:param fastapi_app: the application.
:yield: client for the app.
"""
async with AsyncClient(app=fastapi_app, base_url="http://test") as ac:
yield ac

20
med_backend/db/base.py Normal file
View File

@ -0,0 +1,20 @@
from typing import Any, Tuple
from sqlalchemy import Table
from sqlalchemy.orm import as_declarative
from med_backend.db.meta import meta
@as_declarative(metadata=meta)
class Base:
"""
Base for all models.
It has some type definitions to
enhance autocompletion.
"""
__tablename__: str
__table__: Table
__table_args__: Tuple[Any, ...]

View File

@ -0,0 +1 @@
"""DAO classes."""

View File

@ -0,0 +1,53 @@
from typing import List, Optional
from fastapi import Depends
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from med_backend.db.dependencies import get_db_session
from med_backend.db.models.dummy_model import DummyModel
class DummyDAO:
    """Class for accessing dummy table."""

    def __init__(self, session: AsyncSession = Depends(get_db_session)):
        # Session is injected per-request by FastAPI's dependency system.
        self.session = session

    async def create_dummy_model(self, name: str) -> None:
        """
        Add single dummy to session.

        The row is only added to the session here; the commit happens
        when the request-scoped session is finalized (see ``get_db_session``).

        :param name: name of a dummy.
        """
        self.session.add(DummyModel(name=name))

    async def get_all_dummies(self, limit: int, offset: int) -> List[DummyModel]:
        """
        Get all dummy models with limit/offset pagination.

        :param limit: limit of dummies.
        :param offset: offset of dummies.
        :return: stream of dummies.
        """
        raw_dummies = await self.session.execute(
            select(DummyModel).limit(limit).offset(offset),
        )
        return raw_dummies.scalars().fetchall()

    async def filter(
        self,
        name: Optional[str] = None,
    ) -> List[DummyModel]:
        """
        Get specific dummy model.

        :param name: name of dummy instance; when ``None``, all rows match.
        :return: dummy models.
        """
        query = select(DummyModel)
        if name:
            query = query.where(DummyModel.name == name)
        rows = await self.session.execute(query)
        return rows.scalars().fetchall()

View File

@ -0,0 +1,20 @@
from typing import AsyncGenerator
from sqlalchemy.ext.asyncio import AsyncSession
from starlette.requests import Request
async def get_db_session(request: Request) -> AsyncGenerator[AsyncSession, None]:
    """
    Create and get database session.

    A fresh session is created from the app-level session factory, handed
    to the request handler, then committed and closed afterwards.

    :param request: current request.
    :yield: database session.
    """
    session: AsyncSession = request.app.state.db_session_factory()
    try:  # noqa: WPS501
        yield session
    finally:
        # Commit before close so changes made by the handler persist.
        # NOTE(review): this commits even if the handler raised — confirm
        # that best-effort behaviour is intended.
        await session.commit()
        await session.close()

3
med_backend/db/meta.py Normal file
View File

@ -0,0 +1,3 @@
import sqlalchemy as sa
meta = sa.MetaData()

View File

@ -0,0 +1,14 @@
"""med_backend models."""
import pkgutil
from pathlib import Path
def load_all_models() -> None:
    """Import every model module in this package.

    Walks the package directory and imports each discovered submodule so
    that every model class is registered before metadata is used.
    """
    models_dir = Path(__file__).resolve().parent
    discovered = pkgutil.walk_packages(
        path=[str(models_dir)],
        prefix="med_backend.db.models.",
    )
    for found in discovered:
        __import__(found.name)  # noqa: WPS421

View File

@ -0,0 +1,13 @@
from sqlalchemy.sql.schema import Column
from sqlalchemy.sql.sqltypes import Integer, String
from med_backend.db.base import Base
class DummyModel(Base):
"""Model for demo purpose."""
__tablename__ = "dummy_model"
id = Column(Integer(), primary_key=True, autoincrement=True)
name = Column(String(length=200)) # noqa: WPS432

44
med_backend/db/utils.py Normal file
View File

@ -0,0 +1,44 @@
from sqlalchemy import text
from sqlalchemy.engine import make_url
from sqlalchemy.ext.asyncio import create_async_engine
from med_backend.settings import settings
async def create_database() -> None:
    """Create a database.

    Connects to the maintenance ``postgres`` database, drops any existing
    database with the configured name, then creates a fresh one.
    """
    db_url = make_url(str(settings.db_url.with_path("/postgres")))
    engine = create_async_engine(db_url, isolation_level="AUTOCOMMIT")

    async with engine.connect() as conn:
        database_existence = await conn.execute(
            text(
                f"SELECT 1 FROM pg_database WHERE datname='{settings.db_base}'",  # noqa: E501, S608
            ),
        )
        database_exists = database_existence.scalar() == 1

    # Start from a clean slate: recreate the database if it already exists.
    if database_exists:
        await drop_database()

    async with engine.connect() as conn:  # noqa: WPS440
        await conn.execute(
            text(
                f'CREATE DATABASE "{settings.db_base}" ENCODING "utf8" TEMPLATE template1',  # noqa: E501
            ),
        )
async def drop_database() -> None:
    """Drop current database.

    Terminates every other backend connected to the target database first,
    because PostgreSQL refuses to drop a database with active connections.
    """
    db_url = make_url(str(settings.db_url.with_path("/postgres")))
    engine = create_async_engine(db_url, isolation_level="AUTOCOMMIT")
    async with engine.connect() as conn:
        # Disconnect all other sessions attached to the target database.
        disc_users = (
            "SELECT pg_terminate_backend(pg_stat_activity.pid) "  # noqa: S608
            "FROM pg_stat_activity "
            f"WHERE pg_stat_activity.datname = '{settings.db_base}' "
            "AND pid <> pg_backend_pid();"
        )
        await conn.execute(text(disc_users))
        await conn.execute(text(f'DROP DATABASE "{settings.db_base}"'))

View File

@ -0,0 +1 @@
"""Services for med_backend."""

View File

@ -0,0 +1 @@
"""Redis service."""

View File

@ -0,0 +1,26 @@
from typing import AsyncGenerator
from redis.asyncio import Redis
from starlette.requests import Request
async def get_redis_pool(
    request: Request,
) -> AsyncGenerator[Redis, None]:  # pragma: no cover
    """
    Returns connection pool.

    You can use it like this:

    >>> from redis.asyncio import ConnectionPool, Redis
    >>>
    >>> async def handler(redis_pool: ConnectionPool = Depends(get_redis_pool)):
    >>>     async with Redis(connection_pool=redis_pool) as redis:
    >>>         await redis.get('key')

    A pool is returned (not a connection) so a connection is not held
    for the whole duration of the handler.

    :param request: current request.
    :returns: redis connection pool.
    """
    return request.app.state.redis_pool

View File

@ -0,0 +1,24 @@
from fastapi import FastAPI
from redis.asyncio import ConnectionPool
from med_backend.settings import settings
def init_redis(app: FastAPI) -> None:  # pragma: no cover
    """
    Creates connection pool for redis.

    The pool is stored on ``app.state.redis_pool`` for retrieval
    by request handlers.

    :param app: current fastapi application.
    """
    app.state.redis_pool = ConnectionPool.from_url(
        str(settings.redis_url),
    )
async def shutdown_redis(app: FastAPI) -> None:  # pragma: no cover
    """
    Closes redis connection pool on application shutdown.

    :param app: current FastAPI app.
    """
    await app.state.redis_pool.disconnect()

99
med_backend/settings.py Normal file
View File

@ -0,0 +1,99 @@
import enum
from pathlib import Path
from tempfile import gettempdir
from typing import Optional
from pydantic import BaseSettings
from yarl import URL
TEMP_DIR = Path(gettempdir())
class LogLevel(str, enum.Enum):  # noqa: WPS600
    """Possible log levels (passed to uvicorn in lower case)."""

    NOTSET = "NOTSET"
    DEBUG = "DEBUG"
    INFO = "INFO"
    WARNING = "WARNING"
    ERROR = "ERROR"
    FATAL = "FATAL"
class Settings(BaseSettings):
    """
    Application settings.

    These parameters can be configured
    with environment variables carrying the ``MED_BACKEND_`` prefix
    (see ``Config.env_prefix`` below).
    """

    host: str = "127.0.0.1"
    port: int = 8000
    # quantity of workers for uvicorn
    workers_count: int = 1
    # Enable uvicorn reloading
    reload: bool = False

    # Current environment
    environment: str = "dev"

    log_level: LogLevel = LogLevel.INFO

    # Variables for the database
    db_host: str = "localhost"
    db_port: int = 5432
    db_user: str = "med_backend"
    db_pass: str = "med_backend"
    db_base: str = "med_backend"
    db_echo: bool = False

    # Variables for Redis; user/pass/base are optional.
    redis_host: str = "med_backend-redis"
    redis_port: int = 6379
    redis_user: Optional[str] = None
    redis_pass: Optional[str] = None
    redis_base: Optional[int] = None

    @property
    def db_url(self) -> URL:
        """
        Assemble database URL from settings.

        :return: database URL.
        """
        return URL.build(
            scheme="postgresql+asyncpg",
            host=self.db_host,
            port=self.db_port,
            user=self.db_user,
            password=self.db_pass,
            path=f"/{self.db_base}",
        )

    @property
    def redis_url(self) -> URL:
        """
        Assemble REDIS URL from settings.

        :return: redis URL.
        """
        # Only include a path component when a database index is configured.
        path = ""
        if self.redis_base is not None:
            path = f"/{self.redis_base}"
        return URL.build(
            scheme="redis",
            host=self.redis_host,
            port=self.redis_port,
            user=self.redis_user,
            password=self.redis_pass,
            path=path,
        )

    class Config:
        env_file = ".env"
        env_prefix = "MED_BACKEND_"
        env_file_encoding = "utf-8"


# Module-level singleton read by the rest of the application.
settings = Settings()

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1 @@
"""Tests for med_backend."""

View File

@ -0,0 +1,49 @@
import uuid
import pytest
from fastapi import FastAPI
from httpx import AsyncClient
from sqlalchemy.ext.asyncio import AsyncSession
from starlette import status
from med_backend.db.dao.dummy_dao import DummyDAO
@pytest.mark.anyio
async def test_creation(
fastapi_app: FastAPI,
client: AsyncClient,
dbsession: AsyncSession,
) -> None:
"""Tests dummy instance creation."""
url = fastapi_app.url_path_for("create_dummy_model")
test_name = uuid.uuid4().hex
response = await client.put(
url,
json={
"name": test_name,
},
)
assert response.status_code == status.HTTP_200_OK
dao = DummyDAO(dbsession)
instances = await dao.filter(name=test_name)
assert instances[0].name == test_name
@pytest.mark.anyio
async def test_getting(
fastapi_app: FastAPI,
client: AsyncClient,
dbsession: AsyncSession,
) -> None:
"""Tests dummy instance retrieval."""
dao = DummyDAO(dbsession)
test_name = uuid.uuid4().hex
await dao.create_dummy_model(name=test_name)
url = fastapi_app.url_path_for("get_dummy_models")
response = await client.get(url)
dummies = response.json()
assert response.status_code == status.HTTP_200_OK
assert len(dummies) == 1
assert dummies[0]["name"] == test_name

View File

@ -0,0 +1,26 @@
import uuid
import pytest
from fastapi import FastAPI
from httpx import AsyncClient
from starlette import status
@pytest.mark.anyio
async def test_echo(fastapi_app: FastAPI, client: AsyncClient) -> None:
    """
    Tests that echo route works: the posted message is returned unchanged.

    :param fastapi_app: current application.
    :param client: client for the app.
    """
    url = fastapi_app.url_path_for("send_echo_message")
    message = uuid.uuid4().hex
    response = await client.post(
        url,
        json={
            "message": message,
        },
    )
    assert response.status_code == status.HTTP_200_OK
    assert response.json()["message"] == message

View File

@ -0,0 +1,17 @@
import pytest
from fastapi import FastAPI
from httpx import AsyncClient
from starlette import status
@pytest.mark.anyio
async def test_health(client: AsyncClient, fastapi_app: FastAPI) -> None:
"""
Checks the health endpoint.
:param client: client for the app.
:param fastapi_app: current FastAPI application.
"""
url = fastapi_app.url_path_for("health_check")
response = await client.get(url)
assert response.status_code == status.HTTP_200_OK

View File

@ -0,0 +1,63 @@
import uuid
import pytest
from fastapi import FastAPI
from httpx import AsyncClient
from redis.asyncio import ConnectionPool, Redis
from starlette import status
@pytest.mark.anyio
async def test_setting_value(
fastapi_app: FastAPI,
fake_redis_pool: ConnectionPool,
client: AsyncClient,
) -> None:
"""
Tests that you can set value in redis.
:param fastapi_app: current application fixture.
:param fake_redis_pool: fake redis pool.
:param client: client fixture.
"""
url = fastapi_app.url_path_for("set_redis_value")
test_key = uuid.uuid4().hex
test_val = uuid.uuid4().hex
response = await client.put(
url,
json={
"key": test_key,
"value": test_val,
},
)
assert response.status_code == status.HTTP_200_OK
async with Redis(connection_pool=fake_redis_pool) as redis:
actual_value = await redis.get(test_key)
assert actual_value.decode() == test_val
@pytest.mark.anyio
async def test_getting_value(
fastapi_app: FastAPI,
fake_redis_pool: ConnectionPool,
client: AsyncClient,
) -> None:
"""
Tests that you can get value from redis by key.
:param fastapi_app: current application fixture.
:param fake_redis_pool: fake redis pool.
:param client: client fixture.
"""
test_key = uuid.uuid4().hex
test_val = uuid.uuid4().hex
async with Redis(connection_pool=fake_redis_pool) as redis:
await redis.set(test_key, test_val)
url = fastapi_app.url_path_for("get_redis_value")
response = await client.get(url, params={"key": test_key})
assert response.status_code == status.HTTP_200_OK
assert response.json()["key"] == test_key
assert response.json()["value"] == test_val

View File

@ -0,0 +1 @@
"""WEB API for med_backend."""

View File

@ -0,0 +1 @@
"""med_backend API package."""

View File

@ -0,0 +1,4 @@
"""Dummy model API."""
from med_backend.web.api.dummy.views import router
__all__ = ["router"]

View File

@ -0,0 +1,21 @@
from pydantic import BaseModel
class DummyModelDTO(BaseModel):
"""
DTO for dummy models.
It returned when accessing dummy models from the API.
"""
id: int
name: str
class Config:
orm_mode = True
class DummyModelInputDTO(BaseModel):
"""DTO for creating new dummy model."""
name: str

View File

@ -0,0 +1,41 @@
from typing import List
from fastapi import APIRouter
from fastapi.param_functions import Depends
from med_backend.db.dao.dummy_dao import DummyDAO
from med_backend.db.models.dummy_model import DummyModel
from med_backend.web.api.dummy.schema import DummyModelDTO, DummyModelInputDTO
router = APIRouter()
@router.get("/", response_model=List[DummyModelDTO])
async def get_dummy_models(
    limit: int = 10,
    offset: int = 0,
    dummy_dao: DummyDAO = Depends(),
) -> List[DummyModel]:
    """
    Retrieve all dummy objects from the database.

    :param limit: limit of dummy objects, defaults to 10.
    :param offset: offset of dummy objects, defaults to 0.
    :param dummy_dao: DAO for dummy models.
    :return: list of dummy objects from database.
    """
    return await dummy_dao.get_all_dummies(limit=limit, offset=offset)
@router.put("/")
async def create_dummy_model(
    new_dummy_object: DummyModelInputDTO,
    dummy_dao: DummyDAO = Depends(),
) -> None:
    """
    Creates dummy model in the database.

    :param new_dummy_object: new dummy model item.
    :param dummy_dao: DAO for dummy models.
    """
    await dummy_dao.create_dummy_model(**new_dummy_object.dict())

View File

@ -0,0 +1,4 @@
"""Echo API."""
from med_backend.web.api.echo.views import router
__all__ = ["router"]

View File

@ -0,0 +1,7 @@
from pydantic import BaseModel
class Message(BaseModel):
"""Simple message model."""
message: str

View File

@ -0,0 +1,18 @@
from fastapi import APIRouter
from med_backend.web.api.echo.schema import Message
router = APIRouter()
@router.post("/", response_model=Message)
async def send_echo_message(
    incoming_message: Message,
) -> Message:
    """
    Sends echo back to user.

    :param incoming_message: incoming message.
    :returns: message same as the incoming.
    """
    return incoming_message

View File

@ -0,0 +1,4 @@
"""API for checking project status."""
from med_backend.web.api.monitoring.views import router
__all__ = ["router"]

View File

@ -0,0 +1,12 @@
from fastapi import APIRouter
router = APIRouter()
@router.get("/health")
def health_check() -> None:
    """
    Checks the health of a project.

    It returns 200 if the project is healthy; the response body is empty.
    """

View File

@ -0,0 +1,4 @@
"""Redis API."""
from med_backend.web.api.redis.views import router
__all__ = ["router"]

View File

@ -0,0 +1,10 @@
from typing import Optional
from pydantic import BaseModel
class RedisValueDTO(BaseModel):
"""DTO for redis values."""
key: str
value: Optional[str] # noqa: WPS110

View File

@ -0,0 +1,44 @@
from fastapi import APIRouter
from fastapi.param_functions import Depends
from redis.asyncio import ConnectionPool, Redis
from med_backend.services.redis.dependency import get_redis_pool
from med_backend.web.api.redis.schema import RedisValueDTO
router = APIRouter()
@router.get("/", response_model=RedisValueDTO)
async def get_redis_value(
    key: str,
    redis_pool: ConnectionPool = Depends(get_redis_pool),
) -> RedisValueDTO:
    """
    Get value from redis.

    :param key: redis key, to get data from.
    :param redis_pool: redis connection pool.
    :returns: information from redis.
    """
    # Borrow a connection from the pool only for the single GET.
    async with Redis(connection_pool=redis_pool) as redis:
        stored_value = await redis.get(key)
    return RedisValueDTO(key=key, value=stored_value)
@router.put("/")
async def set_redis_value(
    redis_value: RedisValueDTO,
    redis_pool: ConnectionPool = Depends(get_redis_pool),
) -> None:
    """
    Set value in redis.

    A ``None`` value is ignored and nothing is written.

    :param redis_value: new value data.
    :param redis_pool: redis connection pool.
    """
    # Guard clause: skip the round trip entirely for empty payloads.
    if redis_value.value is None:
        return
    async with Redis(connection_pool=redis_pool) as redis:
        await redis.set(name=redis_value.key, value=redis_value.value)

View File

@ -0,0 +1,9 @@
from fastapi.routing import APIRouter
from med_backend.web.api import dummy, echo, monitoring, redis
# Top-level API router; mounted under /api by the app factory.
api_router = APIRouter()
# Monitoring (health check) endpoints live at the API root, no prefix.
api_router.include_router(monitoring.router)
api_router.include_router(echo.router, prefix="/echo", tags=["echo"])
api_router.include_router(dummy.router, prefix="/dummy", tags=["dummy"])
api_router.include_router(redis.router, prefix="/redis", tags=["redis"])

View File

@ -0,0 +1,35 @@
from importlib import metadata
from fastapi import FastAPI
from fastapi.responses import UJSONResponse
from med_backend.web.api.router import api_router
from med_backend.web.lifetime import register_shutdown_event, register_startup_event
def get_app() -> FastAPI:
    """
    Get FastAPI application.

    This is the main constructor of an application.

    :return: application.
    """
    application = FastAPI(
        title="med_backend",
        description="",
        version=metadata.version("med_backend"),
        docs_url="/api/docs",
        redoc_url="/api/redoc",
        openapi_url="/api/openapi.json",
        default_response_class=UJSONResponse,
    )

    # Hook startup/shutdown handlers (DB, redis) into the app lifecycle.
    register_startup_event(application)
    register_shutdown_event(application)

    # Every endpoint is exposed under the /api prefix.
    application.include_router(router=api_router, prefix="/api")

    return application

View File

@ -0,0 +1,90 @@
from asyncio import current_task
from typing import Awaitable, Callable
from fastapi import FastAPI
from sqlalchemy.ext.asyncio import (
AsyncSession,
async_scoped_session,
create_async_engine,
)
from sqlalchemy.orm import sessionmaker
from med_backend.db.meta import meta
from med_backend.db.models import load_all_models
from med_backend.services.redis.lifetime import init_redis, shutdown_redis
from med_backend.settings import settings
def _setup_db(app: FastAPI) -> None:  # pragma: no cover
    """
    Creates connection to the database.

    Builds the SQLAlchemy async engine and a task-scoped session
    factory, then stores both on the application's state property.

    :param app: fastAPI application.
    """
    db_engine = create_async_engine(str(settings.db_url), echo=settings.db_echo)
    # Plain factory producing AsyncSession instances bound to the engine.
    factory = sessionmaker(
        db_engine,
        expire_on_commit=False,
        class_=AsyncSession,
    )
    app.state.db_engine = db_engine
    # Scope sessions to the current asyncio task.
    app.state.db_session_factory = async_scoped_session(
        factory,
        scopefunc=current_task,
    )
async def _create_tables() -> None:  # pragma: no cover
    """Populates tables in the database."""
    # Import every model module so all tables are registered on `meta`.
    load_all_models()
    # Throwaway engine used only for DDL; disposed right after.
    ddl_engine = create_async_engine(str(settings.db_url))
    async with ddl_engine.begin() as connection:
        await connection.run_sync(meta.create_all)
    await ddl_engine.dispose()
def register_startup_event(
    app: FastAPI,
) -> Callable[[], Awaitable[None]]:  # pragma: no cover
    """
    Actions to run on application startup.

    This function uses fastAPI app to store data
    in the state, such as db_engine.

    :param app: the fastAPI application.
    :return: function that actually performs actions.
    """

    @app.on_event("startup")
    async def _startup() -> None:  # noqa: WPS430
        # Order matters: the engine must exist before tables are created.
        _setup_db(app)
        await _create_tables()
        init_redis(app)

    return _startup
def register_shutdown_event(
    app: FastAPI,
) -> Callable[[], Awaitable[None]]:  # pragma: no cover
    """
    Actions to run on application's shutdown.

    Disposes the database engine and closes the redis pool
    stored on the application's state.

    :param app: fastAPI application.
    :return: function that actually performs actions.
    """

    @app.on_event("shutdown")
    async def _shutdown() -> None:  # noqa: WPS430
        await app.state.db_engine.dispose()
        await shutdown_redis(app)

    return _shutdown

2210
poetry.lock generated Normal file

File diff suppressed because it is too large Load Diff

81
pyproject.toml Normal file
View File

@ -0,0 +1,81 @@
[tool.poetry]
name = "med_backend"
version = "0.1.0"
description = ""
authors = [
]
maintainers = [
]
readme = "README.md"
[tool.poetry.dependencies]
python = "^3.9"
fastapi = "^0.85.0"
uvicorn = { version = "^0.18.3", extras = ["standard"] }
pydantic = {version = "^1.10.2", extras = ["dotenv"]}
yarl = "^1.8.1"
ujson = "^5.5.0"
SQLAlchemy = {version = "^1.4.41", extras = ["mypy", "asyncio"]}
asyncpg = {version = "^0.26.0", extras = ["sa"]}
redis = {version = "^4.3.4", extras = ["hiredis"]}
httptools = "^0.5.0"
[tool.poetry.dev-dependencies]
pytest = "^7.1.3"
flake8 = "~4.0.1"
mypy = "^0.981"
isort = "^5.10.1"
yesqa = "^1.4.0"
pre-commit = "^2.20.0"
wemake-python-styleguide = "^0.17.0"
black = "^22.8.0"
autoflake = "^1.6.1"
SQLAlchemy = {version = "^1.4.41", extras = ["mypy"]}
pytest-cov = "^4.0.0"
anyio = "^3.6.1"
pytest-env = "^0.6.2"
fakeredis = "^1.9.3"
httpx = "^0.23.0"
[tool.isort]
profile = "black"
multi_line_output = 3
src_paths = ["med_backend",]
[tool.mypy]
strict = true
ignore_missing_imports = true
allow_subclassing_any = true
allow_untyped_calls = true
pretty = true
show_error_codes = true
implicit_reexport = true
allow_untyped_decorators = true
warn_unused_ignores = false
warn_return_any = false
namespace_packages = true
plugins = ["sqlalchemy.ext.mypy.plugin"]
# Remove this and add `types-redis`
# when the issue https://github.com/python/typeshed/issues/8242 is resolved.
[[tool.mypy.overrides]]
module = [
'redis.asyncio'
]
ignore_missing_imports = true
[tool.pytest.ini_options]
filterwarnings = [
"error",
"ignore::DeprecationWarning",
"ignore:.*unclosed.*:ResourceWarning",
]
env = [
"MED_BACKEND_DB_BASE=med_backend_test",
]
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"