init commit (4df5770e76)
@@ -0,0 +1,3 @@
POSTGRES_DB=serviceman_db
POSTGRES_USER=demo_user
POSTGRES_PASSWORD=user_pass
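These variables are handed to the db container through the env_file/environment sections of docker-compose further down. A minimal sketch of building a DSN from the same values in Python, assuming the .env contents have been exported into the process environment:

import os

# Assumes POSTGRES_* are present in the environment (e.g. via docker-compose env_file).
dsn = "postgresql://{user}:{password}@db:5432/{db}".format(
    user=os.environ["POSTGRES_USER"],
    password=os.environ["POSTGRES_PASSWORD"],
    db=os.environ["POSTGRES_DB"],
)
print(dsn)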
@@ -0,0 +1,116 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = api/migrations

# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python>=3.9 or backports.zoneinfo library.
# Any required deps can be installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to migrations/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions

# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os  # Use os.pathsep. Default configuration used for new projects.

# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

sqlalchemy.url = postgresql://demo_user:user_pass@db:5432/serviceman_db


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = --fix REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
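The same migrations that this ini file drives from the CLI can also be invoked programmatically. A minimal sketch, assuming it runs from the directory containing alembic.ini and that the database in sqlalchemy.url is reachable (the revision message is illustrative):

from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")  # picks up script_location = api/migrations
command.upgrade(cfg, "head")  # apply all pending revisions
command.revision(cfg, message="example change", autogenerate=True)  # draft a new revision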
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,14 @@
from fastapi import FastAPI

from api.di import Container
from api.router.user import router


def create_app() -> FastAPI:
    app = FastAPI()
    app.container = Container()
    app.include_router(router)
    return app


app = create_app()
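The compose service further down starts this factory through uvicorn's --factory flag; the equivalent programmatic start, as a sketch assuming the project root is on sys.path:

import uvicorn

if __name__ == "__main__":
    # Mirrors the docker-compose command: build the app via the create_app factory.
    uvicorn.run("api.app:create_app", factory=True, host="0.0.0.0", port=8000, reload=True)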
@@ -0,0 +1,34 @@
import os

from dependency_injector import containers, providers
from repository.user import UserRepository
from service.user import UserService
from uow.database import Database


class Container(containers.DeclarativeContainer):
    wiring_config = containers.WiringConfiguration(modules=["router.user"])

    config = providers.Configuration(yaml_files=[f"{os.getenv('CONFIG_PATH')}"])

    db = providers.Singleton(
        Database,
        db_url="postgresql+asyncpg://{}:{}@{}:{}/{}".format(
            config.db.user,
            config.db.password,
            config.db.host,
            # config.db.port,
            "5432",
            config.db.database,
        ),
    )

    user_repository = providers.Factory(
        UserRepository,
        session_factory=db.provided.session,
    )

    user_service = providers.Factory(
        UserService,
        user_repository=user_repository,
    )
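One caveat worth noting: the str.format call above runs while the class body is evaluated, so it may interpolate the configuration provider objects rather than the values later loaded from YAML. A hedged alternative sketch that assembles the DSN lazily, reusing the imports from the container module above (the helper name is illustrative, not part of the commit):

def _build_dsn(user: str, password: str, host: str, port: str, database: str) -> str:
    return f"postgresql+asyncpg://{user}:{password}@{host}:{port}/{database}"


class Container(containers.DeclarativeContainer):
    config = providers.Configuration(yaml_files=[f"{os.getenv('CONFIG_PATH')}"])

    # providers.Callable defers the call, so the DSN is built from the loaded
    # configuration values when the Singleton is first created.
    dsn = providers.Callable(
        _build_dsn,
        user=config.db.user,
        password=config.db.password,
        host=config.db.host,
        port=config.db.port,
        database=config.db.database,
    )

    db = providers.Singleton(Database, db_url=dsn)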
@@ -0,0 +1 @@
Generic single-database configuration.
Binary file not shown.
@@ -0,0 +1,77 @@
from logging.config import fileConfig

from alembic import context
from sqlalchemy import engine_from_config, pool

import api.model.user  # type: ignore
from api.uow.database import Base

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata)

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
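run_migrations_offline() is what Alembic's offline (--sql) mode exercises; a small sketch of previewing the generated DDL without touching the database, assuming it runs next to alembic.ini:

from alembic import command
from alembic.config import Config

# Prints the SQL for the pending migrations to stdout instead of executing it.
command.upgrade(Config("alembic.ini"), "head", sql=True)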
@@ -0,0 +1,26 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
Binary file not shown.
@@ -0,0 +1,38 @@
"""initial

Revision ID: ec1380cb4f18
Revises:
Create Date: 2024-03-04 03:11:36.206211

"""

from typing import Sequence, Union

import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision: str = "ec1380cb4f18"
down_revision: str | None = None
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "users",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("email", sa.String(), nullable=True),
        sa.Column("hashed_password", sa.String(), nullable=True),
        sa.Column("is_active", sa.Boolean(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("email"),
    )
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table("users")
    # ### end Alembic commands ###
@@ -0,0 +1,20 @@
from sqlalchemy import Boolean, Column, Integer, String

from .database import Base


class User(Base):
    __tablename__ = "users"

    id = Column(Integer, primary_key=True)
    email = Column(String, unique=True)
    hashed_password = Column(String)
    is_active = Column(Boolean, default=True)

    def __repr__(self):
        return (
            f"<User(id={self.id}, "
            f'email="{self.email}", '
            f'hashed_password="{self.hashed_password}", '
            f"is_active={self.is_active})>"
        )
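With SQLAlchemy pinned at ^2.0 in pyproject.toml, the same table could also be declared with the typed 2.0-style mapping; an alternative sketch, not the committed version:

from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class Base(DeclarativeBase):
    pass


class User(Base):
    __tablename__ = "users"

    id: Mapped[int] = mapped_column(primary_key=True)
    email: Mapped[str | None] = mapped_column(unique=True)
    hashed_password: Mapped[str | None]
    is_active: Mapped[bool | None] = mapped_column(default=True)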
@@ -0,0 +1,48 @@
from contextlib import AbstractContextManager
from typing import Callable

from model.user import User
from sqlalchemy.orm import Session


class UserRepository:
    def __init__(self, session_factory: Callable[..., AbstractContextManager[Session]]) -> None:
        self.session_factory = session_factory

    def get_all(self):
        with self.session_factory() as session:
            return session.query(User).all()

    def get_by_id(self, user_id: int) -> User:
        with self.session_factory() as session:
            user = session.query(User).filter(User.id == user_id).first()
            if not user:
                raise UserNotFoundError(user_id)
            return user

    def add(self, email: str, password: str, is_active: bool = True) -> User:
        with self.session_factory() as session:
            user = User(email=email, hashed_password=password, is_active=is_active)
            session.add(user)
            session.commit()
            session.refresh(user)
            return user

    def delete_by_id(self, user_id: int) -> None:
        with self.session_factory() as session:
            entity: User = session.query(User).filter(User.id == user_id).first()
            if not entity:
                raise UserNotFoundError(user_id)
            session.delete(entity)
            session.commit()


class NotFoundError(Exception):
    entity_name: str

    def __init__(self, entity_id):
        super().__init__(f"{self.entity_name} not found, id: {entity_id}")


class UserNotFoundError(NotFoundError):
    entity_name: str = "User"
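The repository only assumes a callable that yields a synchronous Session context manager, so a plain sessionmaker satisfies it. A standalone usage sketch, with host/port taken from the 6432:5432 mapping in docker-compose and an illustrative email:

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

from repository.user import UserRepository  # import path as wired in the DI container

engine = create_engine("postgresql://demo_user:user_pass@localhost:6432/serviceman_db")
SessionLocal = sessionmaker(bind=engine)  # Session objects act as context managers

repo = UserRepository(session_factory=SessionLocal)
created = repo.add(email="demo@example.com", password="not-really-hashed")
print(repo.get_by_id(created.id))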
@@ -0,0 +1,55 @@
from dependency_injector.wiring import Provide, inject
from fastapi import APIRouter, Depends, Response, status

from ..di import Container
from ..repository.user import NotFoundError
from ..service.user import UserService

router = APIRouter()


@router.get("/users")
@inject
def get_list(
    user_service: UserService = Depends(Provide[Container.user_service]),
):
    return user_service.get_users()


@router.get("/users/{user_id}")
@inject
def get_by_id(
    user_id: int,
    user_service: UserService = Depends(Provide[Container.user_service]),
):
    try:
        return user_service.get_user_by_id(user_id)
    except NotFoundError:
        return Response(status_code=status.HTTP_404_NOT_FOUND)


@router.post("/users", status_code=status.HTTP_201_CREATED)
@inject
def add(
    user_service: UserService = Depends(Provide[Container.user_service]),
):
    return user_service.create_user()


@router.delete("/users/{user_id}", status_code=status.HTTP_204_NO_CONTENT)
@inject
def remove(
    user_id: int,
    user_service: UserService = Depends(Provide[Container.user_service]),
):
    try:
        user_service.delete_user_by_id(user_id)  # type: ignore
    except NotFoundError:
        return Response(status_code=status.HTTP_404_NOT_FOUND)
    else:
        return Response(status_code=status.HTTP_204_NO_CONTENT)


@router.get("/status")
def get_status():
    return {"status": "OK"}
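A quick way to exercise these routes is FastAPI's TestClient against the create_app factory shown earlier; a sketch, assuming the container's database wiring can be satisfied (or overridden) when the app is built:

from fastapi.testclient import TestClient  # requires the httpx package used by TestClient

from api.app import create_app

client = TestClient(create_app())

print(client.get("/status").json())      # {"status": "OK"}
print(client.get("/users").status_code)  # needs the database wiring to resolve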
@@ -0,0 +1,22 @@
from uuid import uuid4

from model.user import User
from repository.user import UserRepository


class UserService:
    def __init__(self, user_repository: UserRepository) -> None:
        self._repository: UserRepository = user_repository

    def get_users(self):
        return self._repository.get_all()

    def get_user_by_id(self, user_id: int) -> User:
        return self._repository.get_by_id(user_id)

    def create_user(self) -> User:
        uid = uuid4()
        return self._repository.add(email=f"{uid}@email.com", password="pwd")

    def delete_user_by_id(self, user_id: int) -> None:
        return self._repository.delete_by_id(user_id)
@@ -0,0 +1,29 @@
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()


class Database:
    def __init__(self, db_url: str) -> None:
        self._engine = create_async_engine(db_url, echo=True)
        self._async_session = async_sessionmaker(
            self._engine,
            class_=AsyncSession,
            expire_on_commit=False,
        )

    async def __aenter__(self):
        async with self._async_session() as session:
            self.session = session
            return self

    async def __aexit__(self, *args):
        await self.session.rollback()
        await self.session.close()

    async def commit(self):
        await self.session.commit()

    async def rollback(self):
        await self.session.rollback()
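Standalone usage of this unit of work as an async context manager; a sketch with an illustrative DSN pointing at the port published by docker-compose:

import asyncio

from sqlalchemy import text


async def main() -> None:
    db = Database("postgresql+asyncpg://demo_user:user_pass@localhost:6432/serviceman_db")
    async with db as uow:
        result = await uow.session.execute(text("SELECT 1"))
        print(result.scalar_one())
        await uow.commit()


asyncio.run(main())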
@@ -0,0 +1,7 @@
db:
  host: "db"
  port: "5432"
  database: "serviceman_db"
  user: "demo_user"
  password: "user_pass"
  echo: true
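This is the file the container's Configuration provider loads via CONFIG_PATH; a sketch of reading it directly, with the path as mounted in docker-compose:

from dependency_injector import providers

config = providers.Configuration()
config.from_yaml("config/api_config.yml")

print(config.db.host(), config.db.port(), config.db.database())  # db 5432 serviceman_db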
@@ -0,0 +1,121 @@
version: "3.8"
services:
  db:
    container_name: pgdb

    image: postgres:15.1-alpine

    env_file:
      - .env

    environment:
      POSTGRES_DB: ${POSTGRES_DB}
      POSTGRES_USER: ${POSTGRES_USER}
      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}

    ports:
      - 6432:5432

    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER} -d ${POSTGRES_DB}"]
      interval: 10s
      timeout: 5s
      retries: 5


  api:
    container_name: api

    build:
      context: .
      dockerfile: ./docker/api/Dockerfile

    ports:
      - 8000:8000

    depends_on:
      db:
        condition: service_healthy

    restart: always

    volumes:
      - ./config/api_config.yml:/usr/src/service_man/config/api_config.yml
      - ./api:/usr/src/service_man/api
      - ./alembic.ini:/usr/src/service_man/alembic.ini

    command: /bin/bash -c 'cd /usr/src/service_man && alembic upgrade head && poetry run uvicorn api.app:create_app --host 0.0.0.0 --reload --factory'

# bot:
# container_name: bot
#
# build:
# context: .
# dockerfile: ./docker/bot/Dockerfile
#
# env_file:
# - .env
#
# ports:
# - 8001:8000
#
# depends_on:
# db:
# condition: service_healthy
#
# restart: always
#
# volumes:
# - ./tg_bot:/usr/src/service_man/tg_bot
#
# command: /bin/bash -c 'cd /usr/src/service_man/tg_bot && poetry run uvicorn app:create_app --host 0.0.0.0 --reload --factory'
#

# celery_worker:
# container_name: celeryworker
#
# build:
# context: .
#
# env_file:
# - .env
#
# depends_on:
# - rabbitmq
# - db
# - app
# - redis
#
# volumes:
# - .:/usr/src/fastfood
#
# command: ["celery", "-A", "bg_tasks.bg_task:celery_app", "worker", "--loglevel=info", "--concurrency", "1", "-P", "solo"]
#
# celery_beat:
# container_name: celerybeat
#
# build:
# context: .
#
# env_file:
# - .env
#
# depends_on:
# - rabbitmq
# - db
# - app
# - redis
#
# volumes:
# - .:/usr/src/fastfood
#
# command: ["celery", "-A", "bg_tasks.bg_task:celery_app", "beat", "--loglevel=info"]
#
#
# rabbitmq:
# container_name: rabbit
#
# image: "rabbitmq:management"
#
# ports:
# - 5672:5672
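Because the db service publishes 6432:5432, the database is also reachable from the host. A minimal connectivity check, sketched under the assumption that the stack is up:

import asyncio

import asyncpg


async def check() -> None:
    conn = await asyncpg.connect(
        host="localhost",
        port=6432,
        user="demo_user",
        password="user_pass",
        database="serviceman_db",
    )
    print(await conn.fetchval("SELECT version()"))
    await conn.close()


asyncio.run(check())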
@@ -0,0 +1,25 @@
FROM python:3.10-slim

ENV PYTHONDONTWRITEBYTECODE 1

ENV PYTHONUNBUFFERED 1

RUN pip install --upgrade pip

RUN pip install poetry

RUN poetry config virtualenvs.create false

RUN mkdir -p /usr/src/service_man

WORKDIR /usr/src/service_man

COPY ./poetry.lock .

COPY ./pyproject.toml .

RUN poetry install --only api --no-root

ENV CONFIG_PATH='/usr/src/service_man/config/api_config.yml'

RUN touch __init__.py
@@ -0,0 +1,25 @@
FROM python:3.10-slim

ENV PYTHONDONTWRITEBYTECODE 1

ENV PYTHONUNBUFFERED 1

RUN pip install --upgrade pip

RUN pip install poetry

RUN poetry config virtualenvs.create false

RUN mkdir -p /usr/src/service_man

WORKDIR /usr/src/service_man

COPY ./poetry.lock .

COPY ./pyproject.toml .

COPY ./config .

RUN poetry install --only bot --no-root

ENV CONFIG_PATH='./config/bot_config.toml'
File diff suppressed because it is too large
@@ -0,0 +1,38 @@
[tool.poetry]
name = "service-man"
version = "0.1.0"
description = ""
authors = ["pi3c <pi3c@yandex.ru>"]
readme = "README.md"

[tool.poetry.dependencies]
python = "^3.10"


[tool.poetry.group.dev.dependencies]
pre-commit = "^3.6.2"
pytest = "^8.0.2"
mypy = "^1.8.0"


[tool.poetry.group.api.dependencies]
fastapi = "^0.110.0"
sqlalchemy = "^2.0.27"
asyncpg = "^0.29.0"
pydantic-settings = "^2.2.1"
uvicorn = "^0.27.1"
dependency-injector = "^4.41.0"
pyyaml = "^6.0.1"
alembic = "^1.13.1"
psycopg2-binary = "^2.9.9"


[tool.poetry.group.bot.dependencies]
aiogram = "^3.4.1"
fastapi = "^0.110.0"
uvicorn = "^0.27.1"
pydantic-settings = "^2.2.1"

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
@@ -0,0 +1,7 @@
from fastapi import FastAPI


def create_app() -> FastAPI:
    app = FastAPI()

    return app