Compare commits

...

46 Commits

Author SHA1 Message Date
8189aaedd4 fix: Fixed type hints and removed unused entities 2024-02-14 15:34:24 +03:00
5ef6aaeb6f minor fixes 2024-02-13 13:09:09 +03:00
f75415d9d9 Dish deletion 2024-02-13 12:38:13 +03:00
4c3779776d Readme 2024-02-13 02:44:24 +03:00
d54e704dfb fix: volumes were not mounted 2024-02-13 00:02:22 +03:00
68594eb7f0 branch merge 2024-02-12 23:52:21 +03:00
8bfa166987 branch merge 2024-02-12 23:09:50 +03:00
e0a81cf126 google sheets docker image 2024-02-12 23:09:01 +03:00
a4f8bce657 google synchronization 2024-02-12 23:09:01 +03:00
9ba42aae9f upd background task no longer drops the database 2024-02-12 23:09:01 +03:00
afdf1c5e2b fix 2024-02-12 23:09:01 +03:00
74c0ccae2a fix 2024-02-12 23:09:01 +03:00
2c48529a02 fix 2024-02-12 23:09:01 +03:00
cedf27a04d fix 2024-02-12 23:09:01 +03:00
e0798de713 fix 2024-02-12 23:09:01 +03:00
5a133a05e1 fix 2024-02-12 23:09:01 +03:00
3df3c67e7c fix: fixed the RabbitMQ URL 2024-02-12 23:09:01 +03:00
a0ebe9bdb9 upd: Containers for celery & rabbitmq 2024-02-12 23:09:01 +03:00
ed3d7d9352 upd Split the tests into separate files to reduce clutter
upd Test for the summary route
2024-02-12 23:09:01 +03:00
3dbefda936 upd: Apply the discount in API output 2024-02-12 23:09:01 +03:00
5a95b06300 upd: Added bg_task xlsx>>DBase 2024-02-12 23:09:01 +03:00
ebe75b6dc3 upd: Added summary route returning the whole menu with nested items 2024-02-12 23:09:01 +03:00
22a876d3ce google sheets docker image 2024-02-12 23:03:28 +03:00
6a0776557d google synchronization 2024-02-12 22:49:16 +03:00
b2a284d791 upd background task no longer drops the database 2024-02-12 22:22:59 +03:00
5e213e759d fix 2024-02-12 03:03:24 +03:00
f28637f5dd fix 2024-02-12 02:42:46 +03:00
e6d1070d9a fix 2024-02-12 01:42:53 +03:00
47cb0e08c7 fix 2024-02-12 01:29:06 +03:00
e6576e9e58 fix 2024-02-12 01:11:00 +03:00
02134d247a fix 2024-02-12 01:06:45 +03:00
68db31a033 fix: fixed the RabbitMQ URL 2024-02-12 00:54:53 +03:00
fc9577c538 upd: Containers for celery & rabbitmq 2024-02-12 00:39:51 +03:00
550a058b6f upd Split the tests into separate files to reduce clutter
upd Test for the summary route
2024-02-11 23:17:57 +03:00
ffb5b855c4 upd: Apply the discount in API output 2024-02-11 20:10:25 +03:00
d9633dcfbd upd: Added bg_task xlsx>>DBase 2024-02-11 03:14:17 +03:00
e4656825cb upd: Added summary route returning the whole menu with nested items 2024-02-09 02:57:34 +03:00
3120910552 Fix .env for local run 2024-02-07 12:44:42 +03:00
3b1a1614cf fix: .env for local run 2024-02-07 12:37:43 +03:00
aa7db7cd35 Update README.md 2024-02-06 23:16:18 +03:00
27904e0c6a .env
added a template so the file does not have to be copied every time
2024-02-06 22:46:25 +03:00
ee709a489e flow using openapi.json 2024-02-06 22:41:29 +03:00
f8cca4b861 flow dump openapi.json 2024-02-06 22:07:25 +03:00
7d4c4d9be3 fix: type hints in serv/repos 2024-02-06 15:50:02 +03:00
095ab07ebb fix: type hints in routes 2024-02-06 15:25:19 +03:00
f72c6fe4d7 fix: reverse() resolves the URL from the endpoint name 2024-02-06 15:12:32 +03:00
40 changed files with 3317 additions and 610 deletions

14
.env Normal file
View File

@@ -0,0 +1,14 @@
# PostgreSQL server address
POSTGRES_HOST=127.0.0.1
POSTGRES_PORT=5432
# Postgres database user
POSTGRES_USER=testuser
POSTGRES_PASSWORD=test
# Main and test databases
POSTGRES_DB=fastfood_db
POSTGRES_DB_TEST=fastfood_db_test
# Redis
REDIS_HOST=127.0.0.1
REDIS_PORT=6379
REDIS_DB=0
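For illustration only: assuming the example values above and no `/usr/src/RUN_IN_DOCKER` marker file, the `Settings` properties shown further down in this diff (`fastfood/config.py`) would resolve to connection strings along these lines.

```python
# Hypothetical sketch, assuming the .env above is loaded by fastfood.config.Settings
from fastfood.config import settings

print(settings.DATABASE_URL_asyncpg)
# -> postgresql+asyncpg://testuser:test@127.0.0.1:5432/fastfood_db
print(settings.REDIS_URL)
# -> redis://127.0.0.1:6379/0
```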

View File

@@ -8,7 +8,11 @@ RUN mkdir -p /usr/src/fastfood
WORKDIR /usr/src/fastfood
COPY . .
COPY ./example.env .
COPY ./poetry.lock .
COPY ./pyproject.toml .
RUN touch /usr/src/RUN_IN_DOCKER

View File

@@ -5,29 +5,50 @@ Fastapi веб приложение реализующее api для общеп
This project is the result of completing the practical homework assignments of the YLAB Development intensive. The project is built on the fastapi framework using sqlalchemy, with postgresql as the database.
## Technical specification
### Sprint 3 - Patterns and development principles
### Sprint 4 - Multiprocessing, asynchrony
In this homework you need to:
1. Rewrite the current FastAPI application for asynchronous execution
2. Add a background task to the project using Celery + RabbitMQ.
3. Add a (GET) endpoint that returns all menus with all related submenus and all related dishes.
4. Implement cache invalidation in a background task (built into FastAPI)
5.* Update the menu from google sheets every 15 seconds.
6.** Discounted dishes. The discount size (%) is specified in column G of the Menu.xlsx file
1. Move business logic and database queries into separate application layers.
Background task: synchronization of the Excel document and the database.
Create an admin folder in the project and put the Menu.xlsx file into it (it will be attached to the homework). Do not forget to push it to git.
When the file is edited, all changes must be reflected in the database. The update interval is 15 seconds. Dropping the database on every update is not allowed.
2. Add caching of API requests using Redis. Do not forget about cache invalidation (a minimal sketch follows this list).
3. Add pre-commit hooks to the project. The yaml file will be attached to the homework.
4. Cover the project with type hints
5.* Describe the API endpoints in accordance with OpenAPI
6.** Implement an analogue of Django reverse() for FastAPI in the tests
Requirements:
The code must pass all linters.
The code must follow the SOLID, DRY, KISS principles.
●The project must start with a single command (docker).
●The project must pass all Postman tests (the collection from Webinar No. 1).
●The tests you wrote after Webinar No. 2 must be up to date, must run and pass successfully.
The menu, submenu and dish data for the new endpoint must be fetched with a single ORM query to the database (use subqueries and SQL aggregate functions).
The project must start with a single command
●The project must meet the requirements of all previous webinars. (Do not forget to add tests for the new API endpoint)
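A minimal caching sketch (an illustration, not the project's code): responses are serialized into Redis under prefixed keys and invalidated by key pattern, similar in spirit to `fastfood/repository/redis.py` and the `clear_cache` helper in `bg_tasks/updater.py` shown later in this diff.

```python
# Hedged sketch: cache-aside reads with pattern-based invalidation over redis.asyncio.
import pickle

import redis.asyncio as redis

cache = redis.Redis.from_url('redis://127.0.0.1:6379/0')


async def get_cached(key: str, loader):
    """Return the cached value for `key`, computing and storing it on a miss."""
    raw = await cache.get(key)
    if raw is not None:
        return pickle.loads(raw)
    value = await loader()
    await cache.set(key, pickle.dumps(value))
    return value


async def invalidate(pattern: str) -> None:
    """Drop every cached key matching `pattern`, e.g. 'MENUS:*'."""
    keys = [key async for key in cache.scan_iter(pattern)]
    if keys:
        await cache.delete(*keys)
```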
### Completed extra tasks marked with *
Sprint 2
3.* Return the number of submenus and dishes for a Menu via a single (complex) ORM query.
`./fastfood/repository/menu.py`, method `get_menu_item`
4.** Implement the Postman test scenario "Checking the number of dishes and submenus in a menu" with pytest
`./tests/test_postman.py`
Sprint 3
5.* Describe the API endpoints in accordance with OpenAPI
'./openapi.json'
6.** Implement an analogue of Django reverse() for FastAPI in the tests
'./tests/urls.py'
Sprint 4
5.* Menu update from google sheets every 15 seconds.
`./bg_tasks/` Reading of both the local and the remote spreadsheet is implemented.
Whichever compose file you bring up determines which source is used.
6.** Discounted dishes. The discount size (%) is specified in column G of the Menu.xlsx file
`./fastfood/service/dish.py`, method `_get_discont`, substitutes the price in the output;
the discount is stored in REDIS under keys of the form DISCONT:{dish UUID} (see the sketch below).
Additionally:
The project containers and the test containers are started with separate commands.
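For illustration only, a minimal sketch of how such a discount lookup can be applied to a price, assuming the value stored under `DISCONT:{dish UUID}` can be read back as a plain number (the project's actual logic lives in `_get_discont` in `fastfood/service/dish.py`, shown later in this diff):

```python
# Hedged sketch: apply a percentage discount stored in Redis to a dish price.
import redis.asyncio as redis

cache = redis.Redis.from_url('redis://127.0.0.1:6379/0')


async def apply_discount(dish_id: str, price: float) -> float:
    """Return the price reduced by the discount stored for this dish, if any."""
    raw = await cache.get(f'DISCONT:{dish_id}')
    if raw is None:
        return price
    discount = float(raw)  # assumption: the value was stored as a plain numeric string
    return round(price - price * discount / 100, 2)
```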
## Dependencies
- docker
@@ -41,12 +62,14 @@ Fastapi веб приложение реализующее api для общеп
Change into the project directory
> `$ cd fastfood`
> `cp example.env .env`
Starting/stopping the images:
- Start the FastAPI application
> `$ docker-compose -f compose_app.yml up `
- Start the FastAPI application with a local file for the background task
> `$ docker-compose -f compose_app.yml up`
- Start the FastAPI application with Google Sheets for the background task
> `$ docker-compose -f compose_google.yml up`
(READ THE MESSAGE IN THE CHAT)
After the images start successfully, the API documentation will be available at <a href="http://localhost:8000/docs">http://localhost:8000/docs</a>
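Once the stack is up, the summary endpoint added in this change set (see `fastfood/routers/summary.py` further down in the diff) can be checked with, for example:
> `$ curl http://localhost:8000/api/v1/summary/`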

BIN
admin/Menu.xlsx Normal file

Binary file not shown.

0
bg_tasks/__init__.py Normal file
View File

50
bg_tasks/bg_task.py Normal file
View File

@@ -0,0 +1,50 @@
import asyncio
from celery import Celery
from fastfood.config import settings
from .updater import main, main_gsheets
loop = asyncio.get_event_loop()
celery_app = Celery(
'tasks',
broker=settings.REBBITMQ_URL,
backend='rpc://',
include=['bg_tasks.bg_task'],
)
celery_app.conf.beat_schedule = {
'run-task-every-15-seconds': {
'task': 'bg_tasks.bg_task.periodic_task',
'schedule': 30.0,
},
}
celery_app_google = Celery(
'tasks',
broker=settings.REBBITMQ_URL,
backend='rpc://',
include=['bg_tasks.bg_task'],
)
celery_app_google.conf.beat_schedule = {
'run-task-every-15-seconds': {
'task': 'bg_tasks.bg_task.periodic_task_google',
'schedule': 30.0,
},
}
@celery_app_google.task
def periodic_task_google() -> None:
result = loop.run_until_complete(main_gsheets())
return result
@celery_app.task
def periodic_task() -> None:
result = loop.run_until_complete(main())
return result

97
bg_tasks/parser.py Normal file
View File

@@ -0,0 +1,97 @@
import os
from typing import Any
import gspread
import openpyxl
file = os.path.join(os.path.curdir, 'admin', 'Menu.xlsx')
async def gsheets_to_rows() -> list[list[str | int | float]]:
"""Получение всех строк из Google Sheets"""
def to_int(val: str) -> int | str:
try:
res = int(val)
except ValueError:
return val
return res
def to_float(val: str) -> float | str:
val = val.replace(',', '.')
try:
res = float(val)
except ValueError:
return val
return res
gc = gspread.service_account(filename='creds.json')
sh = gc.open('Menu')
data = sh.sheet1.get_all_values()
for row in data:
row[:3] = list(map(to_int, row[:3]))
row[-2:] = list(map(to_float, row[-2:]))
return data
async def local_xlsx_to_rows() -> list[list[str | int | float]]:
"""Получение всех строк из локального файла Menu"""
data = []
wb = openpyxl.load_workbook(file).worksheets[0]
for row in wb.iter_rows(values_only=True):
data.append(list(row))
return data
async def rows_to_dict(
rows: list[list],
) -> tuple[dict[int, Any], dict[Any, Any], dict[Any, Any]]:
"""Парсит строки полученные и источников в словарь"""
menus = {}
submenus = {}
dishes = {}
menu_num = None
submenu_num = None
for row in rows:
if all(row[:3]):
menu = {
row[0]: {
'data': {'title': row[1], 'description': row[2]},
'id': None,
}
}
menu_num = row[0]
menus.update(menu)
elif all(row[1:4]):
submenu = {
(menu_num, row[1]): {
'data': {'title': row[2], 'description': row[3]},
'parent_num': menu_num,
'id': None,
'parent_menu': None,
}
}
submenu_num = row[1]
submenus.update(submenu)
elif all(row[3:6]):
dish = {
(menu_num, submenu_num, row[2]): {
'data': {
'title': row[3],
'description': row[4],
'price': row[5],
},
'parent_num': (menu_num, submenu_num),
'id': None,
'parent_submenu': None,
'discont': row[6],
},
}
dishes.update(dish)
return menus, submenus, dishes

295
bg_tasks/updater.py Normal file
View File

@@ -0,0 +1,295 @@
import os
import pickle
from typing import Any
import redis.asyncio as redis # type: ignore
from sqlalchemy import delete, update
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
from fastfood.config import settings
from fastfood.models import Dish, Menu, SubMenu
from .parser import file, gsheets_to_rows, local_xlsx_to_rows, rows_to_dict
redis = redis.Redis.from_url(url=settings.REDIS_URL)
async_engine = create_async_engine(settings.DATABASE_URL_asyncpg)
async_session_maker = async_sessionmaker(
async_engine,
class_=AsyncSession,
expire_on_commit=False,
)
async def clear_cache(pattern: str) -> None:
keys = [key async for key in redis.scan_iter(pattern)]
if keys:
await redis.delete(*keys)
async def is_changed_xls() -> bool:
"""Проверяет, изменен ли файл с последнего запуска таска."""
if not os.path.exists(file):
return False
mod_time = os.path.getmtime(file)
cached_time = await redis.get('XLSX_MOD_TIME')
if cached_time is not None:
cached_time = pickle.loads(cached_time)
if mod_time == cached_time:
return False
return True
async def on_menu_change(
new_menu: dict, old_menu: dict, session: AsyncSession
) -> dict[str, Any]:
"""Изменение, удаление или создание меню"""
if new_menu and not old_menu:
# Create the menu
menu = Menu(
title=new_menu['data']['title'],
description=new_menu['data']['description'],
)
session.add(menu)
await session.flush()
new_menu['id'] = str(menu.id)
elif new_menu and old_menu:
# Update the menu
await session.execute(
update(Menu).where(Menu.id == old_menu['id']).values(**(new_menu['data']))
)
new_menu['id'] = old_menu['id']
else:
# Delete the menu
await session.execute(delete(Menu).where(Menu.id == old_menu['id']))
await session.commit()
return new_menu
async def menus_updater(menus: dict, session: AsyncSession) -> None:
"""Проверяет пункты меню на изменения
При необходимости запускае обновление БД
через фенкцию on_menu_change
"""
cached_menus = await redis.get('ALL_MENUS')
if cached_menus is not None:
cached_menus = pickle.loads(cached_menus)
else:
cached_menus = {}
for key in menus.keys():
if key not in cached_menus.keys():
# Create the menu
menu = await on_menu_change(menus[key], {}, session)
menus[key] = menu
elif key in cached_menus.keys():
# Update the menu
if menus[key].get('data') != cached_menus[key].get('data'):
menu = await on_menu_change(menus[key], cached_menus[key], session)
menus[key] = menu
else:
menus[key]['id'] = cached_menus[key]['id']
for key in {k: cached_menus[k] for k in set(cached_menus) - set(menus)}:
# Check for deleted menus
await on_menu_change({}, cached_menus.pop(key), session)
await redis.set('ALL_MENUS', pickle.dumps(menus))
async def on_submenu_change(
new_sub: dict, old_sub: dict, session: AsyncSession
) -> dict[str, Any]:
if new_sub and not old_sub:
# Create the submenu
submenu = SubMenu(
title=new_sub['data']['title'],
description=new_sub['data']['description'],
)
submenu.parent_menu = new_sub['parent_menu']
session.add(submenu)
await session.flush()
new_sub['id'] = str(submenu.id)
new_sub['parent_menu'] = str(submenu.parent_menu)
elif new_sub and old_sub:
# Update the submenu
await session.execute(
update(SubMenu)
.where(SubMenu.id == old_sub['id'])
.values(**(new_sub['data']))
)
new_sub['id'] = old_sub['id']
new_sub['parent_menu'] = old_sub['parent_menu']
else:
# Delete the submenu
await session.execute(delete(SubMenu).where(SubMenu.id == old_sub['id']))
await session.commit()
return new_sub
async def submenus_updater(submenus: dict, session: AsyncSession) -> None:
"""Проверяет пункты подменю на изменения
При необходимости запускае обновление БД
"""
# Get menus from the cache to resolve their IDs by their number in the spreadsheet
cached_menus = await redis.get('ALL_MENUS')
if cached_menus is not None:
cached_menus = pickle.loads(cached_menus)
else:
cached_menus = {}
# Get submenus from the cache
cached_sub = await redis.get('ALL_SUBMENUS')
if cached_sub is not None:
cached_sub = pickle.loads(cached_sub)
else:
cached_sub = {}
for key in submenus.keys():
parent = cached_menus[submenus[key]['parent_num']]['id']
submenus[key]['parent_menu'] = parent
if key not in cached_sub.keys():
# Get and set the parent_menu UUID
submenus[key]['parent_menu'] = parent
submenu = await on_submenu_change(submenus[key], {}, session)
submenus[key] = submenu
elif key in cached_sub.keys():
# Update the submenu
if submenus[key].get('data') != cached_sub[key].get('data'):
submenu = await on_submenu_change(
submenus[key], cached_sub[key], session
)
submenus[key] = submenu
else:
submenus[key]['id'] = cached_sub[key]['id']
submenus[key]['parent_menu'] = cached_sub[key]['parent_menu']
for key in {k: cached_sub[k] for k in set(cached_sub) - set(submenus)}:
# Check for deleted submenus
await on_submenu_change({}, cached_sub.pop(key), session)
await redis.set('ALL_SUBMENUS', pickle.dumps(submenus))
async def on_dish_change(
new_dish: dict, old_dish, session: AsyncSession
) -> dict[str, Any]:
if new_dish and not old_dish:
dish = Dish(
title=new_dish['data']['title'],
description=new_dish['data']['description'],
price=new_dish['data']['price'],
)
dish.parent_submenu = new_dish['parent_submenu']
session.add(dish)
await session.flush()
new_dish['id'] = str(dish.id)
new_dish['parent_submenu'] = str(dish.parent_submenu)
new_dish['data']['price'] = str(dish.price)
elif new_dish and old_dish:
# Update the dish
await session.execute(
update(Dish).where(Dish.id == old_dish['id']).values(**(new_dish['data']))
)
new_dish['id'] = old_dish['id']
new_dish['parent_submenu'] = old_dish['parent_submenu']
new_dish['data']['price'] = old_dish['data']['price']
else:
# Delete the dish
await session.execute(delete(Dish).where(Dish.id == old_dish['id']))
await session.commit()
return new_dish
async def dishes_updater(dishes: dict, session: AsyncSession) -> None:
"""Проверяет блюда на изменения
При необходимости запускае обновление БД
"""
cached_submenus = await redis.get('ALL_SUBMENUS')
if cached_submenus is not None:
cached_submenus = pickle.loads(cached_submenus)
else:
cached_submenus = {}
# Get dishes from the cache
cached_dishes = await redis.get('ALL_DISHES')
if cached_dishes is not None:
cached_dishes = pickle.loads(cached_dishes)
else:
cached_dishes = {}
await clear_cache('DISCONT*')
for key in {k: cached_dishes[k] for k in set(cached_dishes) - set(dishes)}:
# Check for deleted dishes and update accordingly
await on_dish_change({}, cached_dishes.pop(key), session)
for key in dishes.keys():
parent = cached_submenus[dishes[key]['parent_num']]['id']
dishes[key]['parent_submenu'] = parent
if key not in cached_dishes.keys():
# Get and set the parent_submenu UUID
dishes[key]['parent_submenu'] = parent
dish = await on_dish_change(dishes[key], {}, session)
dishes[key] = dish
elif key in cached_dishes.keys():
# Update the dish
if dishes[key].get('data') != cached_dishes[key].get('data'):
dish = await on_dish_change(dishes[key], cached_dishes[key], session)
dishes[key] = dish
else:
dishes[key]['id'] = cached_dishes[key]['id']
dishes[key]['parent_submenu'] = cached_dishes[key]['parent_submenu']
if dishes[key]['discont'] is not None:
await redis.set(
f"DISCONT:{dishes[key]['id']}", pickle.dumps(dishes[key]['discont'])
)
await redis.set('ALL_DISHES', pickle.dumps(dishes))
async def updater(rows) -> None:
menus, submenus, dishes = await rows_to_dict(rows)
async with async_session_maker() as session:
await menus_updater(menus, session)
await submenus_updater(submenus, session)
await dishes_updater(dishes, session)
# Clear the cache
await clear_cache('MENUS*')
await clear_cache('summary')
async def main() -> None:
"""Главная функция фоновой задачи"""
changed = await is_changed_xls()
if changed:
rows = await local_xlsx_to_rows()
await updater(rows)
async def main_gsheets() -> None:
"""Главная функция фоновой задачи для работы с Google"""
rows = await gsheets_to_rows()
await updater(rows)

View File

@@ -57,4 +57,56 @@ services:
restart: always
command: /bin/bash -c 'poetry run python /usr/src/fastfood/manage.py --run-test-server'
volumes:
- .:/usr/src/fastfood
command: /bin/bash -c 'poetry run python /usr/src/fastfood/manage.py --run-docker-server'
celery_worker:
container_name: celeryworker
build:
context: .
env_file:
- .env
depends_on:
- rabbitmq
- db
- app
- redis
volumes:
- .:/usr/src/fastfood
command: ["celery", "-A", "bg_tasks.bg_task:celery_app", "worker", "--loglevel=info", "--concurrency", "1", "-P", "solo"]
celery_beat:
container_name: celerybeat
build:
context: .
env_file:
- .env
depends_on:
- rabbitmq
- db
- app
- redis
volumes:
- .:/usr/src/fastfood
command: ["celery", "-A", "bg_tasks.bg_task:celery_app", "beat", "--loglevel=info"]
rabbitmq:
container_name: rabbit
image: "rabbitmq:management"
ports:
- 5672:5672

112
compose_google.yml Normal file
View File

@@ -0,0 +1,112 @@
version: "3.8"
services:
redis:
container_name: redis_test
image: redis:7.2.4-alpine3.19
ports:
- '6380:6379'
healthcheck:
test: [ "CMD", "redis-cli","ping" ]
interval: 10s
timeout: 5s
retries: 5
db:
container_name: pgdb
image: postgres:15.1-alpine
env_file:
- .env
environment:
POSTGRES_DB: ${POSTGRES_DB}
POSTGRES_USER: ${POSTGRES_USER}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
ports:
- 6432:5432
healthcheck:
test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER} -d ${POSTGRES_DB}"]
interval: 10s
timeout: 5s
retries: 5
app:
container_name: fastfood_app
build:
context: .
env_file:
- .env
ports:
- 8000:8000
depends_on:
db:
condition: service_healthy
redis:
condition: service_healthy
restart: always
volumes:
- .:/usr/src/fastfood
command: /bin/bash -c 'poetry run python /usr/src/fastfood/manage.py --run-docker-server'
celery_worker:
container_name: celeryworker
build:
context: .
env_file:
- .env
depends_on:
- rabbitmq
- db
- app
- redis
volumes:
- .:/usr/src/fastfood
command: ["celery", "-A", "bg_tasks.bg_task:celery_app_google", "worker", "--loglevel=info", "--concurrency", "1", "-P", "solo"]
celery_beat:
container_name: celerybeat
build:
context: .
env_file:
- .env
depends_on:
- rabbitmq
- db
- app
- redis
volumes:
- .:/usr/src/fastfood
command: ["celery", "-A", "bg_tasks.bg_task:celery_app_google", "beat", "--loglevel=info"]
rabbitmq:
container_name: rabbit
image: "rabbitmq:management"
ports:
- 5672:5672

View File

@@ -55,6 +55,7 @@ services:
redis:
condition: service_healthy
restart: always
volumes:
- .:/usr/src/fastfood
command: /bin/bash -c 'poetry run pytest -vv'

View File

@@ -1,7 +0,0 @@
DB_HOST=db
DB_PORT=5432
POSTGRES_USER=testuser
POSTGRES_PASSWORD=test
POSTGRES_DB=fastfood_db
POSTGRES_DB_TEST=testdb
REDIS_DB=redis://localhost

View File

@@ -5,44 +5,23 @@ from fastapi import FastAPI
from fastfood.routers.dish import router as dish_router
from fastfood.routers.menu import router as menu_router
from fastfood.routers.submenu import router as submenu_router
tags_metadata = [
{
'name': 'menu',
'description': 'Operations with menus.',
},
{
'name': 'submenu',
'description': 'Submenus and operations on them',
},
{'name': 'dish', 'description': 'Dishes and operations on them'},
]
from fastfood.routers.summary import router as summary_router
def create_app(redis=None) -> FastAPI:
def create_app() -> FastAPI:
"""
FastAPI application factory.
"""
with open('openapi.json') as f:
js = json.load(f)
app = FastAPI(
title=js['info']['title'],
description=js['info']['description'],
version=js['info']['version'],
contact={
'name': 'Sergey Vanyushkin',
'url': 'http://pi3c.ru',
'email': 'pi3c@yandex.ru',
},
license_info={
'name': 'MIT license',
'url': 'https://mit-license.org/',
},
openapi_tags=tags_metadata,
)
app = FastAPI()
app.include_router(menu_router)
app.include_router(submenu_router)
app.include_router(dish_router)
app.include_router(summary_router)
def custom_openapi():
with open('openapi.json') as openapi:
return json.load(openapi)
app.openapi = custom_openapi
return app

View File

@@ -4,13 +4,17 @@ from pydantic_settings import BaseSettings, SettingsConfigDict
class Settings(BaseSettings):
DB_HOST: str = ''
DB_PORT: int = 5432
# PostgreSQL config
POSTGRES_HOST: str = ''
POSTGRES_PORT: int = 5432
POSTGRES_DB: str = ''
POSTGRES_PASSWORD: str = ''
POSTGRES_USER: str = ''
POSTGRES_DB_TEST: str = ''
REDIS_DB: str = ''
# Redis config
REDIS_HOST: str = ''
REDIS_PORT: int = 6379
REDIS_DB: int = 0
@property
def DATABASE_URL_asyncpg(self) -> str:
@@ -18,19 +22,18 @@ class Settings(BaseSettings):
Returns the database connection string required by SQLAlchemy
"""
# Check whether we are running inside DOCKER
file_path = '/usr/src/RUN_IN_DOCKER'
if os.path.exists(file_path):
return (
'postgresql+asyncpg://'
f'{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}'
f'@db:{self.DB_PORT}/{self.POSTGRES_DB}'
f'@db:5432/{self.POSTGRES_DB}'
)
return (
'postgresql+asyncpg://'
f'{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}'
f'@{self.DB_HOST}:{self.DB_PORT}/{self.POSTGRES_DB}'
f'@{self.POSTGRES_HOST}:{self.POSTGRES_PORT}/{self.POSTGRES_DB}'
)
@property
@@ -43,22 +46,36 @@ class Settings(BaseSettings):
return (
'postgresql+asyncpg://'
f'{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}'
f'@db:{self.DB_PORT}/{self.POSTGRES_DB_TEST}'
f'@db:5432/{self.POSTGRES_DB_TEST}'
)
return (
'postgresql+asyncpg://'
f'{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}'
f'@{self.DB_HOST}:{self.DB_PORT}/{self.POSTGRES_DB_TEST}'
f'@{self.POSTGRES_HOST}:{self.POSTGRES_PORT}/{self.POSTGRES_DB_TEST}'
)
@property
def REDIS_URL(self):
"""
Returns the REDIS connection string
"""
file_path = '/usr/src/RUN_IN_DOCKER'
if os.path.exists(file_path):
return 'redis://redis:6379/0'
return self.REDIS_DB
return f'redis://{self.REDIS_HOST}:{self.REDIS_PORT}/{self.REDIS_DB}'
@property
def REBBITMQ_URL(self):
"""
Returns the RabbitMQ connection string
"""
file_path = '/usr/src/RUN_IN_DOCKER'
if os.path.exists(file_path):
return 'amqp://guest:guest@rabbitmq'
return 'amqp://guest:guest@127.0.0.1'
model_config = SettingsConfigDict(env_file='.env')

View File

@@ -10,10 +10,10 @@ from fastfood.schemas import Dish_db
class DishRepository:
def __init__(self, session: AsyncSession = Depends(get_async_session)):
def __init__(self, session: AsyncSession = Depends(get_async_session)) -> None:
self.db = session
async def get_dishes(self, menu_id: UUID, submenu_id: UUID) -> list[Dish]:
async def get_dishes(self, submenu_id: UUID) -> list[Dish]:
query = select(Dish).where(
Dish.parent_submenu == submenu_id,
)
@@ -22,7 +22,6 @@ class DishRepository:
async def create_dish_item(
self,
menu_id: UUID,
submenu_id: UUID,
dish_data: Dish_db,
) -> Dish:
@@ -35,8 +34,6 @@ class DishRepository:
async def get_dish_item(
self,
menu_id: UUID,
submenu_id: UUID,
dish_id: UUID,
) -> Dish | None:
query = select(Dish).where(Dish.id == dish_id)
@@ -45,25 +42,20 @@ class DishRepository:
async def update_dish_item(
self,
menu_id: UUID,
submenu_id: UUID,
dish_id: UUID,
dish_data: Dish_db,
) -> Dish:
) -> Dish | None:
query = update(Dish).where(Dish.id == dish_id).values(**dish_data.model_dump())
await self.db.execute(query)
await self.db.commit()
qr = select(Dish).where(Dish.id == dish_id)
updated_submenu = await self.db.execute(qr)
return updated_submenu.scalars().one()
return updated_submenu.scalar_one_or_none()
async def delete_dish_item(
self,
menu_id: UUID,
submenu_id: UUID,
dish_id: UUID,
) -> int:
) -> None:
query = delete(Dish).where(Dish.id == dish_id)
await self.db.execute(query)
await self.db.commit()
return 200

View File

@@ -11,7 +11,7 @@ from fastfood.models import Dish, Menu, SubMenu
class MenuRepository:
def __init__(self, session: AsyncSession = Depends(get_async_session)):
def __init__(self, session: AsyncSession = Depends(get_async_session)) -> None:
self.db = session
async def get_menus(self) -> list[Menu]:
@@ -44,23 +44,21 @@ class MenuRepository:
)
menu = await self.db.execute(query)
menu = menu.scalars().one_or_none()
if menu is None:
return None
return menu
async def update_menu_item(
self,
menu_id: UUID,
menu: schemas.MenuBase,
) -> Menu:
) -> Menu | None:
query = update(Menu).where(Menu.id == menu_id).values(**menu.model_dump())
await self.db.execute(query)
await self.db.commit()
qr = select(Menu).where(Menu.id == menu_id)
updated_menu = await self.db.execute(qr)
return updated_menu.scalar_one()
return updated_menu.scalar_one_or_none()
async def delete_menu_item(self, menu_id: UUID):
async def delete_menu_item(self, menu_id: UUID) -> None:
query = delete(Menu).where(Menu.id == menu_id)
await self.db.execute(query)
await self.db.commit()

View File

@@ -12,17 +12,17 @@ def get_key(level: str, **kwargs) -> str:
case 'menus':
return 'MENUS'
case 'menu':
return f"{kwargs.get('menu_id')}"
return f"MENUS:{kwargs.get('menu_id')}"
case 'submenus':
return f"{kwargs.get('menu_id')}:SUBMENUS"
return f"MENUS:{kwargs.get('menu_id')}:SUBMENUS"
case 'submenu':
return f"{kwargs.get('menu_id')}:{kwargs.get('submenu_id')}"
return f"MENUS:{kwargs.get('menu_id')}:{kwargs.get('submenu_id')}"
case 'dishes':
return f"{kwargs.get('menu_id')}:{kwargs.get('submenu_id')}:DISHES"
return f"MENUS:{kwargs.get('menu_id')}:{kwargs.get('submenu_id')}:DISHES"
case 'dish':
return f"{kwargs.get('menu_id')}:{kwargs.get('submenu_id')}:{kwargs.get('dish_id')}"
return f"MENUS:{kwargs.get('menu_id')}:{kwargs.get('submenu_id')}:{kwargs.get('dish_id')}"
return 'abracadabra'
return 'summary'
class RedisRepository:
@@ -63,3 +63,4 @@ class RedisRepository:
async def invalidate(self, key: str, bg_task: BackgroundTasks) -> None:
await self.clear_cache(f'{key}*', bg_task)
await self.clear_cache(f'{get_key("menus")}*', bg_task)
await self.clear_cache('summary', bg_task)

View File

@@ -11,7 +11,7 @@ from fastfood.schemas import MenuBase
class SubMenuRepository:
def __init__(self, session: AsyncSession = Depends(get_async_session)):
def __init__(self, session: AsyncSession = Depends(get_async_session)) -> None:
self.db = session
async def get_submenus(self, menu_id: UUID) -> list[SubMenu]:
@@ -32,14 +32,13 @@ class SubMenuRepository:
await self.db.commit()
await self.db.refresh(new_submenu)
full_sub = await self.get_submenu_item(menu_id, new_submenu.id)
full_sub = await self.get_submenu_item(new_submenu.id)
if full_sub is None:
raise TypeError
return full_sub
async def get_submenu_item(
self,
menu_id: UUID,
submenu_id: UUID,
) -> SubMenu | None:
s = aliased(SubMenu)
@@ -52,16 +51,13 @@ class SubMenuRepository:
)
submenu = await self.db.execute(query)
submenu = submenu.scalars().one_or_none()
if submenu is None:
return None
return submenu
async def update_submenu_item(
self,
menu_id: UUID,
submenu_id: UUID,
submenu_data: MenuBase,
) -> SubMenu:
) -> SubMenu | None:
query = (
update(SubMenu)
.where(SubMenu.id == submenu_id)
@@ -71,12 +67,11 @@ class SubMenuRepository:
await self.db.commit()
qr = select(SubMenu).where(SubMenu.id == submenu_id)
updated_submenu = await self.db.execute(qr)
return updated_submenu.scalar_one()
return updated_submenu.scalar_one_or_none()
async def delete_submenu_item(self, menu_id: UUID, submenu_id: UUID) -> int:
async def delete_submenu_item(self, submenu_id: UUID) -> None:
query = delete(SubMenu).where(
SubMenu.id == submenu_id,
)
await self.db.execute(query)
await self.db.commit()
return 200

View File

@@ -0,0 +1,21 @@
from typing import Any
from fastapi import Depends
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import selectinload
from fastfood.dbase import get_async_session
from fastfood.models import Menu, SubMenu
class SummaryRepository:
def __init__(self, session: AsyncSession = Depends(get_async_session)) -> None:
self.db = session
async def get_data(self) -> list[Any]:
query = select(Menu).options(
selectinload(Menu.submenus).selectinload(SubMenu.dishes)
)
data = await self.db.execute(query)
return [x for x in data.scalars().all()]

View File

@@ -1,6 +1,6 @@
from uuid import UUID
from fastapi import APIRouter, BackgroundTasks, Depends, HTTPException
from fastapi import APIRouter, Depends, HTTPException
from fastfood.schemas import Dish, DishBase
from fastfood.service.dish import DishService
@@ -11,25 +11,30 @@ router = APIRouter(
)
@router.get('/', response_model=list[Dish])
@router.get(
'/',
response_model=list[Dish],
)
async def get_dishes(
menu_id: UUID,
submenu_id: UUID,
dish: DishService = Depends(),
background_tasks: BackgroundTasks = BackgroundTasks(),
) -> list[Dish]:
result = await dish.read_dishes(menu_id, submenu_id)
return result
@router.post('/', status_code=201, response_model=Dish)
@router.post(
'/',
status_code=201,
response_model=Dish,
)
async def create_dish(
menu_id: UUID,
submenu_id: UUID,
dish_data: DishBase,
dish: DishService = Depends(),
background_tasks: BackgroundTasks = BackgroundTasks(),
):
) -> Dish:
return await dish.create_dish(
menu_id,
submenu_id,
@@ -37,48 +42,61 @@ async def create_dish(
)
@router.get('/{dish_id}', response_model=Dish)
@router.get(
'/{dish_id}',
response_model=Dish,
)
async def get_dish(
menu_id: UUID,
submenu_id: UUID,
dish_id: UUID,
dish: DishService = Depends(),
background_tasks: BackgroundTasks = BackgroundTasks(),
):
) -> Dish | None:
result = await dish.read_dish(
menu_id,
submenu_id,
dish_id,
)
if not result:
raise HTTPException(status_code=404, detail='dish not found')
raise HTTPException(
status_code=404,
detail='dish not found',
)
return result
@router.patch('/{dish_id}', response_model=Dish)
@router.patch(
'/{dish_id}',
response_model=Dish,
)
async def update_dish(
menu_id: UUID,
submenu_id: UUID,
dish_id: UUID,
dish_data: DishBase,
dish: DishService = Depends(),
background_tasks: BackgroundTasks = BackgroundTasks(),
):
) -> Dish:
result = await dish.update_dish(
menu_id,
submenu_id,
dish_id,
dish_data,
)
if not result:
raise HTTPException(
status_code=404,
detail='dish not found',
)
return result
@router.delete('/{dish_id}')
@router.delete(
'/{dish_id}',
)
async def delete_dish(
menu_id: UUID,
submenu_id: UUID,
dish_id: UUID,
dish: DishService = Depends(),
background_tasks: BackgroundTasks = BackgroundTasks(),
):
await dish.del_dish(menu_id, submenu_id, dish_id)
) -> None:
await dish.del_dish(menu_id, dish_id)

View File

@@ -1,6 +1,6 @@
from uuid import UUID
from fastapi import APIRouter, BackgroundTasks, Depends, HTTPException
from fastapi import APIRouter, Depends, HTTPException
from fastfood.schemas import MenuBase, MenuRead
from fastfood.service.menu import MenuService
@@ -11,54 +11,75 @@ router = APIRouter(
)
@router.get('/', response_model=list[MenuRead])
@router.get(
'/',
status_code=200,
response_model=list[MenuRead],
)
async def get_menus(
menu: MenuService = Depends(),
background_tasks: BackgroundTasks = BackgroundTasks(),
):
) -> list[MenuRead]:
return await menu.read_menus()
@router.post('/', status_code=201, response_model=MenuRead)
@router.post(
'/',
status_code=201,
response_model=MenuRead,
)
async def add_menu(
menu: MenuBase,
responce: MenuService = Depends(),
background_tasks: BackgroundTasks = BackgroundTasks(),
):
) -> MenuRead:
return await responce.create_menu(menu)
@router.get('/{menu_id}', response_model=MenuRead)
@router.get(
'/{menu_id}',
response_model=MenuRead,
)
async def get_menu(
menu_id: UUID,
responce: MenuService = Depends(),
background_tasks: BackgroundTasks = BackgroundTasks(),
):
) -> MenuRead:
result = await responce.read_menu(menu_id=menu_id)
if not result:
raise HTTPException(status_code=404, detail='menu not found')
raise HTTPException(
status_code=404,
detail='menu not found',
)
return result
@router.patch('/{menu_id}', response_model=MenuRead)
@router.patch(
'/{menu_id}',
response_model=MenuRead,
)
async def update_menu(
menu_id: UUID,
menu: MenuBase,
responce: MenuService = Depends(),
background_tasks: BackgroundTasks = BackgroundTasks(),
):
) -> MenuRead:
result = await responce.update_menu(
menu_id=menu_id,
menu_data=menu,
)
if not result:
raise HTTPException(
status_code=404,
detail='menu not found',
)
return result
@router.delete('/{menu_id}')
@router.delete(
'/{menu_id}',
status_code=200,
)
async def delete_menu(
menu_id: UUID,
menu: MenuService = Depends(),
background_tasks: BackgroundTasks = BackgroundTasks(),
):
) -> None:
await menu.del_menu(menu_id)

View File

@@ -1,6 +1,6 @@
from uuid import UUID
from fastapi import APIRouter, BackgroundTasks, Depends, HTTPException
from fastapi import APIRouter, Depends, HTTPException
from fastfood.schemas import MenuBase, SubMenuRead
from fastfood.service.submenu import SubmenuService
@@ -11,23 +11,28 @@ router = APIRouter(
)
@router.get('/', response_model=list[SubMenuRead])
@router.get(
'/',
response_model=list[SubMenuRead],
)
async def get_submenus(
menu_id: UUID,
submenu: SubmenuService = Depends(),
background_tasks: BackgroundTasks = BackgroundTasks(),
):
) -> list[SubMenuRead]:
result = await submenu.read_submenus(menu_id=menu_id)
return result
@router.post('/', status_code=201, response_model=SubMenuRead)
@router.post(
'/',
status_code=201,
response_model=SubMenuRead,
)
async def create_submenu_item(
menu_id: UUID,
submenu_data: MenuBase,
submenu: SubmenuService = Depends(),
background_tasks: BackgroundTasks = BackgroundTasks(),
):
) -> SubMenuRead:
result = await submenu.create_submenu(
menu_id=menu_id,
submenu_data=submenu_data,
@@ -35,19 +40,24 @@ async def create_submenu_item(
return result
@router.get('/{submenu_id}', response_model=SubMenuRead)
@router.get(
'/{submenu_id}',
response_model=SubMenuRead,
)
async def get_submenu(
menu_id: UUID,
submenu_id: UUID,
submenu: SubmenuService = Depends(),
background_tasks: BackgroundTasks = BackgroundTasks(),
):
) -> SubMenuRead:
result = await submenu.read_menu(
menu_id=menu_id,
submenu_id=submenu_id,
)
if not result:
raise HTTPException(status_code=404, detail='submenu not found')
raise HTTPException(
status_code=404,
detail='submenu not found',
)
return result
@@ -60,21 +70,27 @@ async def update_submenu(
submenu_id: UUID,
submenu_data: MenuBase,
submenu: SubmenuService = Depends(),
background_tasks: BackgroundTasks = BackgroundTasks(),
):
) -> SubMenuRead:
result = await submenu.update_submenu(
menu_id=menu_id,
submenu_id=submenu_id,
submenu_data=submenu_data,
)
if not result:
raise HTTPException(
status_code=404,
detail='submenu not found',
)
return result
@router.delete('/{submenu_id}')
@router.delete(
'/{submenu_id}',
)
async def delete_submenu(
menu_id: UUID,
submenu_id: UUID,
submenu: SubmenuService = Depends(),
background_tasks: BackgroundTasks = BackgroundTasks(),
):
) -> None:
await submenu.del_menu(menu_id=menu_id, submenu_id=submenu_id)

View File

@@ -0,0 +1,16 @@
from fastapi import APIRouter, Depends
from fastfood.schemas import MenuSummary
from fastfood.service.summary import SummaryService
router = APIRouter(
prefix='/api/v1/summary',
tags=['summary'],
)
@router.get('/', response_model=list[MenuSummary])
async def get_summary(
sum: SummaryService = Depends(),
) -> list[MenuSummary]:
return await sum.read_data()

View File

@@ -34,3 +34,11 @@ class Dish(DishBase, Menu):
class Dish_db(MenuBase):
price: float
class SubMenuSummary(Menu):
dishes: list[Dish_db]
class MenuSummary(Menu):
submenus: list[SubMenuSummary]

View File

@@ -3,6 +3,7 @@ from uuid import UUID
import redis.asyncio as redis # type: ignore
from fastapi import BackgroundTasks, Depends
from fastfood import models
from fastfood.dbase import get_async_redis_client
from fastfood.repository.dish import DishRepository
from fastfood.repository.redis import RedisRepository, get_key
@@ -21,6 +22,19 @@ class DishService:
self.bg_tasks = background_tasks
self.key = get_key
async def _get_discont(self, dish) -> dict:
discont = await self.cache.get(f"DISCONT:{str(dish.get('id'))}")
if discont is not None:
discont = float(discont)
dish['price'] = round(dish['price'] - (dish['price'] * discont / 100), 2)
return dish
async def _convert_dish_to_dict(self, row: models.Dish) -> Dish:
dish = row.__dict__
dish = await self._get_discont(dish)
dish['price'] = str(dish['price'])
return Dish(**dish)
async def read_dishes(self, menu_id: UUID, submenu_id: UUID) -> list[Dish]:
cached_dishes = await self.cache.get(
self.key('dishes', menu_id=str(menu_id), submenu_id=str(submenu_id))
@@ -28,12 +42,12 @@ class DishService:
if cached_dishes is not None:
return cached_dishes
data = await self.dish_repo.get_dishes(menu_id, submenu_id)
data = await self.dish_repo.get_dishes(submenu_id)
response = []
for row in data:
dish = row.__dict__
dish['price'] = str(dish['price'])
response.append(Dish(**dish))
dish = await self._convert_dish_to_dict(row)
response.append(dish)
await self.cache.set(
self.key(
'dishes',
@@ -53,13 +67,10 @@ class DishService:
) -> Dish:
dish_db = Dish_db(**dish_data.model_dump())
data = await self.dish_repo.create_dish_item(
menu_id,
submenu_id,
dish_db,
)
dish = data.__dict__
dish['price'] = str(dish['price'])
dish = Dish(**dish)
dish = await self._convert_dish_to_dict(data)
await self.cache.set(
self.key('dish', menu_id=str(menu_id), submenu_id=str(submenu_id)),
dish,
@@ -83,12 +94,11 @@ class DishService:
if cached_dish is not None:
return cached_dish
data = await self.dish_repo.get_dish_item(menu_id, submenu_id, dish_id)
data = await self.dish_repo.get_dish_item(dish_id)
if data is None:
return None
dish = data.__dict__
dish['price'] = str(dish['price'])
dish = Dish(**dish)
dish = await self._convert_dish_to_dict(data)
await self.cache.set(
self.key(
'dish',
@@ -103,14 +113,15 @@ class DishService:
async def update_dish(
self, menu_id: UUID, submenu_id: UUID, dish_id, dish_data: DishBase
) -> Dish:
) -> Dish | None:
dish_db = Dish_db(**dish_data.model_dump())
data = await self.dish_repo.update_dish_item(
menu_id, submenu_id, dish_id, dish_db
)
dish = data.__dict__
dish['price'] = str(dish['price'])
dish = Dish(**dish)
data = await self.dish_repo.update_dish_item(dish_id, dish_db)
if data is None:
return None
dish = await self._convert_dish_to_dict(data)
await self.cache.set(
self.key(
'dish',
@@ -125,13 +136,9 @@ class DishService:
return dish
async def del_dish(self, menu_id: UUID, submenu_id: UUID, dish_id: UUID) -> int:
response = await self.dish_repo.delete_dish_item(
menu_id,
submenu_id,
async def del_dish(self, menu_id: UUID, dish_id: UUID) -> None:
await self.dish_repo.delete_dish_item(
dish_id,
)
await self.cache.delete(key=str(menu_id), bg_task=self.bg_tasks)
await self.cache.invalidate(key=str(menu_id), bg_task=self.bg_tasks)
return response

View File

@@ -87,8 +87,10 @@ class MenuService:
)
return menu
async def update_menu(self, menu_id: UUID, menu_data) -> MenuRead:
async def update_menu(self, menu_id: UUID, menu_data) -> MenuRead | None:
data = await self.menu_repo.update_menu_item(menu_id, menu_data)
if data is None:
return None
menu = data.__dict__
menu = {k: v for k, v in menu.items() if not k.startswith('_')}
dishes_conter = 0
@@ -104,8 +106,7 @@ class MenuService:
await self.cache.invalidate(key=str(menu_id), bg_task=self.bg_tasks)
return menu
async def del_menu(self, menu_id: UUID):
data = await self.menu_repo.delete_menu_item(menu_id)
async def del_menu(self, menu_id: UUID) -> None:
await self.menu_repo.delete_menu_item(menu_id)
await self.cache.delete(key=str(menu_id), bg_task=self.bg_tasks)
await self.cache.invalidate(key=str(menu_id), bg_task=self.bg_tasks)
return data

View File

@@ -33,7 +33,7 @@ class SubmenuService:
submenus = []
for r in data:
submenu = r.__dict__
subq = await self.submenu_repo.get_submenu_item(menu_id, r.id)
subq = await self.submenu_repo.get_submenu_item(r.id)
if subq is not None:
submenu['dishes_count'] = len(subq.dishes)
submenu = SubMenuRead(**submenu)
@@ -73,7 +73,7 @@ class SubmenuService:
if cached_submenu is not None:
return cached_submenu
data = await self.submenu_repo.get_submenu_item(menu_id, submenu_id)
data = await self.submenu_repo.get_submenu_item(submenu_id)
if data is None:
return None
submenu = data.__dict__
@@ -89,14 +89,16 @@ class SubmenuService:
async def update_submenu(
self, menu_id: UUID, submenu_id: UUID, submenu_data: MenuBase
) -> SubMenuRead:
data = await self.submenu_repo.update_submenu_item(
menu_id, submenu_id, submenu_data
)
) -> SubMenuRead | None:
data = await self.submenu_repo.update_submenu_item(submenu_id, submenu_data)
if data is None:
return None
submenu = data.__dict__
submenu = {k: v for k, v in submenu.items() if not k.startswith('_')}
submenu['dishes_count'] = len(submenu.pop('dishes'))
submenu = SubMenuRead(**submenu)
await self.cache.set(
self.key('submenu', menu_id=str(menu_id), submenu_id=str(submenu_id)),
submenu,
@@ -106,8 +108,8 @@ class SubmenuService:
return submenu
async def del_menu(self, menu_id: UUID, submenu_id: UUID) -> int:
code = await self.submenu_repo.delete_submenu_item(menu_id, submenu_id)
async def del_menu(self, menu_id: UUID, submenu_id: UUID) -> None:
await self.submenu_repo.delete_submenu_item(submenu_id)
await self.cache.delete(
key=self.key(
'submenu',
@@ -117,4 +119,3 @@ class SubmenuService:
bg_task=self.bg_tasks,
)
await self.cache.invalidate(key=str(menu_id), bg_task=self.bg_tasks)
return code

View File

@@ -0,0 +1,81 @@
import redis.asyncio as redis # type: ignore
from fastapi import BackgroundTasks, Depends
from fastfood.dbase import get_async_redis_client
from fastfood.repository.redis import RedisRepository, get_key
from fastfood.repository.summary import SummaryRepository
from fastfood.schemas import DishBase, MenuSummary, SubMenuSummary
class SummaryService:
def __init__(
self,
sum_repo: SummaryRepository = Depends(),
redis_client: redis.Redis = Depends(get_async_redis_client),
background_tasks: BackgroundTasks = None,
) -> None:
self.sum_repo = sum_repo
self.cache = RedisRepository(redis_client)
self.key = get_key
self.bg_tasks = background_tasks
async def read_data(self) -> list[MenuSummary]:
result = []
async def dump_to_schema(
schema, obj
) -> MenuSummary | SubMenuSummary | DishBase:
"""Функция преобразует объект SQLAlchemy к Pydantic модели
Входящие параметры
schema: Pydantic модель
obj: ORM объект
Возвращаемые данные
schema: MenuSummary | SubMenuSummary | DishBase
"""
obj = obj.__dict__
obj = {k: v for k, v in obj.items() if not k.startswith('_')}
if 'price' in obj.keys():
discont = await self.cache.get(f"DISCONT:{str(obj.get('id'))}")
if discont is not None:
try:
discont = float(discont)
except Exception:
discont = 0.0
obj['price'] = round(
obj['price'] - (obj['price'] * discont / 100), 2
)
obj['price'] = str(obj['price'])
return schema(**obj)
cached_data = await self.cache.get(self.key('summary'))
if cached_data is not None:
return cached_data
data = await self.sum_repo.get_data()
for menu in data:
menus_res = await dump_to_schema(MenuSummary, menu)
menus_res.submenus = []
for sub in menu.submenus:
sub_res = await dump_to_schema(SubMenuSummary, sub)
sub_res.dishes = []
for dish in sub.dishes:
dish_res = await dump_to_schema(DishBase, dish)
sub_res.dishes.append(dish_res)
menus_res.submenus.append(sub_res)
result.append(menus_res)
await self.cache.set(self.key('summary'), data, self.bg_tasks)
return result

View File

@@ -1,12 +1,28 @@
import asyncio
import multiprocessing
import sys
from subprocess import Popen
import uvicorn
from fastfood.repository import create_db_and_tables
loop = asyncio.get_event_loop()
def run_app():
def start_celery_worker() -> None:
Popen(['celery', '-A', 'bg_tasks.bg_task.celery_app', 'worker', '--loglevel=info'])
def start_celery_beat() -> None:
Popen(['celery', '-A', 'bg_tasks.bg_task.celery_app', 'beat', '--loglevel=info'])
celery_worker_process = multiprocessing.Process(target=start_celery_worker)
celery_beat_process = multiprocessing.Process(target=start_celery_beat)
async def run_app() -> None:
"""
Start FastAPI
"""
@@ -20,15 +36,24 @@ def run_app():
)
async def recreate():
async def recreate() -> None:
"""Удаление и создание таблиц в базе данных для тестирования"""
await create_db_and_tables()
if __name__ == '__main__':
if '--run-server' in sys.argv:
run_app()
if '--run-docker-server' in sys.argv:
"""Запуск FastAPI в докере. Celery запускается в отдельном контейнере"""
loop.run_until_complete(recreate())
loop.run_until_complete(run_app())
if '--run-test-server' in sys.argv:
asyncio.run(recreate())
run_app()
if '--run-local-server' in sys.argv:
"""Локальный запуск FastAPI с запуском Celery в отдельных процессах"""
celery_worker_process.start()
celery_beat_process.start()
loop.run_until_complete(recreate())
loop.run_until_complete(run_app())
celery_beat_process.kill()
celery_worker_process.kill()

File diff suppressed because one or more lines are too long

554
poetry.lock generated
View File

@@ -1,5 +1,19 @@
# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
[[package]]
name = "amqp"
version = "5.2.0"
description = "Low-level AMQP client for Python (fork of amqplib)."
optional = false
python-versions = ">=3.6"
files = [
{file = "amqp-5.2.0-py3-none-any.whl", hash = "sha256:827cb12fb0baa892aad844fd95258143bce4027fdac4fccddbc43330fd281637"},
{file = "amqp-5.2.0.tar.gz", hash = "sha256:a1ecff425ad063ad42a486c902807d1482311481c8ad95a72694b2975e75f7fd"},
]
[package.dependencies]
vine = ">=5.0.0,<6.0.0"
[[package]]
name = "annotated-types"
version = "0.6.0"
@@ -101,6 +115,83 @@ async-timeout = {version = ">=4.0.3", markers = "python_version < \"3.12.0\""}
docs = ["Sphinx (>=5.3.0,<5.4.0)", "sphinx-rtd-theme (>=1.2.2)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"]
test = ["flake8 (>=6.1,<7.0)", "uvloop (>=0.15.3)"]
[[package]]
name = "billiard"
version = "4.2.0"
description = "Python multiprocessing fork with improvements and bugfixes"
optional = false
python-versions = ">=3.7"
files = [
{file = "billiard-4.2.0-py3-none-any.whl", hash = "sha256:07aa978b308f334ff8282bd4a746e681b3513db5c9a514cbdd810cbbdc19714d"},
{file = "billiard-4.2.0.tar.gz", hash = "sha256:9a3c3184cb275aa17a732f93f65b20c525d3d9f253722d26a82194803ade5a2c"},
]
[[package]]
name = "cachetools"
version = "5.3.2"
description = "Extensible memoizing collections and decorators"
optional = false
python-versions = ">=3.7"
files = [
{file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"},
{file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"},
]
[[package]]
name = "celery"
version = "5.3.6"
description = "Distributed Task Queue."
optional = false
python-versions = ">=3.8"
files = [
{file = "celery-5.3.6-py3-none-any.whl", hash = "sha256:9da4ea0118d232ce97dff5ed4974587fb1c0ff5c10042eb15278487cdd27d1af"},
{file = "celery-5.3.6.tar.gz", hash = "sha256:870cc71d737c0200c397290d730344cc991d13a057534353d124c9380267aab9"},
]
[package.dependencies]
billiard = ">=4.2.0,<5.0"
click = ">=8.1.2,<9.0"
click-didyoumean = ">=0.3.0"
click-plugins = ">=1.1.1"
click-repl = ">=0.2.0"
kombu = ">=5.3.4,<6.0"
python-dateutil = ">=2.8.2"
tzdata = ">=2022.7"
vine = ">=5.1.0,<6.0"
[package.extras]
arangodb = ["pyArango (>=2.0.2)"]
auth = ["cryptography (==41.0.5)"]
azureblockblob = ["azure-storage-blob (>=12.15.0)"]
brotli = ["brotli (>=1.0.0)", "brotlipy (>=0.7.0)"]
cassandra = ["cassandra-driver (>=3.25.0,<4)"]
consul = ["python-consul2 (==0.1.5)"]
cosmosdbsql = ["pydocumentdb (==2.3.5)"]
couchbase = ["couchbase (>=3.0.0)"]
couchdb = ["pycouchdb (==1.14.2)"]
django = ["Django (>=2.2.28)"]
dynamodb = ["boto3 (>=1.26.143)"]
elasticsearch = ["elastic-transport (<=8.10.0)", "elasticsearch (<=8.11.0)"]
eventlet = ["eventlet (>=0.32.0)"]
gevent = ["gevent (>=1.5.0)"]
librabbitmq = ["librabbitmq (>=2.0.0)"]
memcache = ["pylibmc (==1.6.3)"]
mongodb = ["pymongo[srv] (>=4.0.2)"]
msgpack = ["msgpack (==1.0.7)"]
pymemcache = ["python-memcached (==1.59)"]
pyro = ["pyro4 (==4.82)"]
pytest = ["pytest-celery (==0.0.0)"]
redis = ["redis (>=4.5.2,!=4.5.5,<6.0.0)"]
s3 = ["boto3 (>=1.26.143)"]
slmq = ["softlayer-messaging (>=1.0.3)"]
solar = ["ephem (==4.1.5)"]
sqlalchemy = ["sqlalchemy (>=1.4.48,<2.1)"]
sqs = ["boto3 (>=1.26.143)", "kombu[sqs] (>=5.3.0)", "pycurl (>=7.43.0.5)", "urllib3 (>=1.26.16)"]
tblib = ["tblib (>=1.3.0)", "tblib (>=1.5.0)"]
yaml = ["PyYAML (>=3.10)"]
zookeeper = ["kazoo (>=1.3.1)"]
zstd = ["zstandard (==0.22.0)"]
[[package]]
name = "certifi"
version = "2024.2.2"
@@ -187,6 +278,105 @@ files = [
{file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"},
]
[[package]]
name = "charset-normalizer"
version = "3.3.2"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
optional = false
python-versions = ">=3.7.0"
files = [
{file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"},
{file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"},
{file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"},
{file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"},
{file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"},
{file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"},
{file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"},
{file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"},
{file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"},
{file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"},
{file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"},
{file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"},
{file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"},
{file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"},
{file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"},
{file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"},
{file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"},
{file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"},
{file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"},
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"},
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"},
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"},
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"},
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"},
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"},
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"},
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"},
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"},
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"},
{file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"},
{file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"},
{file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"},
{file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"},
{file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"},
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"},
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"},
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"},
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"},
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"},
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"},
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"},
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"},
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"},
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"},
{file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"},
{file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"},
{file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"},
{file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"},
{file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"},
{file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"},
{file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"},
{file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"},
{file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"},
{file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"},
{file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"},
{file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"},
{file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"},
{file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"},
{file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"},
{file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"},
{file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"},
{file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"},
{file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"},
{file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"},
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"},
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"},
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"},
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"},
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"},
{file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"},
{file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"},
{file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"},
{file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"},
{file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"},
{file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"},
{file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"},
{file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"},
]
[[package]]
name = "click"
version = "8.1.7"
@@ -201,6 +391,55 @@ files = [
[package.dependencies]
colorama = {version = "*", markers = "platform_system == \"Windows\""}
[[package]]
name = "click-didyoumean"
version = "0.3.0"
description = "Enables git-like *did-you-mean* feature in click"
optional = false
python-versions = ">=3.6.2,<4.0.0"
files = [
{file = "click-didyoumean-0.3.0.tar.gz", hash = "sha256:f184f0d851d96b6d29297354ed981b7dd71df7ff500d82fa6d11f0856bee8035"},
{file = "click_didyoumean-0.3.0-py3-none-any.whl", hash = "sha256:a0713dc7a1de3f06bc0df5a9567ad19ead2d3d5689b434768a6145bff77c0667"},
]
[package.dependencies]
click = ">=7"
[[package]]
name = "click-plugins"
version = "1.1.1"
description = "An extension module for click to enable registering CLI commands via setuptools entry-points."
optional = false
python-versions = "*"
files = [
{file = "click-plugins-1.1.1.tar.gz", hash = "sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b"},
{file = "click_plugins-1.1.1-py2.py3-none-any.whl", hash = "sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8"},
]
[package.dependencies]
click = ">=4.0"
[package.extras]
dev = ["coveralls", "pytest (>=3.6)", "pytest-cov", "wheel"]
[[package]]
name = "click-repl"
version = "0.3.0"
description = "REPL plugin for Click"
optional = false
python-versions = ">=3.6"
files = [
{file = "click-repl-0.3.0.tar.gz", hash = "sha256:17849c23dba3d667247dc4defe1757fff98694e90fe37474f3feebb69ced26a9"},
{file = "click_repl-0.3.0-py3-none-any.whl", hash = "sha256:fb7e06deb8da8de86180a33a9da97ac316751c094c6899382da7feeeeb51b812"},
]
[package.dependencies]
click = ">=7.0"
prompt-toolkit = ">=3.0.36"
[package.extras]
testing = ["pytest (>=7.2.1)", "pytest-cov (>=4.0.0)", "tox (>=4.4.3)"]
[[package]]
name = "colorama"
version = "0.4.6"
@@ -379,6 +618,17 @@ files = [
dnspython = ">=2.0.0"
idna = ">=2.0.0"
[[package]]
name = "et-xmlfile"
version = "1.1.0"
description = "An implementation of lxml.xmlfile for the standard library"
optional = false
python-versions = ">=3.6"
files = [
{file = "et_xmlfile-1.1.0-py3-none-any.whl", hash = "sha256:a2ba85d1d6a74ef63837eed693bcb89c3f752169b0e3e7ae5b16ca5e1b3deada"},
{file = "et_xmlfile-1.1.0.tar.gz", hash = "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"},
]
[[package]]
name = "exceptiongroup"
version = "1.2.0"
@@ -428,6 +678,47 @@ docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1
testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"]
typing = ["typing-extensions (>=4.8)"]
[[package]]
name = "google-auth"
version = "2.27.0"
description = "Google Authentication Library"
optional = false
python-versions = ">=3.7"
files = [
{file = "google-auth-2.27.0.tar.gz", hash = "sha256:e863a56ccc2d8efa83df7a80272601e43487fa9a728a376205c86c26aaefa821"},
{file = "google_auth-2.27.0-py2.py3-none-any.whl", hash = "sha256:8e4bad367015430ff253fe49d500fdc3396c1a434db5740828c728e45bcce245"},
]
[package.dependencies]
cachetools = ">=2.0.0,<6.0"
pyasn1-modules = ">=0.2.1"
rsa = ">=3.1.4,<5"
[package.extras]
aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"]
enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"]
pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"]
reauth = ["pyu2f (>=0.1.5)"]
requests = ["requests (>=2.20.0,<3.0.0.dev0)"]
[[package]]
name = "google-auth-oauthlib"
version = "1.2.0"
description = "Google Authentication Library"
optional = false
python-versions = ">=3.6"
files = [
{file = "google-auth-oauthlib-1.2.0.tar.gz", hash = "sha256:292d2d3783349f2b0734a0a0207b1e1e322ac193c2c09d8f7c613fb7cc501ea8"},
{file = "google_auth_oauthlib-1.2.0-py2.py3-none-any.whl", hash = "sha256:297c1ce4cb13a99b5834c74a1fe03252e1e499716718b190f56bcb9c4abc4faf"},
]
[package.dependencies]
google-auth = ">=2.15.0"
requests-oauthlib = ">=0.7.0"
[package.extras]
tool = ["click (>=6.0.0)"]
[[package]]
name = "greenlet"
version = "3.0.3"
@@ -499,6 +790,22 @@ files = [
docs = ["Sphinx", "furo"]
test = ["objgraph", "psutil"]
[[package]]
name = "gspread"
version = "6.0.1"
description = "Google Spreadsheets Python API"
optional = false
python-versions = ">=3.8"
files = [
{file = "gspread-6.0.1-py3-none-any.whl", hash = "sha256:6c3af32b753fe75d9dd513ea9e088e9e043e09b9e3bf04d61d77213f37e67b79"},
{file = "gspread-6.0.1.tar.gz", hash = "sha256:8c8bf83be676a019d3a483455d8b17b442f2acfc620172f245422ca4fc960dd0"},
]
[package.dependencies]
google-auth = ">=1.12.0"
google-auth-oauthlib = ">=0.4.1"
StrEnum = "0.4.15"
[[package]]
name = "h11"
version = "0.14.0"
@@ -591,6 +898,38 @@ files = [
{file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
]
[[package]]
name = "kombu"
version = "5.3.5"
description = "Messaging library for Python."
optional = false
python-versions = ">=3.8"
files = [
{file = "kombu-5.3.5-py3-none-any.whl", hash = "sha256:0eac1bbb464afe6fb0924b21bf79460416d25d8abc52546d4f16cad94f789488"},
{file = "kombu-5.3.5.tar.gz", hash = "sha256:30e470f1a6b49c70dc6f6d13c3e4cc4e178aa6c469ceb6bcd55645385fc84b93"},
]
[package.dependencies]
amqp = ">=5.1.1,<6.0.0"
vine = "*"
[package.extras]
azureservicebus = ["azure-servicebus (>=7.10.0)"]
azurestoragequeues = ["azure-identity (>=1.12.0)", "azure-storage-queue (>=12.6.0)"]
confluentkafka = ["confluent-kafka (>=2.2.0)"]
consul = ["python-consul2"]
librabbitmq = ["librabbitmq (>=2.0.0)"]
mongodb = ["pymongo (>=4.1.1)"]
msgpack = ["msgpack"]
pyro = ["pyro4"]
qpid = ["qpid-python (>=0.26)", "qpid-tools (>=0.26)"]
redis = ["redis (>=4.5.2,!=4.5.5,<6.0.0)"]
slmq = ["softlayer-messaging (>=1.0.3)"]
sqlalchemy = ["sqlalchemy (>=1.4.48,<2.1)"]
sqs = ["boto3 (>=1.26.143)", "pycurl (>=7.43.0.5)", "urllib3 (>=1.26.16)"]
yaml = ["PyYAML (>=3.10)"]
zookeeper = ["kazoo (>=2.8.0)"]
[[package]]
name = "mypy"
version = "1.8.0"
@@ -663,6 +1002,36 @@ files = [
[package.dependencies]
setuptools = "*"
[[package]]
name = "oauthlib"
version = "3.2.2"
description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic"
optional = false
python-versions = ">=3.6"
files = [
{file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"},
{file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"},
]
[package.extras]
rsa = ["cryptography (>=3.0.0)"]
signals = ["blinker (>=1.4.0)"]
signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"]
[[package]]
name = "openpyxl"
version = "3.1.2"
description = "A Python library to read/write Excel 2010 xlsx/xlsm files"
optional = false
python-versions = ">=3.6"
files = [
{file = "openpyxl-3.1.2-py2.py3-none-any.whl", hash = "sha256:f91456ead12ab3c6c2e9491cf33ba6d08357d802192379bb482f1033ade496f5"},
{file = "openpyxl-3.1.2.tar.gz", hash = "sha256:a6f5977418eff3b2d5500d54d9db50c8277a368436f4e4f8ddb1be3422870184"},
]
[package.dependencies]
et-xmlfile = "*"
[[package]]
name = "packaging"
version = "23.2"
@@ -722,6 +1091,45 @@ nodeenv = ">=0.11.1"
pyyaml = ">=5.1"
virtualenv = ">=20.10.0"
[[package]]
name = "prompt-toolkit"
version = "3.0.43"
description = "Library for building powerful interactive command lines in Python"
optional = false
python-versions = ">=3.7.0"
files = [
{file = "prompt_toolkit-3.0.43-py3-none-any.whl", hash = "sha256:a11a29cb3bf0a28a387fe5122cdb649816a957cd9261dcedf8c9f1fef33eacf6"},
{file = "prompt_toolkit-3.0.43.tar.gz", hash = "sha256:3527b7af26106cbc65a040bcc84839a3566ec1b051bb0bfe953631e704b0ff7d"},
]
[package.dependencies]
wcwidth = "*"
[[package]]
name = "pyasn1"
version = "0.5.1"
description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
files = [
{file = "pyasn1-0.5.1-py2.py3-none-any.whl", hash = "sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58"},
{file = "pyasn1-0.5.1.tar.gz", hash = "sha256:6d391a96e59b23130a5cfa74d6fd7f388dbbe26cc8f1edf39fdddf08d9d6676c"},
]
[[package]]
name = "pyasn1-modules"
version = "0.3.0"
description = "A collection of ASN.1-based protocols modules"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
files = [
{file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"},
{file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"},
]
[package.dependencies]
pyasn1 = ">=0.4.6,<0.6.0"
[[package]]
name = "pycparser"
version = "2.21"
@@ -916,6 +1324,20 @@ pytest = ">=4.6"
[package.extras]
testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"]
[[package]]
name = "python-dateutil"
version = "2.8.2"
description = "Extensions to the standard Python datetime module"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
files = [
{file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
{file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
]
[package.dependencies]
six = ">=1.5"
[[package]]
name = "python-dotenv"
version = "1.0.1"
@@ -1007,6 +1429,59 @@ async-timeout = {version = ">=4.0.2", markers = "python_full_version <= \"3.11.2
hiredis = ["hiredis (>=1.0.0)"]
ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"]
[[package]]
name = "requests"
version = "2.31.0"
description = "Python HTTP for Humans."
optional = false
python-versions = ">=3.7"
files = [
{file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"},
{file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"},
]
[package.dependencies]
certifi = ">=2017.4.17"
charset-normalizer = ">=2,<4"
idna = ">=2.5,<4"
urllib3 = ">=1.21.1,<3"
[package.extras]
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "requests-oauthlib"
version = "1.3.1"
description = "OAuthlib authentication support for Requests."
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
files = [
{file = "requests-oauthlib-1.3.1.tar.gz", hash = "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"},
{file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"},
]
[package.dependencies]
oauthlib = ">=3.0.0"
requests = ">=2.0.0"
[package.extras]
rsa = ["oauthlib[signedtoken] (>=3.0.0)"]
[[package]]
name = "rsa"
version = "4.9"
description = "Pure-Python RSA implementation"
optional = false
python-versions = ">=3.6,<4"
files = [
{file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"},
{file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"},
]
[package.dependencies]
pyasn1 = ">=0.1.3"
[[package]]
name = "setuptools"
version = "69.0.3"
@@ -1023,6 +1498,17 @@ docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments
testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
[[package]]
name = "six"
version = "1.16.0"
description = "Python 2 and 3 compatibility utilities"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
files = [
{file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
]
[[package]]
name = "sniffio"
version = "1.3.0"
@@ -1138,6 +1624,22 @@ anyio = ">=3.4.0,<5"
[package.extras]
full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart", "pyyaml"]
[[package]]
name = "strenum"
version = "0.4.15"
description = "An Enum that inherits from str."
optional = false
python-versions = "*"
files = [
{file = "StrEnum-0.4.15-py3-none-any.whl", hash = "sha256:a30cda4af7cc6b5bf52c8055bc4bf4b2b6b14a93b574626da33df53cf7740659"},
{file = "StrEnum-0.4.15.tar.gz", hash = "sha256:878fb5ab705442070e4dd1929bb5e2249511c0bcf2b0eeacf3bcd80875c82eff"},
]
[package.extras]
docs = ["myst-parser[linkify]", "sphinx", "sphinx-rtd-theme"]
release = ["twine"]
test = ["pylint", "pytest", "pytest-black", "pytest-cov", "pytest-pylint"]
[[package]]
name = "tomli"
version = "2.0.1"
@@ -1189,6 +1691,34 @@ files = [
{file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"},
]
[[package]]
name = "tzdata"
version = "2023.4"
description = "Provider of IANA time zone data"
optional = false
python-versions = ">=2"
files = [
{file = "tzdata-2023.4-py2.py3-none-any.whl", hash = "sha256:aa3ace4329eeacda5b7beb7ea08ece826c28d761cda36e747cfbf97996d39bf3"},
{file = "tzdata-2023.4.tar.gz", hash = "sha256:dd54c94f294765522c77399649b4fefd95522479a664a0cec87f41bebc6148c9"},
]
[[package]]
name = "urllib3"
version = "2.2.0"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
python-versions = ">=3.8"
files = [
{file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"},
{file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"},
]
[package.extras]
brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
h2 = ["h2 (>=4,<5)"]
socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
zstd = ["zstandard (>=0.18.0)"]
[[package]]
name = "uvicorn"
version = "0.26.0"
@@ -1208,6 +1738,17 @@ typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""}
[package.extras]
standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"]
[[package]]
name = "vine"
version = "5.1.0"
description = "Python promises."
optional = false
python-versions = ">=3.6"
files = [
{file = "vine-5.1.0-py3-none-any.whl", hash = "sha256:40fdf3c48b2cfe1c38a49e9ae2da6fda88e4794c810050a728bd7413811fb1dc"},
{file = "vine-5.1.0.tar.gz", hash = "sha256:8b62e981d35c41049211cf62a0a1242d8c1ee9bd15bb196ce38aefd6799e61e0"},
]
[[package]]
name = "virtualenv"
version = "20.25.0"
@@ -1228,7 +1769,18 @@ platformdirs = ">=3.9.1,<5"
docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"]
test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"]
[[package]]
name = "wcwidth"
version = "0.2.13"
description = "Measures the displayed width of unicode strings in a terminal"
optional = false
python-versions = "*"
files = [
{file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"},
{file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"},
]
[metadata]
lock-version = "2.0"
python-versions = "^3.10"
content-hash = "106e42984de924817e2dc083ad78699b3411f9aa60de5bb5c1a95ca94a21fda1"
content-hash = "5ad429e191b066c84074106341a72bb5e17ba85c5733acaae3d2ffd2db40e338"

View File

@@ -17,6 +17,9 @@ pytest-asyncio = "^0.23.3"
redis = "^4.6.0"
types-redis = "^4.6.0.3"
mypy = "^1.4.1"
celery = "^5.3.6"
openpyxl = "^3.1.2"
gspread = "^6.0.1"
[tool.poetry.group.dev.dependencies]

View File

@@ -52,6 +52,6 @@ async def client(event_loop) -> AsyncGenerator[AsyncClient, None]:
async with AsyncClient(
app=app,
base_url='http://localhost:8000/api/v1/menus',
base_url='http://localhost:8000',
) as async_client:
yield async_client
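The base_url here drops the old /api/v1/menus suffix because the test helpers in this change now build absolute route paths by endpoint name, so a prefixed base URL would double the prefix. A minimal sketch of the resulting request flow, assuming the menu detail route is named 'get_menu' and is mounted under /api/v1/menus (both inferred from the tests in this diff, not stated here):

# Sketch only; the route name and URL prefix are assumptions based on the tests below.
from httpx import AsyncClient

from fastfood.app import create_app

app = create_app()


async def fetch_menu(menu_id: str) -> int:
    # url_path_for returns an absolute path such as '/api/v1/menus/{menu_id}',
    # so the client only needs the bare host as its base_url.
    path = app.url_path_for('get_menu', menu_id=menu_id)
    async with AsyncClient(app=app, base_url='http://localhost:8000') as ac:
        response = await ac.get(path)
        return response.status_code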

View File

@@ -1,6 +1,6 @@
from httpx import AsyncClient, Response
from .urls import reverse_url
from .urls import reverse
class Repository:
@@ -9,28 +9,28 @@ class Repository:
async def read_all(ac: AsyncClient) -> tuple[int, dict]:
"""Read all menus"""
response: Response = await ac.get(reverse_url('menus'))
response: Response = await ac.get(reverse('get_menus'))
return response.status_code, response.json()
@staticmethod
async def get(ac: AsyncClient, data: dict) -> tuple[int, dict]:
"""Get a menu by id"""
response: Response = await ac.get(
reverse_url('menu', menu_id=data.get('id'))
reverse('get_menu', menu_id=data.get('id'))
)
return response.status_code, response.json()
@staticmethod
async def write(ac: AsyncClient, data: dict) -> tuple[int, dict]:
"""Create a menu"""
response: Response = await ac.post(reverse_url('menus'), json=data)
response: Response = await ac.post(reverse('add_menu'), json=data)
return response.status_code, response.json()
@staticmethod
async def update(ac: AsyncClient, data: dict) -> tuple[int, dict]:
"""Update a menu by id"""
response: Response = await ac.patch(
reverse_url('menu', menu_id=data.get('id')),
reverse('update_menu', menu_id=data.get('id')),
json=data,
)
return response.status_code, response.json()
@@ -39,7 +39,7 @@ class Repository:
async def delete(ac: AsyncClient, data: dict) -> int:
"""Delete a menu by id"""
response: Response = await ac.delete(
reverse_url('menu', menu_id=data.get('id')),
reverse('delete_menu', menu_id=data.get('id')),
)
return response.status_code
@@ -48,7 +48,7 @@ class Repository:
async def read_all(ac: AsyncClient, menu: dict) -> tuple[int, dict]:
"""Read all submenus of a menu"""
response: Response = await ac.get(
reverse_url('submenus', menu_id=menu.get('id')),
reverse('get_submenus', menu_id=menu.get('id')),
)
return response.status_code, response.json()
@@ -60,8 +60,8 @@ class Repository:
) -> tuple[int, dict]:
"""Get a submenu by id"""
response: Response = await ac.get(
reverse_url(
'submenu',
reverse(
'get_submenu',
menu_id=menu.get('id'),
submenu_id=submenu.get('id'),
),
@@ -76,7 +76,7 @@ class Repository:
) -> tuple[int, dict]:
"""Create a submenu"""
response: Response = await ac.post(
reverse_url('submenu', menu_id=menu.get('id')),
reverse('create_submenu_item', menu_id=menu.get('id')),
json=submenu,
)
return response.status_code, response.json()
@@ -87,8 +87,8 @@ class Repository:
) -> tuple[int, dict]:
"""Update a submenu by id"""
response: Response = await ac.patch(
reverse_url(
'submenu',
reverse(
'update_submenu',
menu_id=menu.get('id'),
submenu_id=submenu.get('id'),
),
@@ -100,8 +100,8 @@ class Repository:
async def delete(ac: AsyncClient, menu: dict, submenu: dict) -> int:
"""Delete a submenu by id"""
response: Response = await ac.delete(
reverse_url(
'submenu',
reverse(
'delete_submenu',
menu_id=menu.get('id'),
submenu_id=submenu.get('id'),
),
@@ -115,8 +115,8 @@ class Repository:
) -> tuple[int, dict]:
"""Read all dishes of a submenu"""
response: Response = await ac.get(
reverse_url(
'dishes',
reverse(
'get_dishes',
menu_id=menu.get('id'),
submenu_id=submenu.get('id'),
),
@@ -129,8 +129,8 @@ class Repository:
) -> tuple[int, dict]:
"""Get a dish by id"""
response: Response = await ac.get(
reverse_url(
'dish',
reverse(
'get_dish',
menu_id=menu.get('id'),
submenu_id=submenu.get('id'),
dish_id=dish.get('id'),
@@ -144,8 +144,8 @@ class Repository:
) -> tuple[int, dict]:
"""Create a dish"""
response: Response = await ac.post(
reverse_url(
'dishes',
reverse(
'create_dish',
menu_id=menu.get('id'),
submenu_id=submenu.get('id'),
),
@@ -159,8 +159,8 @@ class Repository:
) -> tuple[int, dict]:
"""Update a dish by id"""
response: Response = await ac.patch(
reverse_url(
'dish',
reverse(
'update_dish',
menu_id=menu.get('id'),
submenu_id=submenu.get('id'),
dish_id=dish.get('id'),
@@ -178,11 +178,19 @@ class Repository:
) -> int:
"""Delete a dish by id"""
response: Response = await ac.delete(
reverse_url(
'dish',
reverse(
'delete_dish',
menu_id=menu.get('id'),
submenu_id=submenu.get('id'),
dish_id=dish.get('id'),
),
)
return response.status_code
class Summary:
@staticmethod
async def read_summary(ac: AsyncClient) -> tuple[int, dict]:
"""Read the full menu summary"""
response: Response = await ac.get(reverse('get_summary'))
return response.status_code, response.json()

View File

@@ -1,360 +0,0 @@
import pytest
from httpx import AsyncClient
from .repository import Repository as Repo
@pytest.mark.asyncio
async def test_menu_crud_empty(client: AsyncClient) -> None:
"""Тестирование функций меню"""
code, rspn = await Repo.Menu.read_all(client)
assert code == 200
assert rspn == []
@pytest.mark.asyncio
async def test_menu_crud_add(client: AsyncClient) -> None:
"""Тестирование функций меню"""
data = {'title': 'Menu', 'description': None}
code, rspn = await Repo.Menu.write(client, data)
assert code == 201
assert rspn['title'] == 'Menu'
assert rspn['description'] is None
await Repo.Menu.delete(client, rspn)
@pytest.mark.asyncio
async def test_menu_crud_get(client: AsyncClient) -> None:
"""Тестирование функций меню"""
data = {'title': 'Menu', 'description': None}
code, rspn = await Repo.Menu.write(client, data)
code, menu = await Repo.Menu.get(client, {'id': rspn.get('id')})
assert code == 200
assert menu['title'] == rspn['title']
await Repo.Menu.delete(client, menu)
@pytest.mark.asyncio
async def test_menu_crud_update(client: AsyncClient) -> None:
"""Тестирование функций меню"""
data = {'title': 'Menu', 'description': None}
code, rspn = await Repo.Menu.write(client, data)
upd_data = {
'id': rspn.get('id'),
'title': 'upd Menu',
'description': '',
}
code, upd_rspn = await Repo.Menu.update(client, upd_data)
assert code == 200
assert upd_rspn['title'] == 'upd Menu'
await Repo.Menu.delete(client, upd_rspn)
@pytest.mark.asyncio
async def test_menu_crud_delete(client: AsyncClient) -> None:
"""Тестирование функций меню"""
data = {'title': 'Menu', 'description': None}
code, rspn = await Repo.Menu.write(client, data)
code = await Repo.Menu.delete(client, rspn)
assert code == 200
code, rspn = await Repo.Menu.get(client, {'id': rspn.get('id')})
assert code == 404
@pytest.mark.asyncio
async def test_menu_crud_get_all(client: AsyncClient) -> None:
"""Тестирование функций меню"""
code, rspn = await Repo.Menu.read_all(client)
assert code == 200
assert rspn == []
data = {'title': 'Menu', 'description': None}
code, rspn = await Repo.Menu.write(client, data)
code, upd_rspn = await Repo.Menu.read_all(client)
assert code == 200
assert upd_rspn == [rspn]
await Repo.Menu.delete(client, rspn)
@pytest.mark.asyncio
async def test_submenus_get_all(client) -> None:
# Создаем меню и проверяем ответ
menu = {'title': 'Menu', 'description': 'main menu'}
code, rspn = await Repo.Menu.write(client, menu)
assert code == 201
menu.update(rspn)
# Проверяем наличие подменю
code, rspn = await Repo.Submenu.read_all(client, menu)
assert code == 200
assert rspn == []
# Создаем и проверяем подменю
submenu = {
'title': 'Submenu',
'description': 'submenu',
'parent_menu': menu['id'],
}
code, rspn = await Repo.Submenu.write(client, menu, submenu)
submenu.update(rspn)
# Проверяем наличие подменю
code, upd_rspn = await Repo.Submenu.read_all(client, menu)
assert code == 200
assert upd_rspn == [rspn]
# удаляем сопутствующее
await Repo.Submenu.delete(client, menu, submenu)
await Repo.Menu.delete(client, menu)
@pytest.mark.asyncio
async def test_submenus_add(client) -> None:
# Создаем меню и проверяем ответ
menu = {'title': 'Menu', 'description': 'main menu'}
code, rspn = await Repo.Menu.write(client, menu)
menu.update(rspn)
# Создаем и проверяем подменю
submenu = {
'title': 'Submenu',
'description': 'submenu',
'parent_menu': menu['id'],
}
code, rspn = await Repo.Submenu.write(client, menu, submenu)
assert code == 201
submenu.update(rspn)
# удаляем сопутствующее
await Repo.Submenu.delete(client, menu, submenu)
await Repo.Menu.delete(client, menu)
@pytest.mark.asyncio
async def test_submenus_update(client) -> None:
# Создаем меню и проверяем ответ
menu = {'title': 'Menu', 'description': 'main menu'}
code, rspn = await Repo.Menu.write(client, menu)
menu.update(rspn)
# Создаем и проверяем подменю
submenu = {
'title': 'Submenu',
'description': 'submenu',
'parent_menu': menu['id'],
}
code, rspn = await Repo.Submenu.write(client, menu, submenu)
submenu.update(rspn)
# Обновляем подменю и проверяем
submenu['title'] = 'updated_submenu'
code, rspn = await Repo.Submenu.update(client, menu, submenu)
assert code == 200
assert submenu['title'] == rspn['title']
submenu.update(rspn)
# удаляем сопутствующее
await Repo.Submenu.delete(client, menu, submenu)
await Repo.Menu.delete(client, menu)
@pytest.mark.asyncio
async def test_submenus_delete(client) -> None:
# Создаем меню и проверяем ответ
menu = {'title': 'Menu', 'description': 'main menu'}
code, rspn = await Repo.Menu.write(client, menu)
menu.update(rspn)
# Создаем и проверяем подменю
submenu = {
'title': 'Submenu',
'description': 'submenu',
'parent_menu': menu['id'],
}
code, rspn = await Repo.Submenu.write(client, menu, submenu)
submenu.update(rspn)
# Удаляем подменю
code = await Repo.Submenu.delete(client, menu, submenu)
assert code == 200
# Проверяем удаленное подменю
code, rspn = await Repo.Submenu.get(client, menu, submenu)
assert code == 404
# удаляем сопутствующее
await Repo.Menu.delete(client, menu)
@pytest.mark.asyncio
async def test_dishes_get_all(client: AsyncClient) -> None:
# Создаем меню и проверяем ответ
menu = {
'title': 'Menu',
'description': 'main menu',
}
code, rspn = await Repo.Menu.write(client, menu)
menu.update(rspn)
# Создаем и проверяем подменю
submenu = {
'title': 'Submenu',
'description': 'submenu',
'parent_menu': menu['id'],
}
code, rspn = await Repo.Submenu.write(client, menu, submenu)
submenu.update(rspn)
# Проверяем все блюда в подменю
code, rspn = await Repo.Dish.read_all(client, menu, submenu)
assert code == 200
assert rspn == []
# Добавляем блюдо
dish = {
'title': 'dish',
'description': 'some dish',
'price': '12.5',
'parent_submenu': submenu['id'],
}
code, rspn = await Repo.Dish.write(client, menu, submenu, dish)
assert code == 201
dish.update(rspn)
code, upd_rspn = await Repo.Dish.read_all(client, menu, submenu)
assert code == 200
# удаляем сопутствующее
await Repo.Dish.delete(client, menu, submenu, dish)
await Repo.Submenu.delete(client, menu, submenu)
await Repo.Menu.delete(client, menu)
@pytest.mark.asyncio
async def test_dishes_add(client: AsyncClient) -> None:
# Создаем меню и проверяем ответ
menu = {
'title': 'Menu',
'description': 'main menu',
}
code, rspn = await Repo.Menu.write(client, menu)
menu.update(rspn)
# Создаем и проверяем подменю
submenu = {
'title': 'Submenu',
'description': 'submenu',
'parent_menu': menu['id'],
}
code, rspn = await Repo.Submenu.write(client, menu, submenu)
submenu.update(rspn)
# Добавляем блюдо
dish = {
'title': 'dish',
'description': 'some dish',
'price': '12.5',
'parent_submenu': submenu['id'],
}
code, rspn = await Repo.Dish.write(client, menu, submenu, dish)
assert code == 201
dish.update(rspn)
# Получаем блюдо
code, rspn = await Repo.Dish.get(client, menu, submenu, dish)
assert code == 200
assert rspn['title'] == dish['title']
# удаляем сопутствующее
await Repo.Dish.delete(client, menu, submenu, dish)
await Repo.Submenu.delete(client, menu, submenu)
await Repo.Menu.delete(client, menu)
@pytest.mark.asyncio
async def test_dishes_update(client: AsyncClient) -> None:
# Создаем меню и проверяем ответ
menu = {
'title': 'Menu',
'description': 'main menu',
}
code, rspn = await Repo.Menu.write(client, menu)
menu.update(rspn)
# Создаем и проверяем подменю
submenu = {
'title': 'Submenu',
'description': 'submenu',
'parent_menu': menu['id'],
}
code, rspn = await Repo.Submenu.write(client, menu, submenu)
submenu.update(rspn)
# Добавляем блюдо
dish = {
'title': 'dish',
'description': 'some dish',
'price': '12.5',
'parent_submenu': submenu['id'],
}
code, rspn = await Repo.Dish.write(client, menu, submenu, dish)
dish.update(rspn)
# Обновляем блюдо и проверяем
dish['title'] = 'updated_dish'
code, rspn = await Repo.Dish.update(client, menu, submenu, dish)
assert code == 200
assert dish['title'] == rspn['title']
dish.update(rspn)
# удаляем сопутствующее
await Repo.Dish.delete(client, menu, submenu, dish)
await Repo.Submenu.delete(client, menu, submenu)
await Repo.Menu.delete(client, menu)
@pytest.mark.asyncio
async def test_dishes_delete(client: AsyncClient) -> None:
# Создаем меню и проверяем ответ
menu = {
'title': 'Menu',
'description': 'main menu',
}
code, rspn = await Repo.Menu.write(client, menu)
menu.update(rspn)
# Создаем и проверяем подменю
submenu = {
'title': 'Submenu',
'description': 'submenu',
'parent_menu': menu['id'],
}
code, rspn = await Repo.Submenu.write(client, menu, submenu)
submenu.update(rspn)
# Добавляем блюдо
dish = {
'title': 'dish',
'description': 'some dish',
'price': '12.5',
'parent_submenu': submenu['id'],
}
code, rspn = await Repo.Dish.write(client, menu, submenu, dish)
dish.update(rspn)
# Удаляем подменю
code = await Repo.Dish.delete(client, menu, submenu, dish)
assert code == 200
# Проверяем удаленное блюдо
code, rspn = await Repo.Dish.get(client, menu, submenu, dish)
assert code == 404
# удаляем сопутствующее
await Repo.Submenu.delete(client, menu, submenu)
await Repo.Menu.delete(client, menu)

174
tests/test_dish.py Normal file
View File

@@ -0,0 +1,174 @@
import pytest
from httpx import AsyncClient
from .repository import Repository as Repo
@pytest.mark.asyncio
async def test_dishes_get_all(client: AsyncClient) -> None:
# Create a menu and check the response
menu = {
'title': 'Menu',
'description': 'main menu',
}
code, rspn = await Repo.Menu.write(client, menu)
menu.update(rspn)
# Create a submenu and check it
submenu = {
'title': 'Submenu',
'description': 'submenu',
'parent_menu': menu['id'],
}
code, rspn = await Repo.Submenu.write(client, menu, submenu)
submenu.update(rspn)
# Check all dishes in the submenu
code, rspn = await Repo.Dish.read_all(client, menu, submenu)
assert code == 200
assert rspn == []
# Add a dish
dish = {
'title': 'dish',
'description': 'some dish',
'price': '12.5',
'parent_submenu': submenu['id'],
}
code, rspn = await Repo.Dish.write(client, menu, submenu, dish)
assert code == 201
dish.update(rspn)
code, upd_rspn = await Repo.Dish.read_all(client, menu, submenu)
assert code == 200
# Clean up related objects
await Repo.Dish.delete(client, menu, submenu, dish)
await Repo.Submenu.delete(client, menu, submenu)
await Repo.Menu.delete(client, menu)
@pytest.mark.asyncio
async def test_dishes_add(client: AsyncClient) -> None:
# Create a menu and check the response
menu = {
'title': 'Menu',
'description': 'main menu',
}
code, rspn = await Repo.Menu.write(client, menu)
menu.update(rspn)
# Create a submenu and check it
submenu = {
'title': 'Submenu',
'description': 'submenu',
'parent_menu': menu['id'],
}
code, rspn = await Repo.Submenu.write(client, menu, submenu)
submenu.update(rspn)
# Add a dish
dish = {
'title': 'dish',
'description': 'some dish',
'price': '12.5',
'parent_submenu': submenu['id'],
}
code, rspn = await Repo.Dish.write(client, menu, submenu, dish)
assert code == 201
dish.update(rspn)
# Get the dish
code, rspn = await Repo.Dish.get(client, menu, submenu, dish)
assert code == 200
assert rspn['title'] == dish['title']
# Clean up related objects
await Repo.Dish.delete(client, menu, submenu, dish)
await Repo.Submenu.delete(client, menu, submenu)
await Repo.Menu.delete(client, menu)
@pytest.mark.asyncio
async def test_dishes_update(client: AsyncClient) -> None:
# Create a menu and check the response
menu = {
'title': 'Menu',
'description': 'main menu',
}
code, rspn = await Repo.Menu.write(client, menu)
menu.update(rspn)
# Create a submenu and check it
submenu = {
'title': 'Submenu',
'description': 'submenu',
'parent_menu': menu['id'],
}
code, rspn = await Repo.Submenu.write(client, menu, submenu)
submenu.update(rspn)
# Add a dish
dish = {
'title': 'dish',
'description': 'some dish',
'price': '12.5',
'parent_submenu': submenu['id'],
}
code, rspn = await Repo.Dish.write(client, menu, submenu, dish)
dish.update(rspn)
# Update the dish and verify
dish['title'] = 'updated_dish'
code, rspn = await Repo.Dish.update(client, menu, submenu, dish)
assert code == 200
assert dish['title'] == rspn['title']
dish.update(rspn)
# Clean up related objects
await Repo.Dish.delete(client, menu, submenu, dish)
await Repo.Submenu.delete(client, menu, submenu)
await Repo.Menu.delete(client, menu)
@pytest.mark.asyncio
async def test_dishes_delete(client: AsyncClient) -> None:
# Create a menu and check the response
menu = {
'title': 'Menu',
'description': 'main menu',
}
code, rspn = await Repo.Menu.write(client, menu)
menu.update(rspn)
# Create a submenu and check it
submenu = {
'title': 'Submenu',
'description': 'submenu',
'parent_menu': menu['id'],
}
code, rspn = await Repo.Submenu.write(client, menu, submenu)
submenu.update(rspn)
# Add a dish
dish = {
'title': 'dish',
'description': 'some dish',
'price': '12.5',
'parent_submenu': submenu['id'],
}
code, rspn = await Repo.Dish.write(client, menu, submenu, dish)
dish.update(rspn)
# Delete the dish
code = await Repo.Dish.delete(client, menu, submenu, dish)
assert code == 200
# Check that the deleted dish returns 404
code, rspn = await Repo.Dish.get(client, menu, submenu, dish)
assert code == 404
# Clean up related objects
await Repo.Submenu.delete(client, menu, submenu)
await Repo.Menu.delete(client, menu)

80
tests/test_menu.py Normal file
View File

@@ -0,0 +1,80 @@
import pytest
from httpx import AsyncClient
from .repository import Repository as Repo
@pytest.mark.asyncio
async def test_menu_crud_empty(client: AsyncClient) -> None:
"""Menu list is empty on a fresh database"""
code, rspn = await Repo.Menu.read_all(client)
assert code == 200
assert rspn == []
@pytest.mark.asyncio
async def test_menu_crud_add(client: AsyncClient) -> None:
"""Create a menu"""
data = {'title': 'Menu', 'description': None}
code, rspn = await Repo.Menu.write(client, data)
assert code == 201
assert rspn['title'] == 'Menu'
assert rspn['description'] is None
await Repo.Menu.delete(client, rspn)
@pytest.mark.asyncio
async def test_menu_crud_get(client: AsyncClient) -> None:
"""Get a menu by id"""
data = {'title': 'Menu', 'description': None}
code, rspn = await Repo.Menu.write(client, data)
code, menu = await Repo.Menu.get(client, {'id': rspn.get('id')})
assert code == 200
assert menu['title'] == rspn['title']
await Repo.Menu.delete(client, menu)
@pytest.mark.asyncio
async def test_menu_crud_update(client: AsyncClient) -> None:
"""Update a menu"""
data = {'title': 'Menu', 'description': None}
code, rspn = await Repo.Menu.write(client, data)
upd_data = {
'id': rspn.get('id'),
'title': 'upd Menu',
'description': '',
}
code, upd_rspn = await Repo.Menu.update(client, upd_data)
assert code == 200
assert upd_rspn['title'] == 'upd Menu'
await Repo.Menu.delete(client, upd_rspn)
@pytest.mark.asyncio
async def test_menu_crud_delete(client: AsyncClient) -> None:
"""Delete a menu and verify it is gone"""
data = {'title': 'Menu', 'description': None}
code, rspn = await Repo.Menu.write(client, data)
code = await Repo.Menu.delete(client, rspn)
assert code == 200
code, rspn = await Repo.Menu.get(client, {'id': rspn.get('id')})
assert code == 404
@pytest.mark.asyncio
async def test_menu_crud_get_all(client: AsyncClient) -> None:
"""List menus after creating one"""
code, rspn = await Repo.Menu.read_all(client)
assert code == 200
assert rspn == []
data = {'title': 'Menu', 'description': None}
code, rspn = await Repo.Menu.write(client, data)
code, upd_rspn = await Repo.Menu.read_all(client)
assert code == 200
assert upd_rspn == [rspn]
await Repo.Menu.delete(client, rspn)

113
tests/test_submenu.py Normal file
View File

@@ -0,0 +1,113 @@
import pytest
from .repository import Repository as Repo
@pytest.mark.asyncio
async def test_submenus_get_all(client) -> None:
# Create a menu and check the response
menu = {'title': 'Menu', 'description': 'main menu'}
code, rspn = await Repo.Menu.write(client, menu)
assert code == 201
menu.update(rspn)
# Check that the submenu list is empty
code, rspn = await Repo.Submenu.read_all(client, menu)
assert code == 200
assert rspn == []
# Create a submenu and check it
submenu = {
'title': 'Submenu',
'description': 'submenu',
'parent_menu': menu['id'],
}
code, rspn = await Repo.Submenu.write(client, menu, submenu)
submenu.update(rspn)
# Check that the submenu is listed
code, upd_rspn = await Repo.Submenu.read_all(client, menu)
assert code == 200
assert upd_rspn == [rspn]
# Clean up related objects
await Repo.Submenu.delete(client, menu, submenu)
await Repo.Menu.delete(client, menu)
@pytest.mark.asyncio
async def test_submenus_add(client) -> None:
# Create a menu and check the response
menu = {'title': 'Menu', 'description': 'main menu'}
code, rspn = await Repo.Menu.write(client, menu)
menu.update(rspn)
# Create a submenu and check it
submenu = {
'title': 'Submenu',
'description': 'submenu',
'parent_menu': menu['id'],
}
code, rspn = await Repo.Submenu.write(client, menu, submenu)
assert code == 201
submenu.update(rspn)
# Clean up related objects
await Repo.Submenu.delete(client, menu, submenu)
await Repo.Menu.delete(client, menu)
@pytest.mark.asyncio
async def test_submenus_update(client) -> None:
# Create a menu and check the response
menu = {'title': 'Menu', 'description': 'main menu'}
code, rspn = await Repo.Menu.write(client, menu)
menu.update(rspn)
# Create a submenu and check it
submenu = {
'title': 'Submenu',
'description': 'submenu',
'parent_menu': menu['id'],
}
code, rspn = await Repo.Submenu.write(client, menu, submenu)
submenu.update(rspn)
# Update the submenu and verify
submenu['title'] = 'updated_submenu'
code, rspn = await Repo.Submenu.update(client, menu, submenu)
assert code == 200
assert submenu['title'] == rspn['title']
submenu.update(rspn)
# Clean up related objects
await Repo.Submenu.delete(client, menu, submenu)
await Repo.Menu.delete(client, menu)
@pytest.mark.asyncio
async def test_submenus_delete(client) -> None:
# Create a menu and check the response
menu = {'title': 'Menu', 'description': 'main menu'}
code, rspn = await Repo.Menu.write(client, menu)
menu.update(rspn)
# Create a submenu and check it
submenu = {
'title': 'Submenu',
'description': 'submenu',
'parent_menu': menu['id'],
}
code, rspn = await Repo.Submenu.write(client, menu, submenu)
submenu.update(rspn)
# Delete the submenu
code = await Repo.Submenu.delete(client, menu, submenu)
assert code == 200
# Check that the deleted submenu returns 404
code, rspn = await Repo.Submenu.get(client, menu, submenu)
assert code == 404
# Clean up related objects
await Repo.Menu.delete(client, menu)

113
tests/test_summary.py Normal file
View File

@@ -0,0 +1,113 @@
import pytest
from httpx import AsyncClient
from .repository import Repository as Repo
@pytest.mark.asyncio
async def test_summary_with_menu(client: AsyncClient) -> None:
# Check that the summary is empty
code, rspn = await Repo.Summary.read_summary(client)
assert code == 200
assert rspn == []
# Create a menu and check the response
menu = {'title': 'Menu', 'description': 'main menu', 'submenus': []}
code, rspn = await Repo.Menu.write(client, menu)
menu.update(rspn)
# Drop keys that are not used in the model
del menu['submenus_count']
del menu['dishes_count']
# Check the summary with the menu
code, rspn = await Repo.Summary.read_summary(client)
assert code == 200
assert rspn == [menu]
# Clean up related objects
await Repo.Menu.delete(client, menu)
@pytest.mark.asyncio
async def test_summary_with_submenus(client: AsyncClient) -> None:
# Create a menu and check the response
menu: dict[str, str | list | float] = {
'title': 'Menu',
'description': 'main menu',
'submenus': [],
}
code, rspn = await Repo.Menu.write(client, menu)
menu.update(rspn)
del menu['submenus_count']
del menu['dishes_count']
# Create a submenu and check it
submenu: dict[str, str | list | float] = {
'title': 'Submenu',
'description': 'submenu',
'parent_menu': menu['id'],
'dishes': list(),
}
code, rspn = await Repo.Submenu.write(client, menu, submenu)
submenu.update(rspn)
del submenu['dishes_count']
del submenu['parent_menu']
menu['submenus'] = [submenu]
# Read the summary with the submenu
code, rspn = await Repo.Summary.read_summary(client)
assert code == 200
assert rspn == [menu]
await Repo.Menu.delete(client, menu)
@pytest.mark.asyncio
async def test_summary_with_dishes(client: AsyncClient) -> None:
# Create a menu and check the response
menu: dict[str, str | list | float] = {
'title': 'Menu',
'description': 'main menu',
'submenus': [],
}
code, rspn = await Repo.Menu.write(client, menu)
menu.update(rspn)
del menu['submenus_count']
del menu['dishes_count']
# Create a submenu and check it
submenu: dict[str, str | list | float] = {
'title': 'Submenu',
'description': 'submenu',
'parent_menu': menu['id'],
'dishes': [],
}
code, rspn = await Repo.Submenu.write(client, menu, submenu)
submenu.update(rspn)
del submenu['dishes_count']
del submenu['parent_menu']
# Add a dish
dish = {
'title': 'dish',
'description': 'some dish',
'price': '12.5',
'parent_submenu': submenu['id'],
}
code, rspn = await Repo.Dish.write(client, menu, submenu, dish)
dish.update(rspn)
del dish['parent_submenu']
del dish['id']
submenu['dishes'] = [dish]
menu['submenus'] = [submenu]
code, rspn = await Repo.Summary.read_summary(client)
assert code == 200
assert rspn == [menu]
await Repo.Menu.delete(client, menu)

View File

@@ -1,25 +1,9 @@
def reverse_url(loc: str, **kwargs) -> str:
    menu_pref = '/'
    submenu_pref = menu_pref + str(kwargs.get('menu_id', '')) + '/submenus/'
    dish_pref = submenu_pref + str(kwargs.get('submenu_id', '')) + '/dishes/'
    match loc:
        case 'menus':
            return menu_pref
        case 'menu':
            return menu_pref + str(kwargs.get('menu_id', ''))
        case 'submenus':
            return submenu_pref
        case 'submenu':
            return submenu_pref + str(kwargs.get('submenu_id', ''))
        case 'dishes':
            return dish_pref
        case 'dish':
            return dish_pref + str(kwargs.get('dish_id', ''))
    return menu_pref
from fastfood.app import create_app
app = create_app()
def reverse(loc: str, **kwargs) -> str:
    url = app.url_path_for(loc, **kwargs)
    return url
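The hand-built prefix concatenation is replaced by name-based resolution: reverse() now derives URLs from endpoint names via Starlette's url_path_for, so route changes in the app no longer break the tests. A short usage sketch (the IDs are hypothetical; the route names are the ones the test repository above passes to reverse()):

# Hedged example, not part of the diff: assumes the application registers
# routes under these names, as the test repository in this change expects,
# and that the tests directory is importable as the package 'tests'.
from tests.urls import reverse

print(reverse('get_menus'))               # list endpoint, no path parameters
print(reverse('get_menu', menu_id='42'))  # path parameter filled in by url_path_for
print(reverse('get_dish', menu_id='42', submenu_id='7', dish_id='1'))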