Compare commits

...

32 Commits

Author SHA1 Message Date
Сергей Ванюшкин 8bfa166987 merge branches 2024-02-12 23:09:50 +03:00
Сергей Ванюшкин e0a81cf126 google sheets docker image 2024-02-12 23:09:01 +03:00
Сергей Ванюшкин a4f8bce657 google sync 2024-02-12 23:09:01 +03:00
Сергей Ванюшкин 9ba42aae9f upd: background task no longer drops the database 2024-02-12 23:09:01 +03:00
Сергей Ванюшкин afdf1c5e2b fix 2024-02-12 23:09:01 +03:00
Сергей Ванюшкин 74c0ccae2a fix 2024-02-12 23:09:01 +03:00
Сергей Ванюшкин 2c48529a02 fix 2024-02-12 23:09:01 +03:00
Сергей Ванюшкин cedf27a04d fix 2024-02-12 23:09:01 +03:00
Сергей Ванюшкин e0798de713 fix 2024-02-12 23:09:01 +03:00
Сергей Ванюшкин 5a133a05e1 fix 2024-02-12 23:09:01 +03:00
Сергей Ванюшкин 3df3c67e7c fix: correct the RabbitMQ URL 2024-02-12 23:09:01 +03:00
Сергей Ванюшкин a0ebe9bdb9 upd: Containers for celery & rabbitmq 2024-02-12 23:09:01 +03:00
Сергей Ванюшкин ed3d7d9352 upd: Split the tests into separate files, trimming the long single module
upd: Test for the summary route
2024-02-12 23:09:01 +03:00
Сергей Ванюшкин 3dbefda936 upd: Apply the discount in API output 2024-02-12 23:09:01 +03:00
Сергей Ванюшкин 5a95b06300 upd: Added bg_task xlsx>>DBase 2024-02-12 23:09:01 +03:00
Сергей Ванюшкин ebe75b6dc3 upd: Added summary route returning the entire menu with nesting 2024-02-12 23:09:01 +03:00
Сергей Ванюшкин 22a876d3ce google sheets docker image 2024-02-12 23:03:28 +03:00
Сергей Ванюшкин 6a0776557d google sync 2024-02-12 22:49:16 +03:00
Сергей Ванюшкин b2a284d791 upd: background task no longer drops the database 2024-02-12 22:22:59 +03:00
Сергей Ванюшкин 5e213e759d fix 2024-02-12 03:03:24 +03:00
Сергей Ванюшкин f28637f5dd fix 2024-02-12 02:42:46 +03:00
Сергей Ванюшкин e6d1070d9a fix 2024-02-12 01:42:53 +03:00
Сергей Ванюшкин 47cb0e08c7 fix 2024-02-12 01:29:06 +03:00
Сергей Ванюшкин e6576e9e58 fix 2024-02-12 01:11:00 +03:00
Сергей Ванюшкин 02134d247a fix 2024-02-12 01:06:45 +03:00
Сергей Ванюшкин 68db31a033 fix: correct the RabbitMQ URL 2024-02-12 00:54:53 +03:00
Сергей Ванюшкин fc9577c538 upd: Containers for celery & rabbitmq 2024-02-12 00:39:51 +03:00
Сергей Ванюшкин 550a058b6f upd: Split the tests into separate files, trimming the long single module
upd: Test for the summary route
2024-02-11 23:17:57 +03:00
Сергей Ванюшкин ffb5b855c4 upd: Apply the discount in API output 2024-02-11 20:10:25 +03:00
Сергей Ванюшкин d9633dcfbd upd: Added bg_task xlsx>>DBase 2024-02-11 03:14:17 +03:00
Сергей Ванюшкин e4656825cb upd: Added summary route returning the entire menu with nesting 2024-02-09 02:57:34 +03:00
Сергей Ванюшкин 3b1a1614cf fix: .env for local run 2024-02-07 12:37:43 +03:00
32 changed files with 3100 additions and 409 deletions

2
.env
View File

@ -4,4 +4,4 @@ POSTGRES_USER=testuser
POSTGRES_PASSWORD=test
POSTGRES_DB=fastfood_db
POSTGRES_DB_TEST=testdb
REDIS_DB=redis://127.0.0.1:6379/0
REDIS_DB=redis://127.0.0.1:6379/0

View File

@ -8,7 +8,11 @@ RUN mkdir -p /usr/src/fastfood
WORKDIR /usr/src/fastfood
COPY . .
COPY ./example.env .
COPY ./poetry.lock .
COPY ./pyproject.toml .
RUN touch /usr/src/RUN_IN_DOCKER

BIN
admin/Menu.xlsx Normal file

Binary file not shown.

0
bg_tasks/__init__.py Normal file
View File

50
bg_tasks/bg_task.py Normal file
View File

@ -0,0 +1,50 @@
import asyncio
from celery import Celery
from fastfood.config import settings
from .updater import main, main_gsheets
loop = asyncio.get_event_loop()
celery_app = Celery(
'tasks',
broker=settings.REBBITMQ_URL,
backend='rpc://',
include=['bg_tasks.bg_task'],
)
celery_app.conf.beat_schedule = {
'run-task-every-15-seconds': {
'task': 'bg_tasks.bg_task.periodic_task',
'schedule': 30.0,
},
}
celery_app_google = Celery(
'tasks',
broker=settings.REBBITMQ_URL,
backend='rpc://',
include=['bg_tasks.bg_task'],
)
celery_app_google.conf.beat_schedule = {
'run-task-every-15-seconds': {
'task': 'bg_tasks.bg_task.periodic_task_google',
'schedule': 30.0,
},
}
@celery_app_google.task
def periodic_task_google() -> None:
result = loop.run_until_complete(main_gsheets())
return result
@celery_app.task
def periodic_task() -> None:
result = loop.run_until_complete(main())
return result
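
For context: both Celery apps above are normally driven by beat on the 30-second schedule (see the compose files further down). A minimal sketch, not part of this diff, of kicking off the local-file sync task by hand, assuming the broker from settings.REBBITMQ_URL is reachable:

from bg_tasks.bg_task import periodic_task

# Enqueue one run immediately instead of waiting for the beat schedule.
result = periodic_task.delay()
print(result.id)  # task id; the rpc:// backend keeps the return value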

94
bg_tasks/parser.py Normal file
View File

@ -0,0 +1,94 @@
import os
import gspread
import openpyxl
file = os.path.join(os.path.curdir, 'admin', 'Menu.xlsx')
async def gsheets_to_rows() -> list[list[str | int | float]]:
"""Получение всех строк из Google Sheets"""
def to_int(val: str) -> int | str:
try:
res = int(val)
except ValueError:
return val
return res
def to_float(val: str) -> float | str:
val = val.replace(',', '.')
try:
res = float(val)
except ValueError:
return val
return res
gc = gspread.service_account(filename='creds.json')
sh = gc.open('Menu')
data = sh.sheet1.get_all_values()
for row in data:
row[:3] = list(map(to_int, row[:3]))
row[-2:] = list(map(to_float, row[-2:]))
return data
async def local_xlsx_to_rows() -> list[list[str | int | float]]:
"""Получение всех строк из локального файла Menu"""
data = []
wb = openpyxl.load_workbook(file).worksheets[0]
for row in wb.iter_rows(values_only=True):
data.append(list(row))
return data
async def rows_to_dict(rows: list[list]) -> tuple:
"""Парсит строки полученные и источников в словарь"""
menus = {}
submenus = {}
dishes = {}
menu_num = None
submenu_num = None
for row in rows:
if all(row[:3]):
menu = {
row[0]: {
'data': {'title': row[1], 'description': row[2]},
'id': None,
}
}
menu_num = row[0]
menus.update(menu)
elif all(row[1:4]):
submenu = {
(menu_num, row[1]): {
'data': {'title': row[2], 'description': row[3]},
'parent_num': menu_num,
'id': None,
'parent_menu': None,
}
}
submenu_num = row[1]
submenus.update(submenu)
elif all(row[3:6]):
dish = {
(menu_num, submenu_num, row[2]): {
'data': {
'title': row[3],
'description': row[4],
'price': row[5],
},
'parent_num': (menu_num, submenu_num),
'id': None,
'parent_submenu': None,
'discont': row[6],
},
}
dishes.update(dish)
return menus, submenus, dishes
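
A small illustration, not part of the diff, of the row layout rows_to_dict expects, using hypothetical sample values (the real data lives in admin/Menu.xlsx or the 'Menu' Google sheet). Menu rows fill columns 0-2, submenu rows columns 1-3, dish rows columns 2-6, with the last column being the optional discount:

import asyncio
from bg_tasks.parser import rows_to_dict

rows = [
    [1, 'Menu 1', 'Main menu', '', '', '', ''],
    ['', 1, 'Submenu 1', 'Hot dishes', '', '', ''],
    ['', '', 1, 'Soup', 'Tomato soup', 10.5, 15],
]
menus, submenus, dishes = asyncio.run(rows_to_dict(rows))
print(dishes[(1, 1, 1)]['data'])  # {'title': 'Soup', 'description': 'Tomato soup', 'price': 10.5}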

295
bg_tasks/updater.py Normal file
View File

@ -0,0 +1,295 @@
import os
import pickle
import redis.asyncio as redis # type: ignore
from sqlalchemy import delete, update
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
from fastfood.config import settings
from fastfood.models import Dish, Menu, SubMenu
from .parser import file, gsheets_to_rows, local_xlsx_to_rows, rows_to_dict
redis = redis.Redis.from_url(url=settings.REDIS_URL)
async_engine = create_async_engine(settings.DATABASE_URL_asyncpg)
async_session_maker = async_sessionmaker(
async_engine,
class_=AsyncSession,
expire_on_commit=False,
)
async def clear_cache(pattern: str) -> None:
keys = [key async for key in redis.scan_iter(pattern)]
if keys:
await redis.delete(*keys)
async def is_changed_xls() -> bool:
"""Проверяет, изменен ли файл с последнего запуска таска."""
if not os.path.exists(file):
return False
mod_time = os.path.getmtime(file)
cached_time = await redis.get('XLSX_MOD_TIME')
if cached_time is not None:
cached_time = pickle.loads(cached_time)
if mod_time == cached_time:
return False
return True
async def on_menu_change(
new_menu: dict, old_menu: dict, session: AsyncSession
) -> dict | None:
if new_menu and not old_menu:
# Create the menu
menu = Menu(
title=new_menu['data']['title'],
description=new_menu['data']['description'],
)
session.add(menu)
await session.flush()
new_menu['id'] = str(menu.id)
elif new_menu and old_menu:
# Update the menu
await session.execute(
update(Menu).where(Menu.id == old_menu['id']).values(**(new_menu['data']))
)
new_menu['id'] = old_menu['id']
else:
# Delete the menu
await session.execute(delete(Menu).where(Menu.id == old_menu['id']))
await session.commit()
# Clear the cache
await clear_cache('MENUS*')
await clear_cache('summary')
return new_menu
async def menus_updater(menus: dict, session: AsyncSession) -> None:
"""Проверяет пункты меню на изменения
При необходимости запускае обновление БД
через фенкцию on_menu_change
"""
cached_menus = await redis.get('ALL_MENUS')
if cached_menus is not None:
cached_menus = pickle.loads(cached_menus)
else:
cached_menus = {}
for key in menus.keys():
if key not in cached_menus.keys():
# Create a menu
menu = await on_menu_change(menus[key], {}, session)
menus[key] = menu
elif key in cached_menus.keys():
# Update the menu
if menus[key].get('data') != cached_menus[key].get('data'):
menu = await on_menu_change(menus[key], cached_menus[key], session)
menus[key] = menu
else:
menus[key]['id'] = cached_menus[key]['id']
for key in {k: cached_menus[k] for k in set(cached_menus) - set(menus)}:
# Check for deleted menus
await on_menu_change({}, cached_menus.pop(key), session)
await redis.set('ALL_MENUS', pickle.dumps(menus))
async def on_submenu_change(
new_sub: dict, old_sub: dict, session: AsyncSession
) -> dict:
if new_sub and not old_sub:
# Create the submenu
submenu = SubMenu(
title=new_sub['data']['title'],
description=new_sub['data']['description'],
)
submenu.parent_menu = new_sub['parent_menu']
session.add(submenu)
await session.flush()
new_sub['id'] = str(submenu.id)
new_sub['parent_menu'] = str(submenu.parent_menu)
elif new_sub and old_sub:
# Update the submenu
await session.execute(
update(SubMenu)
.where(SubMenu.id == old_sub['id'])
.values(**(new_sub['data']))
)
new_sub['id'] = old_sub['id']
new_sub['parent_menu'] = old_sub['parent_menu']
else:
# Delete the submenu
await session.execute(delete(SubMenu).where(SubMenu.id == old_sub['id']))
await clear_cache('MENUS*')
await clear_cache('summary')
await session.commit()
return new_sub
async def submenus_updater(submenus: dict, session: AsyncSession) -> None:
"""Проверяет пункты подменю на изменения
При необходимости запускае обновление БД
"""
# Get the menus from the cache to resolve their IDs by row number in the sheet
cached_menus = await redis.get('ALL_MENUS')
if cached_menus is not None:
cached_menus = pickle.loads(cached_menus)
else:
cached_menus = {}
# Get the submenus from the cache
cached_sub = await redis.get('ALL_SUBMENUS')
if cached_sub is not None:
cached_sub = pickle.loads(cached_sub)
else:
cached_sub = {}
for key in submenus.keys():
parent = cached_menus[submenus[key]['parent_num']]['id']
submenus[key]['parent_menu'] = parent
if key not in cached_sub.keys():
# Resolve and set the parent_menu UUID
submenus[key]['parent_menu'] = parent
submenu = await on_submenu_change(submenus[key], {}, session)
submenus[key] = submenu
elif key in cached_sub.keys():
# Update the submenu
if submenus[key].get('data') != cached_sub[key].get('data'):
submenu = await on_submenu_change(
submenus[key], cached_sub[key], session
)
submenus[key] = submenu
else:
submenus[key]['id'] = cached_sub[key]['id']
submenus[key]['parent_menu'] = cached_sub[key]['parent_menu']
for key in {k: cached_sub[k] for k in set(cached_sub) - set(submenus)}:
# Check for deleted submenus
await on_submenu_change({}, cached_sub.pop(key), session)
await redis.set('ALL_SUBMENUS', pickle.dumps(submenus))
async def on_dish_change(new_dish: dict, old_dish, session: AsyncSession) -> dict:
if new_dish and not old_dish:
dish = Dish(
title=new_dish['data']['title'],
description=new_dish['data']['description'],
price=new_dish['data']['price'],
)
dish.parent_submenu = new_dish['parent_submenu']
session.add(dish)
await session.flush()
new_dish['id'] = str(dish.id)
new_dish['parent_submenu'] = str(dish.parent_submenu)
new_dish['data']['price'] = str(dish.price)
elif new_dish and old_dish:
# Update the dish
await session.execute(
update(Dish).where(Dish.id == old_dish['id']).values(**(new_dish['data']))
)
new_dish['id'] = old_dish['id']
new_dish['parent_submenu'] = old_dish['parent_submenu']
new_dish['data']['price'] = old_dish['data']['price']
else:
# Delete the dish
await session.execute(delete(Dish).where(Dish.id == old_dish['id']))
await clear_cache('MENUS*')
await clear_cache('summary')
await session.commit()
return new_dish
async def dishes_updater(dishes: dict, session: AsyncSession) -> None:
"""Проверяет блюда на изменения
При необходимости запускае обновление БД
"""
cached_submenus = await redis.get('ALL_SUBMENUS')
if cached_submenus is not None:
cached_submenus = pickle.loads(cached_submenus)
else:
cached_submenus = {}
# Get the dishes from the cache
cached_dishes = await redis.get('ALL_DISHES')
if cached_dishes is not None:
cached_dishes = pickle.loads(cached_dishes)
else:
cached_dishes = {}
await clear_cache('DISCONT*')
for key in {k: cached_dishes[k] for k in set(cached_dishes) - set(dishes)}:
# Check for deleted dishes
await on_dish_change({}, cached_dishes.pop(key), session)
for key in dishes.keys():
parent = cached_submenus[dishes[key]['parent_num']]['id']
dishes[key]['parent_submenu'] = parent
if key not in cached_dishes.keys():
# Resolve and set the parent_submenu UUID
dishes[key]['parent_submenu'] = parent
dish = await on_dish_change(dishes[key], {}, session)
dishes[key] = dish
elif key in cached_dishes.keys():
# Update the dish
if dishes[key].get('data') != cached_dishes[key].get('data'):
dish = await on_dish_change(dishes[key], cached_dishes[key], session)
dishes[key] = dish
else:
dishes[key]['id'] = cached_dishes[key]['id']
dishes[key]['parent_submenu'] = cached_dishes[key]['parent_submenu']
if dishes[key]['discont'] is not None:
await redis.set(
f"DISCONT:{dishes[key]['id']}", pickle.dumps(dishes[key]['discont'])
)
await redis.set('ALL_DISHES', pickle.dumps(dishes))
async def updater(rows):
menus, submenus, dishes = await rows_to_dict(rows)
async with async_session_maker() as session:
await menus_updater(menus, session)
await submenus_updater(submenus, session)
await dishes_updater(dishes, session)
async def main() -> None:
"""Главная функция фоновой задачи"""
changed = await is_changed_xls()
if changed:
rows = await local_xlsx_to_rows()
await updater(rows)
async def main_gsheets() -> None:
rows = await gsheets_to_rows()
await updater(rows)
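
One detail worth noting: is_changed_xls compares the file's mtime with a cached XLSX_MOD_TIME key, and the diff does not show where that key gets written. A hedged sketch of a hypothetical helper (not part of this diff) that would refresh the marker after a successful sync, reusing the module-level redis client and the imported file path:

import os
import pickle

async def remember_xlsx_mtime() -> None:
    # Hypothetical helper: store the current mtime so the next is_changed_xls()
    # call treats the file as unchanged until it is edited again.
    await redis.set('XLSX_MOD_TIME', pickle.dumps(os.path.getmtime(file)))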

View File

@ -57,4 +57,56 @@ services:
restart: always
command: /bin/bash -c 'poetry run python /usr/src/fastfood/manage.py --run-test-server'
volumes:
- .:/usr/src/fastfood
command: /bin/bash -c 'poetry run python /usr/src/fastfood/manage.py --run-docker-server'
celery_worker:
container_name: celeryworker
build:
context: .
env_file:
- .env
depends_on:
- rabbitmq
- db
- app
- redis
volumes:
- .:/usr/src/fastfood
command: ["celery", "-A", "bg_tasks.bg_task:celery_app", "worker", "--loglevel=info", "--concurrency", "1", "-P", "solo"]
celery_beat:
container_name: celerybeat
build:
context: .
env_file:
- .env
depends_on:
- rabbitmq
- db
- app
- redis
volumes:
- .:/usr/src/fastfood
command: ["celery", "-A", "bg_tasks.bg_task:celery_app", "beat", "--loglevel=info"]
rabbitmq:
container_name: rabbit
image: "rabbitmq:management"
ports:
- 5672:5672

112
compose_google.yml Normal file
View File

@ -0,0 +1,112 @@
version: "3.8"
services:
redis:
container_name: redis_test
image: redis:7.2.4-alpine3.19
ports:
- '6380:6379'
healthcheck:
test: [ "CMD", "redis-cli","ping" ]
interval: 10s
timeout: 5s
retries: 5
db:
container_name: pgdb
image: postgres:15.1-alpine
env_file:
- .env
environment:
POSTGRES_DB: ${POSTGRES_DB}
POSTGRES_USER: ${POSTGRES_USER}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
ports:
- 6432:5432
healthcheck:
test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER} -d ${POSTGRES_DB}"]
interval: 10s
timeout: 5s
retries: 5
app:
container_name: fastfood_app
build:
context: .
env_file:
- .env
ports:
- 8000:8000
depends_on:
db:
condition: service_healthy
redis:
condition: service_healthy
restart: always
volumes:
- .:/usr/src/fastfood
command: /bin/bash -c 'poetry run python /usr/src/fastfood/manage.py --run-docker-server'
celery_worker:
container_name: celeryworker
build:
context: .
env_file:
- .env
depends_on:
- rabbitmq
- db
- app
- redis
volumes:
- .:/usr/src/fastfood
command: ["celery", "-A", "bg_tasks.bg_task:celery_app_google", "worker", "--loglevel=info", "--concurrency", "1", "-P", "solo"]
celery_beat:
container_name: celerybeat
build:
context: .
env_file:
- .env
depends_on:
- rabbitmq
- db
- app
- redis
volumes:
- .:/usr/src/fastfood
command: ["celery", "-A", "bg_tasks.bg_task:celery_app_google", "beat", "--loglevel=info"]
rabbitmq:
container_name: rabbit
image: "rabbitmq:management"
ports:
- 5672:5672

13
creds.json Normal file
View File

@ -0,0 +1,13 @@
{
"type": "service_account",
"project_id": "psyched-ceiling-413920",
"private_key_id": "d19492eea6a030092cf8ad767b62d7909734ccb1",
"private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDPP2UrfDE+UOlD\nA761Jemy1IKKdIanRKXSziGvDYJ7zbY5gPFxY8Vo+9fsh1oumvROXqEH4+1LiR0J\nnUiy33R1KDOHETeP4H/FJ3u8+gHoL8wsA7SN5pIX9AR8AyBZCBjgSSp+KJchfrp/\nWN2qnFAbgp248QPvmG7/wLzBNnVsAQhQULLKqSc46hbiZ8Jt7t0ajJgOFRJSp2wP\nT5VE5k737w6b4OH8mUnhw7VK04Wk6DBmQhN1jrnxmMxmdG2hSM2zR824RIMBBs/O\n1dF+5Vkav0tgja/tVqm41Aaa2vgPRACP6bpF13YS+8C1lzw6s+7M/VdE5TH5NXRU\nvChuRknBAgMBAAECggEABmuckna0krVsawaXhLaQ30DsLf5w9hdLTvDy6CCuO9Aw\nPKb//9UNNmjMKD4rlQNY1YFS6jbxZNZRrIC7aftwQOGE2mKuIMBl6+tinuy0tLr/\nl3baS+22VZyyG36ILNrqZJ8epGm08CEsNVYRKKwS0x3aXZKFnnlnqaeYn2CUzdqa\na9iNZqrdXdRt4O7KVP7IfdNi11WuOL4epmHwBBYmCxiN0Z2KAIYvS6AcflYWtYTZ\npsBFjCQexqS37PdUyyQX9E/gKwqNZmahYwIC3vsCMCLdQQ93iODYni7LKsG0vvls\nwz03TtlMmZpMJJQGkALeqlv7jeyj+oRuqg6gjs2moQKBgQDxuDt3u1rDWhTJ50bD\nAp5T1LaiV0/+lu29ElTmYpa0RF1tlHvrndFm/MrdUjpzP4/VISmRkP3bmAgwPP6p\nYeALqQXCCGJtl44LG6D9VIOCOZxntytjLHogY8S3BLpwzKC+VMFsd56ay6wCl03S\nJEnvG10FQX8sFd+6j5qMy73OoQKBgQDbfc7hV4/r7PMaUVWFRqWjLry3dtTErxnM\nTdX30BDtuqMrm+hx0zC85ePcsbx+Zhwneyaxw2ICN5F954mJurBqs9cVaxitNSv1\nX5XjAoZqf3TevufkmSBXog6t/p4FHqAHftHYzwQvQXIINFrmT15PJkbx0lMYEYzw\nPyB7doBHIQKBgGiJi7ZpYYRw1eLH0fOOk1if+uhUqHTrYx/M6MjGRHTryBgXCkzI\n8QIAO9/hqwOirpq2/9pDgXZR1uC90EkC2jlQvPvAUokg7T5ikYpd3Y4ZSkoUjoAS\ngTK20yFvuw4DgVUvJIO7a+14PgjU1MQYC52MEPuv6sbvItX1Oxq/FnRhAoGAHWYK\ncbBSvJzuKtY+CC3gPa0i5cfq07VIVU8Pm7OosM7Q0CR/y88ntgVsscC0qJFwr/EU\ny7aJyBY9TInYqDPzMTeJVXsUwQ5gJut4ngFWk6kitDsJwFqqNFKmeLOj4repY5ee\n79U6kEHJzkOE8VgsH5nW4sjzDEQ9hmhOJ3tFz0ECgYEA8N+7yq1tK/99S8ThYW1J\n9mvUXRhAcFamBYp+8bIBdnQlrM9bGd9j8gYzQj+RBcvfCpVHFM20z8CC8oN0bitk\nh5MEjLBkw1vaywFlA/hcnA8A3g+5/IgHl03Y1tPWnyAtB77vE2M2ThklZ5l4E8eT\nP1vYw9RUSAPjtd43XDTqPNQ=\n-----END PRIVATE KEY-----\n",
"client_email": "tester@psyched-ceiling-413920.iam.gserviceaccount.com",
"client_id": "100697987276606879445",
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
"token_uri": "https://oauth2.googleapis.com/token",
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
"client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/tester%40psyched-ceiling-413920.iam.gserviceaccount.com",
"universe_domain": "googleapis.com"
}

View File

@ -1,7 +1,14 @@
DB_HOST=db
DB_PORT=5432
# PosgreSQL адрес сервера
POSTGRES_HOST=127.0.0.1
POSTGRES_PORT=5432
# Пользователь БД Postgres
POSTGRES_USER=testuser
POSTGRES_PASSWORD=test
# БД рабочая и тестовая
POSTGRES_DB=fastfood_db
POSTGRES_DB_TEST=testdb
REDIS_DB=redis://localhost
POSTGRES_DB_TEST=fastfood_db_test
# Redis
REDIS_HOST=127.0.0.1
REDIS_PORT=6379
REDIS_DB=0

View File

@ -5,6 +5,7 @@ from fastapi import FastAPI
from fastfood.routers.dish import router as dish_router
from fastfood.routers.menu import router as menu_router
from fastfood.routers.submenu import router as submenu_router
from fastfood.routers.summary import router as summary_router
def create_app() -> FastAPI:
@ -15,6 +16,7 @@ def create_app() -> FastAPI:
app.include_router(menu_router)
app.include_router(submenu_router)
app.include_router(dish_router)
app.include_router(summary_router)
def custom_openapi():
with open('openapi.json') as openapi:

View File

@ -4,13 +4,17 @@ from pydantic_settings import BaseSettings, SettingsConfigDict
class Settings(BaseSettings):
DB_HOST: str = ''
DB_PORT: int = 5432
# PostgreSQL config
POSTGRES_HOST: str = ''
POSTGRES_PORT: int = 5432
POSTGRES_DB: str = ''
POSTGRES_PASSWORD: str = ''
POSTGRES_USER: str = ''
POSTGRES_DB_TEST: str = ''
REDIS_DB: str = ''
# Redis config
REDIS_HOST: str = ''
REDIS_PORT: int = 6379
REDIS_DB: int = 0
@property
def DATABASE_URL_asyncpg(self) -> str:
@ -18,19 +22,18 @@ class Settings(BaseSettings):
Return the database connection string required by SQLAlchemy
"""
# Check whether we are running inside Docker
file_path = '/usr/src/RUN_IN_DOCKER'
if os.path.exists(file_path):
return (
'postgresql+asyncpg://'
f'{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}'
f'@db:{self.DB_PORT}/{self.POSTGRES_DB}'
f'@db:5432/{self.POSTGRES_DB}'
)
return (
'postgresql+asyncpg://'
f'{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}'
f'@{self.DB_HOST}:{self.DB_PORT}/{self.POSTGRES_DB}'
f'@{self.POSTGRES_HOST}:{self.POSTGRES_PORT}/{self.POSTGRES_DB}'
)
@property
@ -43,22 +46,36 @@ class Settings(BaseSettings):
return (
'postgresql+asyncpg://'
f'{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}'
f'@db:{self.DB_PORT}/{self.POSTGRES_DB_TEST}'
f'@db:5432/{self.POSTGRES_DB_TEST}'
)
return (
'postgresql+asyncpg://'
f'{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}'
f'@{self.DB_HOST}:{self.DB_PORT}/{self.POSTGRES_DB_TEST}'
f'@{self.POSTGRES_HOST}:{self.POSTGRES_PORT}/{self.POSTGRES_DB_TEST}'
)
@property
def REDIS_URL(self):
"""
Return the Redis connection string
"""
file_path = '/usr/src/RUN_IN_DOCKER'
if os.path.exists(file_path):
return 'redis://redis:6379/0'
return self.REDIS_DB
return f'redis://{self.REDIS_HOST}:{self.REDIS_PORT}/{self.REDIS_DB}'
@property
def REBBITMQ_URL(self):
"""
Return the RabbitMQ connection string
"""
file_path = '/usr/src/RUN_IN_DOCKER'
if os.path.exists(file_path):
return 'amqp://guest:guest@rabbitmq'
return 'amqp://guest:guest@127.0.0.1'
model_config = SettingsConfigDict(env_file='.env')
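
A quick example, not part of the diff, of what these properties resolve to outside Docker (i.e. when /usr/src/RUN_IN_DOCKER is absent), assuming the values from example.env above:

from fastfood.config import settings

print(settings.DATABASE_URL_asyncpg)  # postgresql+asyncpg://testuser:test@127.0.0.1:5432/fastfood_db
print(settings.REDIS_URL)             # redis://127.0.0.1:6379/0
print(settings.REBBITMQ_URL)          # amqp://guest:guest@127.0.0.1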

View File

@ -12,17 +12,17 @@ def get_key(level: str, **kwargs) -> str:
case 'menus':
return 'MENUS'
case 'menu':
return f"{kwargs.get('menu_id')}"
return f"MENUS:{kwargs.get('menu_id')}"
case 'submenus':
return f"{kwargs.get('menu_id')}:SUBMENUS"
return f"MENUS:{kwargs.get('menu_id')}:SUBMENUS"
case 'submenu':
return f"{kwargs.get('menu_id')}:{kwargs.get('submenu_id')}"
return f"MENUS:{kwargs.get('menu_id')}:{kwargs.get('submenu_id')}"
case 'dishes':
return f"{kwargs.get('menu_id')}:{kwargs.get('submenu_id')}:DISHES"
return f"MENUS:{kwargs.get('menu_id')}:{kwargs.get('submenu_id')}:DISHES"
case 'dish':
return f"{kwargs.get('menu_id')}:{kwargs.get('submenu_id')}:{kwargs.get('dish_id')}"
return f"MENUS:{kwargs.get('menu_id')}:{kwargs.get('submenu_id')}:{kwargs.get('dish_id')}"
return 'abracadabra'
return 'summary'
class RedisRepository:
@ -63,3 +63,4 @@ class RedisRepository:
async def invalidate(self, key: str, bg_task: BackgroundTasks) -> None:
await self.clear_cache(f'{key}*', bg_task)
await self.clear_cache(f'{get_key("menus")}*', bg_task)
await self.clear_cache('summary', bg_task)
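
The MENUS prefix added here is what lets invalidate() clear whole subtrees with a single pattern. A few examples, not part of the diff, of the keys get_key now produces:

from fastfood.repository.redis import get_key

get_key('menu', menu_id='42')                    # 'MENUS:42'
get_key('dishes', menu_id='42', submenu_id='7')  # 'MENUS:42:7:DISHES'
get_key('summary')                               # 'summary' (fallback used for the new summary cache)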

View File

@ -0,0 +1,19 @@
from fastapi import Depends
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import selectinload
from fastfood.dbase import get_async_session
from fastfood.models import Menu, SubMenu
class SummaryRepository:
def __init__(self, session: AsyncSession = Depends(get_async_session)) -> None:
self.db = session
async def get_data(self):
query = select(Menu).options(
selectinload(Menu.submenus).selectinload(SubMenu.dishes)
)
data = await self.db.execute(query)
return [x for x in data.scalars().all()]

View File

@ -63,7 +63,7 @@ async def get_dish(
if not result:
raise HTTPException(
status_code=404,
detail=f'Dish with UUID={dish_id} does not exist, access is not possible',
detail='dish not found',
)
return result
@ -89,7 +89,7 @@ async def update_dish(
if not result:
raise HTTPException(
status_code=404,
detail=f'Dish with UUID={dish_id} does not exist, update is not possible',
detail='dish not found',
)
return result

View File

@ -50,7 +50,7 @@ async def get_menu(
if not result:
raise HTTPException(
status_code=404,
detail=f'Menu with UUID={menu_id} does not exist, access is not possible',
detail='menu not found',
)
return result
@ -72,7 +72,7 @@ async def update_menu(
if not result:
raise HTTPException(
status_code=404,
detail=f'Menu with UUID={menu_id} does not exist, update is not possible',
detail='menu not found',
)
return result

View File

@ -59,7 +59,7 @@ async def get_submenu(
if not result:
raise HTTPException(
status_code=404,
detail=f'Submenu with UUID={submenu_id} does not exist, access is not possible',
detail='submenu not found',
)
return result
@ -83,7 +83,7 @@ async def update_submenu(
if not result:
raise HTTPException(
status_code=404,
detail=f'Submenu with UUID={submenu_id} does not exist, update is not possible',
detail='submenu not found',
)
return result

View File

@ -0,0 +1,17 @@
from fastapi import APIRouter, BackgroundTasks, Depends
from fastfood.schemas import MenuSummary
from fastfood.service.summary import SummaryService
router = APIRouter(
prefix='/api/v1/summary',
tags=['summary'],
)
@router.get('/', response_model=list[MenuSummary])
async def get_summary(
sum: SummaryService = Depends(),
background_tasks: BackgroundTasks = BackgroundTasks(),
) -> list[MenuSummary]:
return await sum.read_data()
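
A hedged usage example against the new endpoint, not part of the diff, assuming the app from the compose file is listening on port 8000 and that the Menu schema exposes a title field:

import requests

# Prices in the response already include any DISCONT:<dish_id> values cached in Redis.
resp = requests.get('http://127.0.0.1:8000/api/v1/summary/')
resp.raise_for_status()
for menu in resp.json():
    print(menu['title'], '-', len(menu['submenus']), 'submenus')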

View File

@ -34,3 +34,11 @@ class Dish(DishBase, Menu):
class Dish_db(MenuBase):
price: float
class SubMenuSummary(Menu):
dishes: list[Dish_db]
class MenuSummary(Menu):
submenus: list[SubMenuSummary]

View File

@ -3,6 +3,7 @@ from uuid import UUID
import redis.asyncio as redis # type: ignore
from fastapi import BackgroundTasks, Depends
from fastfood import models
from fastfood.dbase import get_async_redis_client
from fastfood.repository.dish import DishRepository
from fastfood.repository.redis import RedisRepository, get_key
@ -21,6 +22,19 @@ class DishService:
self.bg_tasks = background_tasks
self.key = get_key
async def _get_discont(self, dish) -> dict:
discont = await self.cache.get(f"DISCONT:{str(dish.get('id'))}")
if discont is not None:
discont = float(discont)
dish['price'] = round(dish['price'] - (dish['price'] * discont / 100), 2)
return dish
async def _convert_dish_to_dict(self, row: models.Dish) -> Dish:
dish = row.__dict__
dish = await self._get_discont(dish)
dish['price'] = str(dish['price'])
return Dish(**dish)
async def read_dishes(self, menu_id: UUID, submenu_id: UUID) -> list[Dish]:
cached_dishes = await self.cache.get(
self.key('dishes', menu_id=str(menu_id), submenu_id=str(submenu_id))
@ -31,9 +45,9 @@ class DishService:
data = await self.dish_repo.get_dishes(menu_id, submenu_id)
response = []
for row in data:
dish = row.__dict__
dish['price'] = str(dish['price'])
response.append(Dish(**dish))
dish = await self._convert_dish_to_dict(row)
response.append(dish)
await self.cache.set(
self.key(
'dishes',
@ -57,9 +71,7 @@ class DishService:
submenu_id,
dish_db,
)
dish = data.__dict__
dish['price'] = str(dish['price'])
dish = Dish(**dish)
dish = await self._convert_dish_to_dict(data)
await self.cache.set(
self.key('dish', menu_id=str(menu_id), submenu_id=str(submenu_id)),
dish,
@ -86,9 +98,8 @@ class DishService:
data = await self.dish_repo.get_dish_item(menu_id, submenu_id, dish_id)
if data is None:
return None
dish = data.__dict__
dish['price'] = str(dish['price'])
dish = Dish(**dish)
dish = await self._convert_dish_to_dict(data)
await self.cache.set(
self.key(
'dish',
@ -112,9 +123,7 @@ class DishService:
if data is None:
return None
dish = data.__dict__
dish['price'] = str(dish['price'])
dish = Dish(**dish)
dish = await self._convert_dish_to_dict(data)
await self.cache.set(
self.key(
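
The discount arithmetic in _get_discont, worked through on hypothetical numbers (not part of the diff):

# With a dish price of 100.00 and a cached DISCONT value of 15 (percent):
price = 100.00
discont = 15.0
price = round(price - (price * discont / 100), 2)  # 85.0, later serialized as the string '85.0'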

View File

@ -0,0 +1,81 @@
import redis.asyncio as redis # type: ignore
from fastapi import BackgroundTasks, Depends
from fastfood.dbase import get_async_redis_client
from fastfood.repository.redis import RedisRepository, get_key
from fastfood.repository.summary import SummaryRepository
from fastfood.schemas import DishBase, MenuSummary, SubMenuSummary
class SummaryService:
def __init__(
self,
sum_repo: SummaryRepository = Depends(),
redis_client: redis.Redis = Depends(get_async_redis_client),
background_tasks: BackgroundTasks = None,
) -> None:
self.sum_repo = sum_repo
self.cache = RedisRepository(redis_client)
self.key = get_key
self.bg_tasks = background_tasks
async def read_data(self):
result = []
async def dump_to_schema(
schema, obj
) -> MenuSummary | SubMenuSummary | DishBase:
"""Функция преобразует объект SQLAlchemy к Pydantic модели
Входящие параметры
schema: Pydantic модель
obj: ORM объект
Возвращаемые данные
schema: MenuSummary | SubMenuSummary | DishBase
"""
obj = obj.__dict__
obj = {k: v for k, v in obj.items() if not k.startswith('_')}
if 'price' in obj.keys():
discont = await self.cache.get(f"DISCONT:{str(obj.get('id'))}")
if discont is not None:
try:
discont = float(discont)
except Exception:
discont = 0.0
obj['price'] = round(
obj['price'] - (obj['price'] * discont / 100), 2
)
obj['price'] = str(obj['price'])
return schema(**obj)
cached_data = await self.cache.get(self.key('summary'))
if cached_data is not None:
return cached_data
data = await self.sum_repo.get_data()
for menu in data:
menus_res = await dump_to_schema(MenuSummary, menu)
menus_res.submenus = []
for sub in menu.submenus:
sub_res = await dump_to_schema(SubMenuSummary, sub)
sub_res.dishes = []
for dish in sub.dishes:
dish_res = await dump_to_schema(DishBase, dish)
sub_res.dishes.append(dish_res)
menus_res.submenus.append(sub_res)
result.append(menus_res)
await self.cache.set(self.key('summary'), result, self.bg_tasks)
return result

View File

@ -1,12 +1,28 @@
import asyncio
import multiprocessing
import sys
from subprocess import Popen
import uvicorn
from fastfood.repository import create_db_and_tables
loop = asyncio.get_event_loop()
def run_app():
def start_celery_worker() -> None:
Popen(['celery', '-A', 'bg_tasks.bg_task.celery_app', 'worker', '--loglevel=info'])
def start_celery_beat() -> None:
Popen(['celery', '-A', 'bg_tasks.bg_task.celery_app', 'beat', '--loglevel=info'])
celery_worker_process = multiprocessing.Process(target=start_celery_worker)
celery_beat_process = multiprocessing.Process(target=start_celery_beat)
async def run_app() -> None:
"""
Start FastAPI
"""
@ -20,15 +36,24 @@ def run_app():
)
async def recreate():
async def recreate() -> None:
"""Удаление и создание таблиц в базе данных для тестирования"""
await create_db_and_tables()
if __name__ == '__main__':
if '--run-server' in sys.argv:
run_app()
if '--run-docker-server' in sys.argv:
"""Запуск FastAPI в докере. Celery запускается в отдельном контейнере"""
loop.run_until_complete(recreate())
loop.run_until_complete(run_app())
if '--run-test-server' in sys.argv:
asyncio.run(recreate())
run_app()
if '--run-local-server' in sys.argv:
"""Локальный запуск FastAPI с запуском Celery в отдельных процессах"""
celery_worker_process.start()
celery_beat_process.start()
loop.run_until_complete(recreate())
loop.run_until_complete(run_app())
celery_beat_process.kill()
celery_worker_process.kill()

File diff suppressed because one or more lines are too long

554
poetry.lock generated
View File

@ -1,5 +1,19 @@
# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
[[package]]
name = "amqp"
version = "5.2.0"
description = "Low-level AMQP client for Python (fork of amqplib)."
optional = false
python-versions = ">=3.6"
files = [
{file = "amqp-5.2.0-py3-none-any.whl", hash = "sha256:827cb12fb0baa892aad844fd95258143bce4027fdac4fccddbc43330fd281637"},
{file = "amqp-5.2.0.tar.gz", hash = "sha256:a1ecff425ad063ad42a486c902807d1482311481c8ad95a72694b2975e75f7fd"},
]
[package.dependencies]
vine = ">=5.0.0,<6.0.0"
[[package]]
name = "annotated-types"
version = "0.6.0"
@ -101,6 +115,83 @@ async-timeout = {version = ">=4.0.3", markers = "python_version < \"3.12.0\""}
docs = ["Sphinx (>=5.3.0,<5.4.0)", "sphinx-rtd-theme (>=1.2.2)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"]
test = ["flake8 (>=6.1,<7.0)", "uvloop (>=0.15.3)"]
[[package]]
name = "billiard"
version = "4.2.0"
description = "Python multiprocessing fork with improvements and bugfixes"
optional = false
python-versions = ">=3.7"
files = [
{file = "billiard-4.2.0-py3-none-any.whl", hash = "sha256:07aa978b308f334ff8282bd4a746e681b3513db5c9a514cbdd810cbbdc19714d"},
{file = "billiard-4.2.0.tar.gz", hash = "sha256:9a3c3184cb275aa17a732f93f65b20c525d3d9f253722d26a82194803ade5a2c"},
]
[[package]]
name = "cachetools"
version = "5.3.2"
description = "Extensible memoizing collections and decorators"
optional = false
python-versions = ">=3.7"
files = [
{file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"},
{file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"},
]
[[package]]
name = "celery"
version = "5.3.6"
description = "Distributed Task Queue."
optional = false
python-versions = ">=3.8"
files = [
{file = "celery-5.3.6-py3-none-any.whl", hash = "sha256:9da4ea0118d232ce97dff5ed4974587fb1c0ff5c10042eb15278487cdd27d1af"},
{file = "celery-5.3.6.tar.gz", hash = "sha256:870cc71d737c0200c397290d730344cc991d13a057534353d124c9380267aab9"},
]
[package.dependencies]
billiard = ">=4.2.0,<5.0"
click = ">=8.1.2,<9.0"
click-didyoumean = ">=0.3.0"
click-plugins = ">=1.1.1"
click-repl = ">=0.2.0"
kombu = ">=5.3.4,<6.0"
python-dateutil = ">=2.8.2"
tzdata = ">=2022.7"
vine = ">=5.1.0,<6.0"
[package.extras]
arangodb = ["pyArango (>=2.0.2)"]
auth = ["cryptography (==41.0.5)"]
azureblockblob = ["azure-storage-blob (>=12.15.0)"]
brotli = ["brotli (>=1.0.0)", "brotlipy (>=0.7.0)"]
cassandra = ["cassandra-driver (>=3.25.0,<4)"]
consul = ["python-consul2 (==0.1.5)"]
cosmosdbsql = ["pydocumentdb (==2.3.5)"]
couchbase = ["couchbase (>=3.0.0)"]
couchdb = ["pycouchdb (==1.14.2)"]
django = ["Django (>=2.2.28)"]
dynamodb = ["boto3 (>=1.26.143)"]
elasticsearch = ["elastic-transport (<=8.10.0)", "elasticsearch (<=8.11.0)"]
eventlet = ["eventlet (>=0.32.0)"]
gevent = ["gevent (>=1.5.0)"]
librabbitmq = ["librabbitmq (>=2.0.0)"]
memcache = ["pylibmc (==1.6.3)"]
mongodb = ["pymongo[srv] (>=4.0.2)"]
msgpack = ["msgpack (==1.0.7)"]
pymemcache = ["python-memcached (==1.59)"]
pyro = ["pyro4 (==4.82)"]
pytest = ["pytest-celery (==0.0.0)"]
redis = ["redis (>=4.5.2,!=4.5.5,<6.0.0)"]
s3 = ["boto3 (>=1.26.143)"]
slmq = ["softlayer-messaging (>=1.0.3)"]
solar = ["ephem (==4.1.5)"]
sqlalchemy = ["sqlalchemy (>=1.4.48,<2.1)"]
sqs = ["boto3 (>=1.26.143)", "kombu[sqs] (>=5.3.0)", "pycurl (>=7.43.0.5)", "urllib3 (>=1.26.16)"]
tblib = ["tblib (>=1.3.0)", "tblib (>=1.5.0)"]
yaml = ["PyYAML (>=3.10)"]
zookeeper = ["kazoo (>=1.3.1)"]
zstd = ["zstandard (==0.22.0)"]
[[package]]
name = "certifi"
version = "2024.2.2"
@ -187,6 +278,105 @@ files = [
{file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"},
]
[[package]]
name = "charset-normalizer"
version = "3.3.2"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
optional = false
python-versions = ">=3.7.0"
files = [
{file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"},
{file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"},
{file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"},
{file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"},
{file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"},
{file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"},
{file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"},
{file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"},
{file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"},
{file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"},
{file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"},
{file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"},
{file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"},
{file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"},
{file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"},
{file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"},
{file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"},
{file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"},
{file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"},
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"},
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"},
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"},
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"},
{file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"},
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"},
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"},
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"},
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"},
{file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"},
{file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"},
{file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"},
{file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"},
{file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"},
{file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"},
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"},
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"},
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"},
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"},
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"},
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"},
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"},
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"},
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"},
{file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"},
{file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"},
{file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"},
{file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"},
{file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"},
{file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"},
{file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"},
{file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"},
{file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"},
{file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"},
{file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"},
{file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"},
{file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"},
{file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"},
{file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"},
{file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"},
{file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"},
{file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"},
{file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"},
{file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"},
{file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"},
{file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"},
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"},
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"},
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"},
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"},
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"},
{file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"},
{file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"},
{file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"},
{file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"},
{file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"},
{file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"},
{file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"},
{file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"},
]
[[package]]
name = "click"
version = "8.1.7"
@ -201,6 +391,55 @@ files = [
[package.dependencies]
colorama = {version = "*", markers = "platform_system == \"Windows\""}
[[package]]
name = "click-didyoumean"
version = "0.3.0"
description = "Enables git-like *did-you-mean* feature in click"
optional = false
python-versions = ">=3.6.2,<4.0.0"
files = [
{file = "click-didyoumean-0.3.0.tar.gz", hash = "sha256:f184f0d851d96b6d29297354ed981b7dd71df7ff500d82fa6d11f0856bee8035"},
{file = "click_didyoumean-0.3.0-py3-none-any.whl", hash = "sha256:a0713dc7a1de3f06bc0df5a9567ad19ead2d3d5689b434768a6145bff77c0667"},
]
[package.dependencies]
click = ">=7"
[[package]]
name = "click-plugins"
version = "1.1.1"
description = "An extension module for click to enable registering CLI commands via setuptools entry-points."
optional = false
python-versions = "*"
files = [
{file = "click-plugins-1.1.1.tar.gz", hash = "sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b"},
{file = "click_plugins-1.1.1-py2.py3-none-any.whl", hash = "sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8"},
]
[package.dependencies]
click = ">=4.0"
[package.extras]
dev = ["coveralls", "pytest (>=3.6)", "pytest-cov", "wheel"]
[[package]]
name = "click-repl"
version = "0.3.0"
description = "REPL plugin for Click"
optional = false
python-versions = ">=3.6"
files = [
{file = "click-repl-0.3.0.tar.gz", hash = "sha256:17849c23dba3d667247dc4defe1757fff98694e90fe37474f3feebb69ced26a9"},
{file = "click_repl-0.3.0-py3-none-any.whl", hash = "sha256:fb7e06deb8da8de86180a33a9da97ac316751c094c6899382da7feeeeb51b812"},
]
[package.dependencies]
click = ">=7.0"
prompt-toolkit = ">=3.0.36"
[package.extras]
testing = ["pytest (>=7.2.1)", "pytest-cov (>=4.0.0)", "tox (>=4.4.3)"]
[[package]]
name = "colorama"
version = "0.4.6"
@ -379,6 +618,17 @@ files = [
dnspython = ">=2.0.0"
idna = ">=2.0.0"
[[package]]
name = "et-xmlfile"
version = "1.1.0"
description = "An implementation of lxml.xmlfile for the standard library"
optional = false
python-versions = ">=3.6"
files = [
{file = "et_xmlfile-1.1.0-py3-none-any.whl", hash = "sha256:a2ba85d1d6a74ef63837eed693bcb89c3f752169b0e3e7ae5b16ca5e1b3deada"},
{file = "et_xmlfile-1.1.0.tar.gz", hash = "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"},
]
[[package]]
name = "exceptiongroup"
version = "1.2.0"
@ -428,6 +678,47 @@ docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1
testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"]
typing = ["typing-extensions (>=4.8)"]
[[package]]
name = "google-auth"
version = "2.27.0"
description = "Google Authentication Library"
optional = false
python-versions = ">=3.7"
files = [
{file = "google-auth-2.27.0.tar.gz", hash = "sha256:e863a56ccc2d8efa83df7a80272601e43487fa9a728a376205c86c26aaefa821"},
{file = "google_auth-2.27.0-py2.py3-none-any.whl", hash = "sha256:8e4bad367015430ff253fe49d500fdc3396c1a434db5740828c728e45bcce245"},
]
[package.dependencies]
cachetools = ">=2.0.0,<6.0"
pyasn1-modules = ">=0.2.1"
rsa = ">=3.1.4,<5"
[package.extras]
aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"]
enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"]
pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"]
reauth = ["pyu2f (>=0.1.5)"]
requests = ["requests (>=2.20.0,<3.0.0.dev0)"]
[[package]]
name = "google-auth-oauthlib"
version = "1.2.0"
description = "Google Authentication Library"
optional = false
python-versions = ">=3.6"
files = [
{file = "google-auth-oauthlib-1.2.0.tar.gz", hash = "sha256:292d2d3783349f2b0734a0a0207b1e1e322ac193c2c09d8f7c613fb7cc501ea8"},
{file = "google_auth_oauthlib-1.2.0-py2.py3-none-any.whl", hash = "sha256:297c1ce4cb13a99b5834c74a1fe03252e1e499716718b190f56bcb9c4abc4faf"},
]
[package.dependencies]
google-auth = ">=2.15.0"
requests-oauthlib = ">=0.7.0"
[package.extras]
tool = ["click (>=6.0.0)"]
[[package]]
name = "greenlet"
version = "3.0.3"
@ -499,6 +790,22 @@ files = [
docs = ["Sphinx", "furo"]
test = ["objgraph", "psutil"]
[[package]]
name = "gspread"
version = "6.0.1"
description = "Google Spreadsheets Python API"
optional = false
python-versions = ">=3.8"
files = [
{file = "gspread-6.0.1-py3-none-any.whl", hash = "sha256:6c3af32b753fe75d9dd513ea9e088e9e043e09b9e3bf04d61d77213f37e67b79"},
{file = "gspread-6.0.1.tar.gz", hash = "sha256:8c8bf83be676a019d3a483455d8b17b442f2acfc620172f245422ca4fc960dd0"},
]
[package.dependencies]
google-auth = ">=1.12.0"
google-auth-oauthlib = ">=0.4.1"
StrEnum = "0.4.15"
[[package]]
name = "h11"
version = "0.14.0"
@ -591,6 +898,38 @@ files = [
{file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
]
[[package]]
name = "kombu"
version = "5.3.5"
description = "Messaging library for Python."
optional = false
python-versions = ">=3.8"
files = [
{file = "kombu-5.3.5-py3-none-any.whl", hash = "sha256:0eac1bbb464afe6fb0924b21bf79460416d25d8abc52546d4f16cad94f789488"},
{file = "kombu-5.3.5.tar.gz", hash = "sha256:30e470f1a6b49c70dc6f6d13c3e4cc4e178aa6c469ceb6bcd55645385fc84b93"},
]
[package.dependencies]
amqp = ">=5.1.1,<6.0.0"
vine = "*"
[package.extras]
azureservicebus = ["azure-servicebus (>=7.10.0)"]
azurestoragequeues = ["azure-identity (>=1.12.0)", "azure-storage-queue (>=12.6.0)"]
confluentkafka = ["confluent-kafka (>=2.2.0)"]
consul = ["python-consul2"]
librabbitmq = ["librabbitmq (>=2.0.0)"]
mongodb = ["pymongo (>=4.1.1)"]
msgpack = ["msgpack"]
pyro = ["pyro4"]
qpid = ["qpid-python (>=0.26)", "qpid-tools (>=0.26)"]
redis = ["redis (>=4.5.2,!=4.5.5,<6.0.0)"]
slmq = ["softlayer-messaging (>=1.0.3)"]
sqlalchemy = ["sqlalchemy (>=1.4.48,<2.1)"]
sqs = ["boto3 (>=1.26.143)", "pycurl (>=7.43.0.5)", "urllib3 (>=1.26.16)"]
yaml = ["PyYAML (>=3.10)"]
zookeeper = ["kazoo (>=2.8.0)"]
[[package]]
name = "mypy"
version = "1.8.0"
@ -663,6 +1002,36 @@ files = [
[package.dependencies]
setuptools = "*"
[[package]]
name = "oauthlib"
version = "3.2.2"
description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic"
optional = false
python-versions = ">=3.6"
files = [
{file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"},
{file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"},
]
[package.extras]
rsa = ["cryptography (>=3.0.0)"]
signals = ["blinker (>=1.4.0)"]
signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"]
[[package]]
name = "openpyxl"
version = "3.1.2"
description = "A Python library to read/write Excel 2010 xlsx/xlsm files"
optional = false
python-versions = ">=3.6"
files = [
{file = "openpyxl-3.1.2-py2.py3-none-any.whl", hash = "sha256:f91456ead12ab3c6c2e9491cf33ba6d08357d802192379bb482f1033ade496f5"},
{file = "openpyxl-3.1.2.tar.gz", hash = "sha256:a6f5977418eff3b2d5500d54d9db50c8277a368436f4e4f8ddb1be3422870184"},
]
[package.dependencies]
et-xmlfile = "*"
[[package]]
name = "packaging"
version = "23.2"
@ -722,6 +1091,45 @@ nodeenv = ">=0.11.1"
pyyaml = ">=5.1"
virtualenv = ">=20.10.0"
[[package]]
name = "prompt-toolkit"
version = "3.0.43"
description = "Library for building powerful interactive command lines in Python"
optional = false
python-versions = ">=3.7.0"
files = [
{file = "prompt_toolkit-3.0.43-py3-none-any.whl", hash = "sha256:a11a29cb3bf0a28a387fe5122cdb649816a957cd9261dcedf8c9f1fef33eacf6"},
{file = "prompt_toolkit-3.0.43.tar.gz", hash = "sha256:3527b7af26106cbc65a040bcc84839a3566ec1b051bb0bfe953631e704b0ff7d"},
]
[package.dependencies]
wcwidth = "*"
[[package]]
name = "pyasn1"
version = "0.5.1"
description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
files = [
{file = "pyasn1-0.5.1-py2.py3-none-any.whl", hash = "sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58"},
{file = "pyasn1-0.5.1.tar.gz", hash = "sha256:6d391a96e59b23130a5cfa74d6fd7f388dbbe26cc8f1edf39fdddf08d9d6676c"},
]
[[package]]
name = "pyasn1-modules"
version = "0.3.0"
description = "A collection of ASN.1-based protocols modules"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
files = [
{file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"},
{file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"},
]
[package.dependencies]
pyasn1 = ">=0.4.6,<0.6.0"
[[package]]
name = "pycparser"
version = "2.21"
@ -916,6 +1324,20 @@ pytest = ">=4.6"
[package.extras]
testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"]
[[package]]
name = "python-dateutil"
version = "2.8.2"
description = "Extensions to the standard Python datetime module"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
files = [
{file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
{file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
]
[package.dependencies]
six = ">=1.5"
[[package]]
name = "python-dotenv"
version = "1.0.1"
@ -1007,6 +1429,59 @@ async-timeout = {version = ">=4.0.2", markers = "python_full_version <= \"3.11.2
hiredis = ["hiredis (>=1.0.0)"]
ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"]
[[package]]
name = "requests"
version = "2.31.0"
description = "Python HTTP for Humans."
optional = false
python-versions = ">=3.7"
files = [
{file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"},
{file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"},
]
[package.dependencies]
certifi = ">=2017.4.17"
charset-normalizer = ">=2,<4"
idna = ">=2.5,<4"
urllib3 = ">=1.21.1,<3"
[package.extras]
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "requests-oauthlib"
version = "1.3.1"
description = "OAuthlib authentication support for Requests."
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
files = [
{file = "requests-oauthlib-1.3.1.tar.gz", hash = "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"},
{file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"},
]
[package.dependencies]
oauthlib = ">=3.0.0"
requests = ">=2.0.0"
[package.extras]
rsa = ["oauthlib[signedtoken] (>=3.0.0)"]
[[package]]
name = "rsa"
version = "4.9"
description = "Pure-Python RSA implementation"
optional = false
python-versions = ">=3.6,<4"
files = [
{file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"},
{file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"},
]
[package.dependencies]
pyasn1 = ">=0.1.3"
[[package]]
name = "setuptools"
version = "69.0.3"
@ -1023,6 +1498,17 @@ docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments
testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
[[package]]
name = "six"
version = "1.16.0"
description = "Python 2 and 3 compatibility utilities"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
files = [
{file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
]
[[package]]
name = "sniffio"
version = "1.3.0"
@ -1138,6 +1624,22 @@ anyio = ">=3.4.0,<5"
[package.extras]
full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart", "pyyaml"]
[[package]]
name = "strenum"
version = "0.4.15"
description = "An Enum that inherits from str."
optional = false
python-versions = "*"
files = [
{file = "StrEnum-0.4.15-py3-none-any.whl", hash = "sha256:a30cda4af7cc6b5bf52c8055bc4bf4b2b6b14a93b574626da33df53cf7740659"},
{file = "StrEnum-0.4.15.tar.gz", hash = "sha256:878fb5ab705442070e4dd1929bb5e2249511c0bcf2b0eeacf3bcd80875c82eff"},
]
[package.extras]
docs = ["myst-parser[linkify]", "sphinx", "sphinx-rtd-theme"]
release = ["twine"]
test = ["pylint", "pytest", "pytest-black", "pytest-cov", "pytest-pylint"]
[[package]]
name = "tomli"
version = "2.0.1"
@ -1189,6 +1691,34 @@ files = [
{file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"},
]
[[package]]
name = "tzdata"
version = "2023.4"
description = "Provider of IANA time zone data"
optional = false
python-versions = ">=2"
files = [
{file = "tzdata-2023.4-py2.py3-none-any.whl", hash = "sha256:aa3ace4329eeacda5b7beb7ea08ece826c28d761cda36e747cfbf97996d39bf3"},
{file = "tzdata-2023.4.tar.gz", hash = "sha256:dd54c94f294765522c77399649b4fefd95522479a664a0cec87f41bebc6148c9"},
]
[[package]]
name = "urllib3"
version = "2.2.0"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
python-versions = ">=3.8"
files = [
{file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"},
{file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"},
]
[package.extras]
brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
h2 = ["h2 (>=4,<5)"]
socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
zstd = ["zstandard (>=0.18.0)"]
[[package]]
name = "uvicorn"
version = "0.26.0"
@ -1208,6 +1738,17 @@ typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""}
[package.extras]
standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"]
[[package]]
name = "vine"
version = "5.1.0"
description = "Python promises."
optional = false
python-versions = ">=3.6"
files = [
{file = "vine-5.1.0-py3-none-any.whl", hash = "sha256:40fdf3c48b2cfe1c38a49e9ae2da6fda88e4794c810050a728bd7413811fb1dc"},
{file = "vine-5.1.0.tar.gz", hash = "sha256:8b62e981d35c41049211cf62a0a1242d8c1ee9bd15bb196ce38aefd6799e61e0"},
]
[[package]]
name = "virtualenv"
version = "20.25.0"
@ -1228,7 +1769,18 @@ platformdirs = ">=3.9.1,<5"
docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"]
test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"]
[[package]]
name = "wcwidth"
version = "0.2.13"
description = "Measures the displayed width of unicode strings in a terminal"
optional = false
python-versions = "*"
files = [
{file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"},
{file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"},
]
[metadata]
lock-version = "2.0"
python-versions = "^3.10"
content-hash = "106e42984de924817e2dc083ad78699b3411f9aa60de5bb5c1a95ca94a21fda1"
content-hash = "5ad429e191b066c84074106341a72bb5e17ba85c5733acaae3d2ffd2db40e338"


@ -17,6 +17,9 @@ pytest-asyncio = "^0.23.3"
redis = "^4.6.0"
types-redis = "^4.6.0.3"
mypy = "^1.4.1"
celery = "^5.3.6"
openpyxl = "^3.1.2"
gspread = "^6.0.1"
[tool.poetry.group.dev.dependencies]
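
The three runtime dependencies added above back the new background features: celery provides the task queue, openpyxl reads the local menu workbook, and gspread talks to the Google Sheets API. A minimal sketch of how the two readers are typically used, assuming a local workbook path, a service-account credentials file, and a spreadsheet titled 'Menu' (none of which are taken from this diff):

# Sketch only: typical openpyxl / gspread usage; paths and names are assumed.
import gspread
import openpyxl

def read_local_menu(path: str = 'Menu.xlsx') -> list[tuple]:
    """Return every row of the first worksheet as a tuple of cell values."""
    workbook = openpyxl.load_workbook(path, data_only=True)
    return list(workbook.active.iter_rows(values_only=True))

def read_google_menu(creds_path: str = 'credentials.json') -> list[list[str]]:
    """Return every row of the first worksheet of a shared Google Sheet."""
    client = gspread.service_account(filename=creds_path)
    return client.open('Menu').sheet1.get_all_values()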


@ -186,3 +186,11 @@ class Repository:
),
)
return response.status_code
class Summary:
@staticmethod
async def read_summary(ac: AsyncClient) -> tuple[int, list]:
"""Return the status code and parsed JSON of the summary route."""
response: Response = await ac.get(reverse('get_summary'))
return response.status_code, response.json()
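
The read_summary helper above resolves the URL by route name via reverse('get_summary'). That reverse helper is not part of this diff; in a FastAPI/Starlette test suite it usually just wraps url_path_for on the application object, roughly as in this sketch (the fastfood.app import path is an assumption):

# Hedged sketch of a typical reverse() test helper; not shown in this diff.
from fastfood.app import app  # assumed module path

def reverse(route_name: str, **path_params) -> str:
    """Resolve a named route to its URL path, e.g. reverse('get_summary')."""
    return app.url_path_for(route_name, **path_params)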


@ -1,360 +0,0 @@
import pytest
from httpx import AsyncClient
from .repository import Repository as Repo
@pytest.mark.asyncio
async def test_menu_crud_empty(client: AsyncClient) -> None:
"""Тестирование функций меню"""
code, rspn = await Repo.Menu.read_all(client)
assert code == 200
assert rspn == []
@pytest.mark.asyncio
async def test_menu_crud_add(client: AsyncClient) -> None:
"""Тестирование функций меню"""
data = {'title': 'Menu', 'description': None}
code, rspn = await Repo.Menu.write(client, data)
assert code == 201
assert rspn['title'] == 'Menu'
assert rspn['description'] is None
await Repo.Menu.delete(client, rspn)
@pytest.mark.asyncio
async def test_menu_crud_get(client: AsyncClient) -> None:
"""Тестирование функций меню"""
data = {'title': 'Menu', 'description': None}
code, rspn = await Repo.Menu.write(client, data)
code, menu = await Repo.Menu.get(client, {'id': rspn.get('id')})
assert code == 200
assert menu['title'] == rspn['title']
await Repo.Menu.delete(client, menu)
@pytest.mark.asyncio
async def test_menu_crud_update(client: AsyncClient) -> None:
"""Тестирование функций меню"""
data = {'title': 'Menu', 'description': None}
code, rspn = await Repo.Menu.write(client, data)
upd_data = {
'id': rspn.get('id'),
'title': 'upd Menu',
'description': '',
}
code, upd_rspn = await Repo.Menu.update(client, upd_data)
assert code == 200
assert upd_rspn['title'] == 'upd Menu'
await Repo.Menu.delete(client, upd_rspn)
@pytest.mark.asyncio
async def test_menu_crud_delete(client: AsyncClient) -> None:
"""Тестирование функций меню"""
data = {'title': 'Menu', 'description': None}
code, rspn = await Repo.Menu.write(client, data)
code = await Repo.Menu.delete(client, rspn)
assert code == 200
code, rspn = await Repo.Menu.get(client, {'id': rspn.get('id')})
assert code == 404
@pytest.mark.asyncio
async def test_menu_crud_get_all(client: AsyncClient) -> None:
"""Тестирование функций меню"""
code, rspn = await Repo.Menu.read_all(client)
assert code == 200
assert rspn == []
data = {'title': 'Menu', 'description': None}
code, rspn = await Repo.Menu.write(client, data)
code, upd_rspn = await Repo.Menu.read_all(client)
assert code == 200
assert upd_rspn == [rspn]
await Repo.Menu.delete(client, rspn)
@pytest.mark.asyncio
async def test_submenus_get_all(client) -> None:
# Create a menu and check the response
menu = {'title': 'Menu', 'description': 'main menu'}
code, rspn = await Repo.Menu.write(client, menu)
assert code == 201
menu.update(rspn)
# Check that the submenu list is empty
code, rspn = await Repo.Submenu.read_all(client, menu)
assert code == 200
assert rspn == []
# Create a submenu and check the response
submenu = {
'title': 'Submenu',
'description': 'submenu',
'parent_menu': menu['id'],
}
code, rspn = await Repo.Submenu.write(client, menu, submenu)
submenu.update(rspn)
# Check that the submenu appears in the list
code, upd_rspn = await Repo.Submenu.read_all(client, menu)
assert code == 200
assert upd_rspn == [rspn]
# Clean up
await Repo.Submenu.delete(client, menu, submenu)
await Repo.Menu.delete(client, menu)
@pytest.mark.asyncio
async def test_submenus_add(client) -> None:
# Create a menu and check the response
menu = {'title': 'Menu', 'description': 'main menu'}
code, rspn = await Repo.Menu.write(client, menu)
menu.update(rspn)
# Create a submenu and check the response
submenu = {
'title': 'Submenu',
'description': 'submenu',
'parent_menu': menu['id'],
}
code, rspn = await Repo.Submenu.write(client, menu, submenu)
assert code == 201
submenu.update(rspn)
# Clean up
await Repo.Submenu.delete(client, menu, submenu)
await Repo.Menu.delete(client, menu)
@pytest.mark.asyncio
async def test_submenus_update(client) -> None:
# Create a menu and check the response
menu = {'title': 'Menu', 'description': 'main menu'}
code, rspn = await Repo.Menu.write(client, menu)
menu.update(rspn)
# Create a submenu and check the response
submenu = {
'title': 'Submenu',
'description': 'submenu',
'parent_menu': menu['id'],
}
code, rspn = await Repo.Submenu.write(client, menu, submenu)
submenu.update(rspn)
# Update the submenu and verify
submenu['title'] = 'updated_submenu'
code, rspn = await Repo.Submenu.update(client, menu, submenu)
assert code == 200
assert submenu['title'] == rspn['title']
submenu.update(rspn)
# Clean up
await Repo.Submenu.delete(client, menu, submenu)
await Repo.Menu.delete(client, menu)
@pytest.mark.asyncio
async def test_submenus_delete(client) -> None:
# Create a menu and check the response
menu = {'title': 'Menu', 'description': 'main menu'}
code, rspn = await Repo.Menu.write(client, menu)
menu.update(rspn)
# Create a submenu and check the response
submenu = {
'title': 'Submenu',
'description': 'submenu',
'parent_menu': menu['id'],
}
code, rspn = await Repo.Submenu.write(client, menu, submenu)
submenu.update(rspn)
# Delete the submenu
code = await Repo.Submenu.delete(client, menu, submenu)
assert code == 200
# Verify the deleted submenu returns 404
code, rspn = await Repo.Submenu.get(client, menu, submenu)
assert code == 404
# Clean up
await Repo.Menu.delete(client, menu)
@pytest.mark.asyncio
async def test_dishes_get_all(client: AsyncClient) -> None:
# Create a menu and check the response
menu = {
'title': 'Menu',
'description': 'main menu',
}
code, rspn = await Repo.Menu.write(client, menu)
menu.update(rspn)
# Create a submenu and check the response
submenu = {
'title': 'Submenu',
'description': 'submenu',
'parent_menu': menu['id'],
}
code, rspn = await Repo.Submenu.write(client, menu, submenu)
submenu.update(rspn)
# Check that the submenu has no dishes yet
code, rspn = await Repo.Dish.read_all(client, menu, submenu)
assert code == 200
assert rspn == []
# Add a dish
dish = {
'title': 'dish',
'description': 'some dish',
'price': '12.5',
'parent_submenu': submenu['id'],
}
code, rspn = await Repo.Dish.write(client, menu, submenu, dish)
assert code == 201
dish.update(rspn)
code, upd_rspn = await Repo.Dish.read_all(client, menu, submenu)
assert code == 200
# Clean up
await Repo.Dish.delete(client, menu, submenu, dish)
await Repo.Submenu.delete(client, menu, submenu)
await Repo.Menu.delete(client, menu)
@pytest.mark.asyncio
async def test_dishes_add(client: AsyncClient) -> None:
# Create a menu and check the response
menu = {
'title': 'Menu',
'description': 'main menu',
}
code, rspn = await Repo.Menu.write(client, menu)
menu.update(rspn)
# Create a submenu and check the response
submenu = {
'title': 'Submenu',
'description': 'submenu',
'parent_menu': menu['id'],
}
code, rspn = await Repo.Submenu.write(client, menu, submenu)
submenu.update(rspn)
# Add a dish
dish = {
'title': 'dish',
'description': 'some dish',
'price': '12.5',
'parent_submenu': submenu['id'],
}
code, rspn = await Repo.Dish.write(client, menu, submenu, dish)
assert code == 201
dish.update(rspn)
# Fetch the dish and verify
code, rspn = await Repo.Dish.get(client, menu, submenu, dish)
assert code == 200
assert rspn['title'] == dish['title']
# Clean up
await Repo.Dish.delete(client, menu, submenu, dish)
await Repo.Submenu.delete(client, menu, submenu)
await Repo.Menu.delete(client, menu)
@pytest.mark.asyncio
async def test_dishes_update(client: AsyncClient) -> None:
# Create a menu and check the response
menu = {
'title': 'Menu',
'description': 'main menu',
}
code, rspn = await Repo.Menu.write(client, menu)
menu.update(rspn)
# Create a submenu and check the response
submenu = {
'title': 'Submenu',
'description': 'submenu',
'parent_menu': menu['id'],
}
code, rspn = await Repo.Submenu.write(client, menu, submenu)
submenu.update(rspn)
# Add a dish
dish = {
'title': 'dish',
'description': 'some dish',
'price': '12.5',
'parent_submenu': submenu['id'],
}
code, rspn = await Repo.Dish.write(client, menu, submenu, dish)
dish.update(rspn)
# Update the dish and verify
dish['title'] = 'updated_dish'
code, rspn = await Repo.Dish.update(client, menu, submenu, dish)
assert code == 200
assert dish['title'] == rspn['title']
dish.update(rspn)
# Clean up
await Repo.Dish.delete(client, menu, submenu, dish)
await Repo.Submenu.delete(client, menu, submenu)
await Repo.Menu.delete(client, menu)
@pytest.mark.asyncio
async def test_dishes_delete(client: AsyncClient) -> None:
# Create a menu and check the response
menu = {
'title': 'Menu',
'description': 'main menu',
}
code, rspn = await Repo.Menu.write(client, menu)
menu.update(rspn)
# Create a submenu and check the response
submenu = {
'title': 'Submenu',
'description': 'submenu',
'parent_menu': menu['id'],
}
code, rspn = await Repo.Submenu.write(client, menu, submenu)
submenu.update(rspn)
# Add a dish
dish = {
'title': 'dish',
'description': 'some dish',
'price': '12.5',
'parent_submenu': submenu['id'],
}
code, rspn = await Repo.Dish.write(client, menu, submenu, dish)
dish.update(rspn)
# Delete the dish
code = await Repo.Dish.delete(client, menu, submenu, dish)
assert code == 200
# Verify the deleted dish returns 404
code, rspn = await Repo.Dish.get(client, menu, submenu, dish)
assert code == 404
# Clean up
await Repo.Submenu.delete(client, menu, submenu)
await Repo.Menu.delete(client, menu)

tests/test_dish.py Normal file

@ -0,0 +1,174 @@
import pytest
from httpx import AsyncClient
from .repository import Repository as Repo
@pytest.mark.asyncio
async def test_dishes_get_all(client: AsyncClient) -> None:
# Create a menu and check the response
menu = {
'title': 'Menu',
'description': 'main menu',
}
code, rspn = await Repo.Menu.write(client, menu)
menu.update(rspn)
# Create a submenu and check the response
submenu = {
'title': 'Submenu',
'description': 'submenu',
'parent_menu': menu['id'],
}
code, rspn = await Repo.Submenu.write(client, menu, submenu)
submenu.update(rspn)
# Check that the submenu has no dishes yet
code, rspn = await Repo.Dish.read_all(client, menu, submenu)
assert code == 200
assert rspn == []
# Add a dish
dish = {
'title': 'dish',
'description': 'some dish',
'price': '12.5',
'parent_submenu': submenu['id'],
}
code, rspn = await Repo.Dish.write(client, menu, submenu, dish)
assert code == 201
dish.update(rspn)
code, upd_rspn = await Repo.Dish.read_all(client, menu, submenu)
assert code == 200
# Clean up
await Repo.Dish.delete(client, menu, submenu, dish)
await Repo.Submenu.delete(client, menu, submenu)
await Repo.Menu.delete(client, menu)
@pytest.mark.asyncio
async def test_dishes_add(client: AsyncClient) -> None:
# Create a menu and check the response
menu = {
'title': 'Menu',
'description': 'main menu',
}
code, rspn = await Repo.Menu.write(client, menu)
menu.update(rspn)
# Create a submenu and check the response
submenu = {
'title': 'Submenu',
'description': 'submenu',
'parent_menu': menu['id'],
}
code, rspn = await Repo.Submenu.write(client, menu, submenu)
submenu.update(rspn)
# Add a dish
dish = {
'title': 'dish',
'description': 'some dish',
'price': '12.5',
'parent_submenu': submenu['id'],
}
code, rspn = await Repo.Dish.write(client, menu, submenu, dish)
assert code == 201
dish.update(rspn)
# Fetch the dish and verify
code, rspn = await Repo.Dish.get(client, menu, submenu, dish)
assert code == 200
assert rspn['title'] == dish['title']
# Clean up
await Repo.Dish.delete(client, menu, submenu, dish)
await Repo.Submenu.delete(client, menu, submenu)
await Repo.Menu.delete(client, menu)
@pytest.mark.asyncio
async def test_dishes_update(client: AsyncClient) -> None:
# Create a menu and check the response
menu = {
'title': 'Menu',
'description': 'main menu',
}
code, rspn = await Repo.Menu.write(client, menu)
menu.update(rspn)
# Create a submenu and check the response
submenu = {
'title': 'Submenu',
'description': 'submenu',
'parent_menu': menu['id'],
}
code, rspn = await Repo.Submenu.write(client, menu, submenu)
submenu.update(rspn)
# Add a dish
dish = {
'title': 'dish',
'description': 'some dish',
'price': '12.5',
'parent_submenu': submenu['id'],
}
code, rspn = await Repo.Dish.write(client, menu, submenu, dish)
dish.update(rspn)
# Update the dish and verify
dish['title'] = 'updated_dish'
code, rspn = await Repo.Dish.update(client, menu, submenu, dish)
assert code == 200
assert dish['title'] == rspn['title']
dish.update(rspn)
# Clean up
await Repo.Dish.delete(client, menu, submenu, dish)
await Repo.Submenu.delete(client, menu, submenu)
await Repo.Menu.delete(client, menu)
@pytest.mark.asyncio
async def test_dishes_delete(client: AsyncClient) -> None:
# Create a menu and check the response
menu = {
'title': 'Menu',
'description': 'main menu',
}
code, rspn = await Repo.Menu.write(client, menu)
menu.update(rspn)
# Create a submenu and check the response
submenu = {
'title': 'Submenu',
'description': 'submenu',
'parent_menu': menu['id'],
}
code, rspn = await Repo.Submenu.write(client, menu, submenu)
submenu.update(rspn)
# Add a dish
dish = {
'title': 'dish',
'description': 'some dish',
'price': '12.5',
'parent_submenu': submenu['id'],
}
code, rspn = await Repo.Dish.write(client, menu, submenu, dish)
dish.update(rspn)
# Delete the dish
code = await Repo.Dish.delete(client, menu, submenu, dish)
assert code == 200
# Verify the deleted dish returns 404
code, rspn = await Repo.Dish.get(client, menu, submenu, dish)
assert code == 404
# Clean up
await Repo.Submenu.delete(client, menu, submenu)
await Repo.Menu.delete(client, menu)

tests/test_menu.py Normal file

@ -0,0 +1,80 @@
import pytest
from httpx import AsyncClient
from .repository import Repository as Repo
@pytest.mark.asyncio
async def test_menu_crud_empty(client: AsyncClient) -> None:
"""Тестирование функций меню"""
code, rspn = await Repo.Menu.read_all(client)
assert code == 200
assert rspn == []
@pytest.mark.asyncio
async def test_menu_crud_add(client: AsyncClient) -> None:
"""Тестирование функций меню"""
data = {'title': 'Menu', 'description': None}
code, rspn = await Repo.Menu.write(client, data)
assert code == 201
assert rspn['title'] == 'Menu'
assert rspn['description'] is None
await Repo.Menu.delete(client, rspn)
@pytest.mark.asyncio
async def test_menu_crud_get(client: AsyncClient) -> None:
"""Тестирование функций меню"""
data = {'title': 'Menu', 'description': None}
code, rspn = await Repo.Menu.write(client, data)
code, menu = await Repo.Menu.get(client, {'id': rspn.get('id')})
assert code == 200
assert menu['title'] == rspn['title']
await Repo.Menu.delete(client, menu)
@pytest.mark.asyncio
async def test_menu_crud_update(client: AsyncClient) -> None:
"""Тестирование функций меню"""
data = {'title': 'Menu', 'description': None}
code, rspn = await Repo.Menu.write(client, data)
upd_data = {
'id': rspn.get('id'),
'title': 'upd Menu',
'description': '',
}
code, upd_rspn = await Repo.Menu.update(client, upd_data)
assert code == 200
assert upd_rspn['title'] == 'upd Menu'
await Repo.Menu.delete(client, upd_rspn)
@pytest.mark.asyncio
async def test_menu_crud_delete(client: AsyncClient) -> None:
"""Тестирование функций меню"""
data = {'title': 'Menu', 'description': None}
code, rspn = await Repo.Menu.write(client, data)
code = await Repo.Menu.delete(client, rspn)
assert code == 200
code, rspn = await Repo.Menu.get(client, {'id': rspn.get('id')})
assert code == 404
@pytest.mark.asyncio
async def test_menu_crud_get_all(client: AsyncClient) -> None:
"""Тестирование функций меню"""
code, rspn = await Repo.Menu.read_all(client)
assert code == 200
assert rspn == []
data = {'title': 'Menu', 'description': None}
code, rspn = await Repo.Menu.write(client, data)
code, upd_rspn = await Repo.Menu.read_all(client)
assert code == 200
assert upd_rspn == [rspn]
await Repo.Menu.delete(client, rspn)

tests/test_submenu.py Normal file

@ -0,0 +1,113 @@
import pytest
from .repository import Repository as Repo
@pytest.mark.asyncio
async def test_submenus_get_all(client) -> None:
# Create a menu and check the response
menu = {'title': 'Menu', 'description': 'main menu'}
code, rspn = await Repo.Menu.write(client, menu)
assert code == 201
menu.update(rspn)
# Check that the submenu list is empty
code, rspn = await Repo.Submenu.read_all(client, menu)
assert code == 200
assert rspn == []
# Create a submenu and check the response
submenu = {
'title': 'Submenu',
'description': 'submenu',
'parent_menu': menu['id'],
}
code, rspn = await Repo.Submenu.write(client, menu, submenu)
submenu.update(rspn)
# Check that the submenu appears in the list
code, upd_rspn = await Repo.Submenu.read_all(client, menu)
assert code == 200
assert upd_rspn == [rspn]
# Clean up
await Repo.Submenu.delete(client, menu, submenu)
await Repo.Menu.delete(client, menu)
@pytest.mark.asyncio
async def test_submenus_add(client) -> None:
# Create a menu and check the response
menu = {'title': 'Menu', 'description': 'main menu'}
code, rspn = await Repo.Menu.write(client, menu)
menu.update(rspn)
# Create a submenu and check the response
submenu = {
'title': 'Submenu',
'description': 'submenu',
'parent_menu': menu['id'],
}
code, rspn = await Repo.Submenu.write(client, menu, submenu)
assert code == 201
submenu.update(rspn)
# Clean up
await Repo.Submenu.delete(client, menu, submenu)
await Repo.Menu.delete(client, menu)
@pytest.mark.asyncio
async def test_submenus_update(client) -> None:
# Create a menu and check the response
menu = {'title': 'Menu', 'description': 'main menu'}
code, rspn = await Repo.Menu.write(client, menu)
menu.update(rspn)
# Create a submenu and check the response
submenu = {
'title': 'Submenu',
'description': 'submenu',
'parent_menu': menu['id'],
}
code, rspn = await Repo.Submenu.write(client, menu, submenu)
submenu.update(rspn)
# Update the submenu and verify
submenu['title'] = 'updated_submenu'
code, rspn = await Repo.Submenu.update(client, menu, submenu)
assert code == 200
assert submenu['title'] == rspn['title']
submenu.update(rspn)
# Clean up
await Repo.Submenu.delete(client, menu, submenu)
await Repo.Menu.delete(client, menu)
@pytest.mark.asyncio
async def test_submenus_delete(client) -> None:
# Create a menu and check the response
menu = {'title': 'Menu', 'description': 'main menu'}
code, rspn = await Repo.Menu.write(client, menu)
menu.update(rspn)
# Create a submenu and check the response
submenu = {
'title': 'Submenu',
'description': 'submenu',
'parent_menu': menu['id'],
}
code, rspn = await Repo.Submenu.write(client, menu, submenu)
submenu.update(rspn)
# Delete the submenu
code = await Repo.Submenu.delete(client, menu, submenu)
assert code == 200
# Verify the deleted submenu returns 404
code, rspn = await Repo.Submenu.get(client, menu, submenu)
assert code == 404
# Clean up
await Repo.Menu.delete(client, menu)

tests/test_summary.py Normal file

@ -0,0 +1,113 @@
import pytest
from httpx import AsyncClient
from .repository import Repository as Repo
@pytest.mark.asyncio
async def test_summary_with_menu(client: AsyncClient) -> None:
# The summary is empty initially
code, rspn = await Repo.Summary.read_summary(client)
assert code == 200
assert rspn == []
# Create a menu and check the response
menu = {'title': 'Menu', 'description': 'main menu', 'submenus': []}
code, rspn = await Repo.Menu.write(client, menu)
menu.update(rspn)
# Drop keys that are not part of the summary model
del menu['submenus_count']
del menu['dishes_count']
# The summary now contains the menu
code, rspn = await Repo.Summary.read_summary(client)
assert code == 200
assert rspn == [menu]
# Clean up
await Repo.Menu.delete(client, menu)
@pytest.mark.asyncio
async def test_summary_with_submenus(client: AsyncClient) -> None:
# Create a menu and check the response
menu: dict[str, str | list | float] = {
'title': 'Menu',
'description': 'main menu',
'submenus': [],
}
code, rspn = await Repo.Menu.write(client, menu)
menu.update(rspn)
del menu['submenus_count']
del menu['dishes_count']
# Create a submenu and check the response
submenu: dict[str, str | list | float] = {
'title': 'Submenu',
'description': 'submenu',
'parent_menu': menu['id'],
'dishes': list(),
}
code, rspn = await Repo.Submenu.write(client, menu, submenu)
submenu.update(rspn)
del submenu['dishes_count']
del submenu['parent_menu']
menu['submenus'] = [submenu]
# Read the summary and verify it contains the menu with its submenu
code, rspn = await Repo.Summary.read_summary(client)
assert code == 200
assert rspn == [menu]
await Repo.Menu.delete(client, menu)
@pytest.mark.asyncio
async def test_summary_with_dishes(client: AsyncClient) -> None:
# Create a menu and check the response
menu: dict[str, str | list | float] = {
'title': 'Menu',
'description': 'main menu',
'submenus': [],
}
code, rspn = await Repo.Menu.write(client, menu)
menu.update(rspn)
del menu['submenus_count']
del menu['dishes_count']
# Create a submenu and check the response
submenu: dict[str, str | list | float] = {
'title': 'Submenu',
'description': 'submenu',
'parent_menu': menu['id'],
'dishes': [],
}
code, rspn = await Repo.Submenu.write(client, menu, submenu)
submenu.update(rspn)
del submenu['dishes_count']
del submenu['parent_menu']
# Add a dish
dish = {
'title': 'dish',
'description': 'some dish',
'price': '12.5',
'parent_submenu': submenu['id'],
}
code, rspn = await Repo.Dish.write(client, menu, submenu, dish)
dish.update(rspn)
del dish['parent_submenu']
del dish['id']
submenu['dishes'] = [dish]
menu['submenus'] = [submenu]
code, rspn = await Repo.Summary.read_summary(client)
assert code == 200
assert rspn == [menu]
await Repo.Menu.delete(client, menu)
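
Taken together, the assertions and del statements in these summary tests imply a nested response shape: each menu carries a submenus list, each submenu a dishes list, and dishes are returned without their id and parent_submenu keys. An illustrative payload, with placeholder values inferred from the tests rather than from any schema in this diff:

# Illustrative only: the summary shape the assertions above expect.
expected_summary = [
    {
        'id': '<menu uuid>',
        'title': 'Menu',
        'description': 'main menu',
        'submenus': [
            {
                'id': '<submenu uuid>',
                'title': 'Submenu',
                'description': 'submenu',
                'dishes': [
                    # dishes appear without 'id' or 'parent_submenu' keys
                    {
                        'title': 'dish',
                        'description': 'some dish',
                        'price': '<price as returned by the API>',
                    },
                ],
            },
        ],
    },
]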