diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 00000000..d46d3fa0 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,20 @@ +--- +name: Feature request +about: Suggest an idea for this project +title: '' +labels: '' +assignees: '' + +--- + +### **Why?** + +*Зачем выполняется реализация задачи и какую проблему она решает.* + +### **How To Do?** + +*Как мы выполняем реализацию данной задачи, какие действия нужны для достижения цели.* + +### **Where?** + +*Где делаем реализацию задачи, какие сервисы задействованы, ссылка на требования, сcылка на API, скриншоты.* diff --git a/.github/workflows/deploy_stage.yaml b/.github/workflows/deploy_stage.yaml new file mode 100644 index 00000000..79d28bc3 --- /dev/null +++ b/.github/workflows/deploy_stage.yaml @@ -0,0 +1,131 @@ +name: Stage backend build and deploy + +concurrency: + group: ${{ github.ref }} + cancel-in-progress: true + +on: + push: + branches: + - master + - develop + + workflow_dispatch: + +env: + REGISTRY: ghcr.io + REP_OWNER: studio-yandex-practicum + IMAGE_NAME: procharity_back_2.0_backend + DEPLOY_PATH: /procharity2/full + WORK_DIR: /app + +defaults: + run: + working-directory: . + +jobs: + build-and-push-image-to-github-packages: + name: Push backend Docker image to GitHub Packages + runs-on: ubuntu-latest + environment: + name: deploy + permissions: + contents: read + packages: write + steps: + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + - name: Checkout + uses: actions/checkout@v3 + with: + ref: develop + - name: Login to GitHub Container Registry + uses: docker/login-action@v2 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + - name: Sanitize repo slug + uses: actions/github-script@v6 + id: repo_slug + with: + result-encoding: string + script: return 'ghcr.io/${{ github.repository }}'.toLowerCase() + - name: Push Stage to GitHub Container Registry + uses: docker/build-push-action@v4 + with: + context: "{{defaultContext}}" + push: true + tags: ${{ steps.repo_slug.outputs.result }}:stage + + deploy: + name: Deploy + runs-on: ubuntu-latest + needs: build-and-push-image-to-github-packages + environment: + name: deploy + steps: + - name: Check out repository + uses: actions/checkout@v2 + + - name: Copy docker-compose and nginx files + uses: appleboy/scp-action@master + with: + host: ${{ secrets.HOST }} + username: ${{ secrets.SSH_USERNAME }} + password: ${{ secrets.SSH_PASSWORD }} + source: "infra/docker-compose.swag.yml, infra/nginx/*.conf" + target: ${{ env.DEPLOY_PATH }} + - name: Get SHORT_SHA + run: echo "SHORT_SHA=`echo ${{ github.sha }} | cut -c1-8`" >> $GITHUB_ENV + - name: Get COMMIT_DATE + run: echo COMMIT_DATE=$(git show -s --format=%ci ${{ env.SHORT_SHA }}) >> $GITHUB_ENV + - name: Create .env file + uses: appleboy/ssh-action@master + with: + host: ${{ secrets.HOST }} + username: ${{ secrets.SSH_USERNAME }} + password: ${{ secrets.SSH_PASSWORD }} + script: | + cd ${{ env.DEPLOY_PATH }} + mv infra/docker-compose.swag.yml infra/docker-compose.yaml + rm -f .env + cat > .env <<- EOM + POSTGRES_DB=${{ secrets.POSTGRES_DB }} + POSTGRES_USER=${{ secrets.POSTGRES_USER }} + POSTGRES_PASSWORD=${{ secrets.POSTGRES_PASSWORD }} + DB_HOST=${{ secrets.DB_HOST }} + DB_PORT=${{ secrets.DB_PORT }} + BOT_TOKEN=${{ secrets.TELEGRAM_BOT_TOKEN }} + BOT_WEBHOOK_MODE=${{ secrets.BOT_WEBHOOK_MODE }} + APPLICATION_URL=${{ secrets.APPLICATION_URL }} + 
SECRET_KEY=${{ secrets.SECRET_KEY }} + ROOT_PATH=${{ secrets.ROOT_PATH }} + MAIL_SERVER=${{ secrets.MAIL_SERVER }} + MAIL_LOGIN=${{ secrets.MAIL_LOGIN }} + MAIL_PASSWORD=${{ secrets.MAIL_PASSWORD }} + ORGANIZATIONS_EMAIL=${{ secrets.ORGANIZATIONS_EMAIL }} + EMAIL_ADMIN=${{ secrets.EMAIL_ADMIN }} + LAST_COMMIT=${{ env.SHORT_SHA }} + COMMIT_DATE=${{ env.COMMIT_DATE }} + EOM + start_application: + name: Start application + runs-on: ubuntu-latest + environment: + name: deploy + needs: deploy + steps: + - name: Start application + uses: appleboy/ssh-action@master + with: + host: ${{ secrets.HOST }} + username: ${{ secrets.SSH_USERNAME }} + password: ${{ secrets.SSH_PASSWORD }} + script: | + cd ${{ env.DEPLOY_PATH }}/infra + docker compose --file docker-compose.yaml --env-file ../.env pull backend -q + docker compose --file docker-compose.yaml --env-file ../.env up -d --wait + docker system prune -f || true + + docker compose --env-file ../.env exec -Tw ${{ env.WORK_DIR }} backend bash -c "alembic upgrade head" diff --git a/.github/workflows/push_front.yaml b/.github/workflows/push_front.yaml new file mode 100644 index 00000000..80bfa20f --- /dev/null +++ b/.github/workflows/push_front.yaml @@ -0,0 +1,47 @@ +name: Stage bot frontend + +on: + push: + branches: + - master + - develop + paths: + - 'front/**' + + workflow_dispatch: + +env: + REGISTRY: ghcr.io + IMAGE_NAME_FRONT: procharity_bot_front + OWNER_LC: studio-yandex-practicum + + +jobs: + build_and_push_to_ghcr: + name: Push front Docker image to Github Container Registry + runs-on: ubuntu-latest + environment: + name: stage_deploy + permissions: + contents: read + packages: write + steps: + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + - name: Checkout + uses: actions/checkout@v3 + with: + ref: develop + - name: Login to GitHub Container Registry + uses: docker/login-action@v2 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + - name: Push Front to ghcr.io + uses: docker/build-push-action@v2 + with: + context: "{{defaultContext}}" + file: ./infra/Dockerfile_front + push: true + tags: ${{ env.REGISTRY }}/${{ env.OWNER_LC }}/${{ env.IMAGE_NAME_FRONT }}:prod diff --git a/.gitignore b/.gitignore index b6e47617..df7d229f 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,10 @@ +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. 
+.idea/ + # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] @@ -127,3 +134,13 @@ dmypy.json # Pyre type checker .pyre/ + +# vscode +.vscode/ +.devcontainer/ + +# logs +logs/ + +.DS_Store +src/.DS_Store diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 00000000..8dcd159a --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "front"] + path = front + url = https://github.com/ProCharity/procharity-front diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..e3a4ab33 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,33 @@ +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.5.0 + hooks: + - id: check-yaml + - id: end-of-file-fixer + - id: check-added-large-files + - id: trailing-whitespace + args: [--markdown-linebreak-ext=md] + - id: check-merge-conflict + - id: mixed-line-ending + - id: no-commit-to-branch + args: [--branch, master, --branch, develop] + +- repo: https://github.com/pycqa/isort + rev: 5.12.0 + hooks: + - id: isort + args: [--profile, black] + +- repo: https://github.com/psf/black + rev: 23.10.0 + hooks: + - id: black + args: [--line-length=120] + +- repo: https://github.com/pycqa/flake8 + rev: 6.1.0 + hooks: + - id: flake8 + additional_dependencies: + - flake8-docstrings + args: [--config, setup.cfg] diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 00000000..48d2c545 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,13 @@ +FROM python:3.11 + +RUN mkdir /app + +WORKDIR /app + +COPY requirements.txt /app + +RUN pip3 install -r /app/requirements.txt --no-cache-dir + +COPY . . + +CMD ["uvicorn", "src:app", "--host", "0.0.0.0", "--port", "8000", "--proxy-headers"] diff --git a/MIGRATIONS_GUIDE.md b/MIGRATIONS_GUIDE.md new file mode 100644 index 00000000..039b9bee --- /dev/null +++ b/MIGRATIONS_GUIDE.md @@ -0,0 +1,35 @@ +# Восстановление базы данных и применение миграций + +Этот README-файл содержит инструкции по восстановлению базы данных из дампа и применению миграций с использованием Docker и Alembic. + + +## Шаги по восстановлению данных из дампа + +1. Очистите существующие данные и таблицы в базе данных: + + ``` + docker exec -it infra-postgres-1 psql -U postgres -d procharity_back_db_local -c "DROP SCHEMA public CASCADE; CREATE SCHEMA public;" + ``` + +2. Загрузите данные из дампа SQL: + + ``` + docker exec -i infra-postgres-1 psql -U postgres procharity_back_db_local < name_dump.sql + ``` + +## Шаги по применению миграций с использованием Alembic + +1. Удалите таблицу alembic_version из базы данных: + + ``` + docker exec -it infra-postgres-1 psql -U postgres -d procharity_back_db_local -c "DROP TABLE alembic_version;" + ``` + + +2. Примените миграции с помощью Alembic: + + ``` + alembic upgrade head + ``` + +3. Восстановление базы данных и применение миграций завершено. diff --git a/README.md b/README.md index 39dad07f..bc2071bc 100644 --- a/README.md +++ b/README.md @@ -1 +1,384 @@ # ProCharity_back_2.0 + +
+<details>
+  <summary>Оглавление</summary>
+  <ol>
+    <li><a href="#описание">Описание</a></li>
+    <li><a href="#запуск-бота-локально">Запуск бота локально</a></li>
+    <li><a href="#для-разработки">Для разработки</a></li>
+    <li><a href="#использование">Использование</a></li>
+    <li><a href="#полезная-информация">Полезная информация</a></li>
+  </ol>
+</details>
+ +### Описание + +Создание чат-бота в Telegram для платформы интеллектуального волонтерства +ProCharity (НКО Фонд Друзья). + +Сайт [https://procharity.ru/](https://procharity.ru/) + +Чат-бот @ProCharity_bot + +Платформа представляет собой агрегатор волонтерских заданий от различных +благотворительных проектов - любой желающий согласно своим желаниям и +умениям может откликаться на конкретные предложения благотворительных +проектов о волонтерской помощи, в свою очередь благотворительный проект/фонд +выбирает из всех откликов подходящих кандидатов. + +Чат-бот реализует функционал волонтерской платформы в приложении Telegram - +с помощью JSON рассылает подписчикам новые появляющиеся задания от фондов. + +### Технологии + +[![FastAPI][FastAPI-badge]][FastAPI-url] +[![Python-telegram-bot][Python-telegram-bot-badge]][Python-telegram-bot-url] +[![Postgres][Postgres-badge]][Postgres-url] +[![Nginx][Nginx-badge]][Nginx-url] + +## Запуск бота локально + +1. Создайте и заполните файл `.env`: + + ```dotenv + # Переменные приложения + BOT_TOKEN= # Токен аутентификации бота + SECRET_KEY= # Cекретный ключ для генерации jwt-токенов + + # Переменные базы данных + POSTGRES_DB=procharity_back_db_local # Название базы данных + POSTGRES_USER=postgres # Логин для подключения к базе данных + POSTGRES_PASSWORD=postgres # Пароль для подключения к базе данных + DB_HOST=procharity_postgres # Название хоста с БД + DB_PORT=5432 # Порт для подключения к базе данных + + # Organization data + ORGANIZATIONS_EMAIL=procharity@yandex.ru + + # Адреса электронной почты администраторов + EMAIL_ADMIN=procharity.admin_1@yandex.ru + ``` + + > **Note** + > [Полный пример переменных окружения](env.example). + + > **Note** + > Для получения токена аутентификации бота обратитесь к + разделу [Регистрация бота Telegram](#регистрация-бота-telegram). + + +2. Собрать и запустить контейнеры из файла infra/docker-compose.local.yml. + + ```shell + docker compose -f infra/docker-compose.local.yml up + ``` + Эта команда создаст и запустит все необходимые контейнеры, включая базу данных и бэкенд. + +3. После успешного запуска контейнеров, выполните следующую команду, которая войдет в контейнер, выполнит миграции и наполнит тестовую базу данных: + + ```shell + docker exec -it procharity_bot_backend sh -c "alembic upgrade head && python3 fill_db.py" + ``` + +## Для разработки + +### Установка и настройка приложения + +1. Клонировать репозиторий. + + ```shell + git clone https://github.com/Studio-Yandex-Practicum/ProCharity_back_2.0.git + cd ProCharity_back_2.0 + ``` + +2. Установить зависимости и активировать виртуальное окружение. + + ```shell + poetry env use python3.11 + poetry install + poetry shell + ``` + + > **Note** + > [Документация по установке Poetry](https://python-poetry.org/docs/#installation) +3. Настроить pre-commit. +В режиме ```poetry shell``` + ``` + pre-commit install + ``` + > **Note** + > Перед каждым коммитом будет запущен линтер и форматтер, + который автоматически отформатирует код согласно принятому в команде codestyle. + +4. 
Создайте и заполните файл `.env`: + + ```dotenv + # Переменные приложения + BOT_TOKEN= # Токен аутентификации бота + SECRET_KEY= # Cекретный ключ для генерации jwt-токенов + + # Переменные базы данных + POSTGRES_DB=procharity_back_db_local # Название базы данных + POSTGRES_USER=postgres # Логин для подключения к базе данных + POSTGRES_PASSWORD=postgres # Пароль для подключения к базе данных + DB_HOST=localhost # Название хоста с БД + DB_PORT=5432 # Порт для подключения к базе данных + + # Organization data + ORGANIZATIONS_EMAIL=procharity@yandex.ru + + # Адреса электронной почты администраторов + EMAIL_ADMIN=procharity.admin_1@yandex.ru + ``` + + > **Note** + > [Полный пример переменных окружения](env.example). + + > **Note** + > Для получения токена аутентификации бота обратитесь к + разделу [Регистрация бота Telegram](#регистрация-бота-telegram). + +### Запуск + +1. Запустить Docker с БД. + + ```shell + sudo docker compose -f infra/docker-pg.yml up -d + ```` + +2. Применить миграции базы данных. + + ```shell + alembic upgrade head + +3. Выполнить скрипт наполнения тестовой базы. + + ```shell + python3 fill_db.py + ``` + +4. Запустить сервер приложения. + + ```shell + uvicorn src:app --reload + ``` + +## Использование + +После выполнения инструкций, описанных в разделе "[Для разработки](#для-разработки)", + +будет запущен FastAPI-сервер по адресу http://localhost:8000. + +Также по адресу http://localhost:8000/docs доступна полная документация API. + +## Полезная информация + +Данный раздел содержит информацию, которая может быть полезна для разработчиков. +Настоятельно рекомендуем каждому прочитать его хотя бы один раз. + +### Регистрация бота Telegram + +1. Найдите в Telegram бота [@BotFather](https://t.me/botfather) и откройте с ним чат. + +2. Напишите ему `/newbot`. + +3. Придумайте и напишите название бота. Оно будет отображаться в контактах и +чатах. Например: `My Dev Bot`. + +4. Придумайте и напишите юзернейм. Он используется для упоминания бота и в +ссылках. Юзернейм должен быть на латинице и обязательно заканчиваться на +«bot». Например: `my_dev_bot`. + +5. Готово. [@BotFather](https://t.me/botfather) пришлет токен бота — его нужно +скопировать в переменную окружения `BOT_TOKEN` (см. в разделе "[Установка и Запуск](#установка-и-запуск)"). + + > **Note** + > [Документация о боте BotFather](https://core.telegram.org/bots/features#botfather) + +
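Полученный токен можно сразу проверить. Ниже — минимальный набросок такой проверки через python-telegram-bot (запрос `getMe`); предполагается асинхронный API библиотеки версии 20+, сам проект может выполнять проверку иначе.

```python
import asyncio

from telegram import Bot


async def main() -> None:
    # Набросок: проверяем токен запросом getMe; вместо <BOT_TOKEN> подставьте свой токен.
    async with Bot("<BOT_TOKEN>") as bot:
        me = await bot.get_me()
        print(f"Токен действителен, бот: @{me.username}")


asyncio.run(main())
```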
+<details>
+  <summary>Режимы работы бота</summary>
+ +1. Запуск без API приложения + + Выполнить скрипт запуска. + + ```shell + python src/run.py + ``` + + > **Warning**: + Возможно только в режиме [polling](#polling). + +2. Polling + + Задать значение переменной окружения (`.env`). + + ```dotenv + BOT_WEBHOOK_MODE=False + ``` + +3. Webhook + + Задать значение переменным окружения (`.env`). + + ``` dotenv + BOT_WEBHOOK_MODE=True + APPLICATION_URL=http://example.com # Пример + ``` + + > **Note** + > [Подробнее о webhooks](https://github.com/python-telegram-bot/python-telegram-bot/wiki/Webhooks) + + > **Note** + > Для теста через HTTPS можно использовать [Ngrok](https://ngrok.com/) + > (см. раздел "[Использование Ngrok](#использование-ngrok)"). +
+</details>
+
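Для наглядности — минимальный набросок того, как выбор между polling и webhook может выглядеть в коде на python-telegram-bot 20+. Это иллюстрация по мотивам описанных выше переменных окружения, а не фрагмент реального `src/run.py`; порт и значения по умолчанию взяты как допущение.

```python
import os

from telegram.ext import ApplicationBuilder

# Набросок: режим работы выбирается по переменной BOT_WEBHOOK_MODE (см. выше).
application = ApplicationBuilder().token(os.environ["BOT_TOKEN"]).build()

if os.getenv("BOT_WEBHOOK_MODE", "False") == "True":
    # Webhook: Telegram присылает обновления на APPLICATION_URL.
    application.run_webhook(
        listen="0.0.0.0",
        port=8000,  # порт указан для примера
        webhook_url=os.environ["APPLICATION_URL"],
    )
else:
    # Polling: бот сам периодически опрашивает сервер Telegram.
    application.run_polling()
```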
+<details>
+  <summary>Работа с базой данных</summary>
+
+#### Создание миграций
+
+1. Применить существующие миграции:
+
+    ```shell
+    alembic upgrade head
+    ```
+
+2. Создать новую миграцию (примерный вид сгенерированного файла показан ниже, после этого раздела):
+
+    ```shell
+    alembic revision --autogenerate -m "<Название миграции>"
+    ```
+
+    В названии миграции указывается, для какого поля или модели внесены изменения, например:
+
+    * add_shift_model
+    * shift_add_field_title
+    * shift_remove_field_title
+
+3. Повторить пункт 1 для применения созданной миграции.
+
+#### Откат миграций
+
+1. Откатить последнюю миграцию:
+
+    ```shell
+    alembic downgrade -1
+    ```
+</details>
+
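Для ориентира — гипотетический пример того, как может выглядеть автосгенерированный файл миграции в `src/core/db/migrations` (путь указан в `alembic.ini`). Идентификаторы ревизий и таблица здесь выдуманы для иллюстрации.

```python
"""add_shift_model

Revision ID: 0123abcd4567
Revises:
Create Date: 2023-10-01 12:00:00
"""
import sqlalchemy as sa
from alembic import op

# Идентификаторы ревизий выдуманы для примера.
revision = "0123abcd4567"
down_revision = None
branch_labels = None
depends_on = None


def upgrade() -> None:
    # Содержимое реальных миграций Alembic генерирует по изменениям моделей.
    op.create_table(
        "shifts",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("title", sa.String(length=100), nullable=True),
    )


def downgrade() -> None:
    op.drop_table("shifts")
```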
+<details>
+  <summary>Работа с Poetry</summary>
+
+В этом разделе представлены наиболее часто используемые команды.
+
+Подробнее: https://python-poetry.org/docs/cli/
+
+1. Настройка окружения проекта (сам Poetry необходимо устанавливать через curl, как описано в документации):
+
+    ```shell
+    poetry env use python3.11; poetry install
+    ```
+
+2. Активировать виртуальное окружение:
+
+    ```shell
+    poetry shell
+    ```
+
+3. Добавить зависимость:
+
+    ```shell
+    poetry add <имя_пакета>
+    ```
+
+    > **Note**
+    > Использование флага `--dev (-D)` позволяет установить зависимость,
+    > необходимую только для разработки.
+    > Это полезно для разделения develop и prod зависимостей.
+
+#### Запустить скрипт без активации виртуального окружения
+
+```shell
+poetry run python <имя_скрипта>.py
+```
+</details>
+
+<details>
+  <summary>Использование Ngrok</summary>
+
+Этот раздел будет полезен, если у вас нет доменного имени с установленным
+SSL-сертификатом.
+
+[Ngrok](https://ngrok.com/) — это инструмент, который позволяет создавать временный общедоступный
+адрес (туннель) для вашего локального сервера, находящегося за NAT или
+брандмауэром.
+
+Подробнее: https://ngrok.com/
+
+Для установки следуйте официальным инструкциям: https://ngrok.com/download
+
+**В режиме локального запуска.**
+
+1. Запустите ngrok:
+
+    ```
+    ngrok http http://127.0.0.1:8000/
+    ```
+
+2. Задайте значение переменной окружения в файле `.env`:
+
+    ```
+    APPLICATION_URL=https://1234-56-78-9.eu.ngrok.io
+    # Это пример. Рабочее значение нужно взять из вывода ngrok (см. п. 1).
+    ```
+
+**В режиме разработки.** Задайте значение переменной окружения в `.env` (набросок программного запуска туннеля приведён ниже, после этого раздела):
+
+    ```dotenv
+    USE_NGROK=True
+    ```
+</details>
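Ниже — набросок того, как при `USE_NGROK=True` туннель может подниматься программно через библиотеку pyngrok (см. настройку `USE_NGROK` в `env.example`). Это иллюстрация, а не фрагмент реального кода проекта; порт 8000 взят из инструкции выше.

```python
import os

from pyngrok import ngrok

# Набросок: поднимаем туннель к локальному серверу и используем его
# публичный адрес как APPLICATION_URL для вебхука.
if os.getenv("USE_NGROK", "False") == "True":
    tunnel = ngrok.connect(8000)
    os.environ["APPLICATION_URL"] = tunnel.public_url
    print(f"Туннель открыт: {tunnel.public_url}")
```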
+ + + + + +[FastAPI-url]: https://fastapi.tiangolo.com/ +[FastAPI-badge]: https://img.shields.io/badge/FastAPI-005571?style=for-the-badge&logo=fastapi + +[Python-telegram-bot-url]: https://github.com/python-telegram-bot/python-telegram-bot +[Python-telegram-bot-badge]: https://img.shields.io/badge/python--telegram--bot-2CA5E0?style=for-the-badge + +[Postgres-url]: https://www.postgresql.org/ +[Postgres-badge]: https://img.shields.io/badge/postgres-%23316192.svg?style=for-the-badge&logo=postgresql&logoColor=white + +[Nginx-url]: https://nginx.org +[Nginx-badge]: https://img.shields.io/badge/nginx-%23009639.svg?style=for-the-badge&logo=nginx&logoColor=white~~ diff --git a/alembic.ini b/alembic.ini new file mode 100644 index 00000000..f92a641a --- /dev/null +++ b/alembic.ini @@ -0,0 +1,107 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts +script_location = src/core/db/migrations + +# template migration file names +file_template = %%(year)d-%%(month).2d-%%(day).2d_%%(hour).2d.%%(minute).2d.%%(second).2d_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. +prepend_sys_path = . + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the python-dateutil library that can be +# installed by adding `alembic[tz]` to the pip requirements +# string value is passed to dateutil.tz.gettz() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the +# "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to alembic/versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "version_path_separator" below. +# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions + +# version path separator; As mentioned above, this is the character used to split +# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. +# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. +# Valid values for version_path_separator are: +# +# version_path_separator = : +# version_path_separator = ; +# version_path_separator = space +version_path_separator = os # Use os.pathsep. Default configuration used for new projects. + +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +sqlalchemy.url = driver://user:pass@localhost/dbname + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. 
See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/env.example b/env.example new file mode 100644 index 00000000..bf2a8048 --- /dev/null +++ b/env.example @@ -0,0 +1,49 @@ +# Переменные приложения +BOT_TOKEN= # Токен аутентификации бота +APPLICATION_URL=procharity.duckdns.org # Домен, на котором развернуто приложение +BOT_WEBHOOK_MODE=False # Запустить бота в режиме webhook(True) | polling(False) +DEBUG=False # Включение(True) | Выключение(False) режима отладки +SECRET_KEY=a84167ccb889a32e12e639db236a6b98877d73d54b42e54f511856e20ccaf2ab # Cекретный ключ для генерации jwt-токенов +ROOT_PATH=/api # Для корректной работы без прокси ставится пустая строка, для работы с прокси "/api/" + +# Переменные базы данных +POSTGRES_DB=procharity_back_db_local # Название базы данных +POSTGRES_USER=postgres # Логин для подключения к базе данных +POSTGRES_PASSWORD=postgres # Пароль для подключения к базе данных +DB_HOST=localhost # Название хоста (контейнера) +DB_PORT=5432 # Порт для подключения к базе данных + +# Настройки почтового сервера +MAIL_SERVER=smtp.yandex.ru # Адрес постового сервиса +MAIL_PORT=465 # Порт для подключения к почтовому сервису +MAIL_LOGIN= # Логин для подключения к почтовому сервису +MAIL_PASSWORD= # Пароль для подключения к почтовому сервису +MAIL_STARTTLS=False # True или False, использовать ли STARTTLS +MAIL_SSL_TLS=True # True или False, использовать ли SSL и TLS +USE_CREDENTIALS=True # Использовать логин/пароль для подключения к почтовому серверу или нет +VALIDATE_CERTS=True # проверять SSL сертификат почтового сервера или нет + +# Organization data +ORGANIZATIONS_EMAIL=procharity@yandex.ru + +# Адреса электронной почты администраторов +EMAIL_ADMIN=procharity.admin_1@yandex.ru + +# Настройки логирования +LOG_LEVEL=INFO # Уровень логирования +LOG_DIR=logs # Директория для сохранения логов. 
По умолчанию - logs в корневой директории +LOG_FILE=app.log # Название файла с логами +LOG_FILE_SIZE=10485760 # Максимальный размер файла с логами, в байтах +LOG_FILES_TO_KEEP=5 # Количество сохраняемых файлов с логами + +# Настройка Pygnrok +USE_NGROK=False # True - туннель включен, False - туннель выключен + +# Настройки jwt +ALGORITHM=HS256 +ACCESS_TOKEN_EXPIRE_MINUTES=30 + +# URLs проекта Procharity +PROCHARITY_URL=http://test6.procharity.corptest.ru/ # Основной URL проекта +HELP_PROCHARITY_URL=https://help.procharity.ru/ # URL "Ответы на вопросы" проекта +YA_PRAKTIKUM_URL=https://praktikum.yandex.ru/ # URL Ya Praktikum diff --git a/fill_db.py b/fill_db.py new file mode 100644 index 00000000..2fae9ccd --- /dev/null +++ b/fill_db.py @@ -0,0 +1,307 @@ +import asyncio +import string +from contextlib import asynccontextmanager +from datetime import datetime, timedelta +from random import choice, choices, randint + +from faker import Faker +from sqlalchemy import text +from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker + +from src.core.db import get_session +from src.core.db.models import Category, Task, UnsubscribeReason, User + +CHARACTERS = string.ascii_uppercase + string.digits + +CATEGORIES_TEST_DATA = [ + {"id": "1", "name": "Дизайн и верстка"}, + {"id": "2", "name": "Маркетинг и коммуникации"}, + {"id": "3", "name": "Переводы"}, + {"id": "4", "name": "IT"}, + {"id": "5", "name": "Юридические услуги"}, + {"id": "6", "name": "Стратегический консалтинг"}, + {"id": "7", "name": "Фото и видео"}, + {"id": "8", "name": "Обучение и тренинги"}, + {"id": "9", "name": "Финансы и фандрайзинг"}, + {"id": "10", "name": "Менеджмент"}, +] +SUBCATEGORIES_TEST_DATA = [ + {"id": "1", "name": "Новичок"}, + {"id": "2", "name": "Архивный новичок"}, + {"id": "3", "name": "Опытный"}, + {"id": "4", "name": "Архивный опытный"}, + {"id": "5", "name": "Профессионал"}, + {"id": "6", "name": "Архивный профессионал"}, +] +TEST_LOCATION = [ + "Москва", + "Санкт-Петербург", + "Новосибирск", + "Екатеринбург", + "Казань", + "Нижний Новгород", + "Челябинск", + "Самара", + "Омск", + "Ростов-на-Дону", +] +TEST_ORGANIZATION = [ + "Нефть без границ", + "Транспортный гигант-2", + "Российские роботы", + "Строй Инновация", + "Медицинский Колос", +] +TEST_TASKS = [ + { + "id": "1", + "tasks": [ + "Создание макета сайта в графическом редакторе", + "Изучение основных принципов дизайна: цвет, типографика, композиция", + "Создание векторной графики для использования на сайте", + "Создание нескольких вариантов дизайна для выбора наилучшего решения", + "Разработка дизайна логотипа и фирменного стиля", + ], + }, + { + "id": "2", + "tasks": [ + "Разработка и продвижение программы лояльности для доноров.", + "Организация и проведение благотворительного аукциона или лотереи.", + "Проведение промо-акций в социальных сетях для привлечения доноров.", + "Организация и проведение благотворительного мероприятия.", + "Создание PR-кампаний и пресс-релизов.", + ], + }, + { + "id": "3", + "tasks": [ + "Перевод средств на поддержку нуждающихся", + "Организация благотворительных мероприятий через переводы", + "Регулярные переводы на улучшение условий жизни нуждающихся", + "Перевод на медицинскую помощь нуждающимся", + "Помощь с переводом для мигрантов и беженцев", + ], + }, + { + "id": "4", + "tasks": [ + "Разработка и поддержка сайта организации", + "Создание базы данных для учета доноров и получателей помощи", + "Разработка мобильного приложения для сбора пожертвований", + "Автоматизация процесса учета и отчетности в 
организации", + "Разработка системы онлайн-консультаций для получателей помощи", + ], + }, + { + "id": "5", + "tasks": [ + "Регистрация благотворительной организации", + "Разработка устава и другой внутренней документации", + "Получение статуса НКО", + "Консультации по налогообложению и отчетности", + "Правовая поддержка при взаимодействии с государственными органами", + ], + }, + { + "id": "6", + "tasks": [ + "Определение и разработка концепции долгосрочного развития организации.", + "Поиск и обеспечение новых источников финансирования.", + "Разработка стратегии маркетинга и продвижения организации.", + "Оптимизация деятельности и уменьшение издержек организации.", + "Оценка эффективности программ благотворительной помощи.", + ], + }, + { + "id": "7", + "tasks": [ + "Создание промо-видео", + "Фотоотчет", + "Создание видеоинструкций", + "Фотографирование пациентов", + "Создание коротких видеороликов", + ], + }, + { + "id": "8", + "tasks": [ + "Развитие навыков лидерства и командной работы", + "Навыки эффективной коммуникации и делового переговоров", + "Финансовое планирование и управление бюджетом", + "Управление проектами и достижение целей организации", + "Развитие личной эффективности и эмоционального интеллекта", + ], + }, + { + "id": "9", + "tasks": [ + "Разработка стратегии привлечения доноров на следующий квартал.", + "Проведение анализа эффективности рекламных кампаний.", + "Организация благотворительного концерта с участием артистов.", + "Проведение финансовой аудитории для улучшения финансовой дисциплины.", + "Разработка системы мотивации для волонтеров.", + ], + }, + { + "id": "10", + "tasks": [ + "Разработать стратегию привлечения новых доноров", + "Организация гала-вечеринки в поддержку благотворительной программы", + "Подготовка презентации о деятельности организации.", + "Провести опрос среди населения для выявления актуальных проблем.", + "Разработать план волонтерской деятельности для привлечения помощи.", + ], + }, +] +TEST_UNSUBSCRIBE_REASON = [ + "Нехватка времени", + "Переезд", + "Большая загруженность", + "Отсутствие мотивации", + "Другое", +] +USERS_TABLE_ROWS = 30 + + +async def get_task_name_by_id(category_id): + """Function for selecting tasks from the TEST_TASKS list.""" + for task in TEST_TASKS: + if int(task["id"]) == category_id: + for i in task["tasks"]: + yield i + + +async def filling_category_in_db( + session: async_sessionmaker[AsyncSession], +) -> None: + """Filling the database with test data Categories. + The fields id, name, is_archived are filled in. + """ + for category in CATEGORIES_TEST_DATA: + category_obj = Category( + name=category["name"], + is_archived=choice([True, False]), + id=int(category["id"]), + ) + session.add(category_obj) + await session.commit() + + +async def filling_subcategory_in_db( + session: async_sessionmaker[AsyncSession], +) -> None: + """Filling the database with test data subcategories. + The fields id, name, is_archived, parent_id are filled in. 
+ """ + for category in CATEGORIES_TEST_DATA: + parent_id = int(category["id"]) + category_name = str(category["name"]) + for subcategory in SUBCATEGORIES_TEST_DATA: + subcategory_obj = Category( + name=f"{subcategory['name']} для {category_name}", + is_archived=True if "Архивный" in subcategory["name"] else False, + parent_id=parent_id, + id=int(str(subcategory["id"]) + str(parent_id)), + ) + session.add(subcategory_obj) + await session.commit() + + +async def filling_task_in_db( + session: async_sessionmaker[AsyncSession], +) -> None: + """Filling the database with test data: Tasks. + The fields title, name_organization, deadline, category, + location, description, is_archived. + """ + for category_id in range(0, len(CATEGORIES_TEST_DATA) + 1): + for subcategory in SUBCATEGORIES_TEST_DATA: + async for title in get_task_name_by_id(category_id): + task = Task( + name_organization=f"{choice(TEST_ORGANIZATION)}", + deadline=datetime.now() + timedelta(days=10), + category_id=int(str(subcategory["id"]) + str(category_id)), + title=title, + bonus=randint(1, 4) + randint(1, 4), + location=f"{choice(TEST_LOCATION)}", + link=f"http://example.com/task/" f"{''.join(choices(CHARACTERS, k=6))}", + description=f"Описание {title}", + is_archived=choice([True, False]), + ) + session.add(task) + await session.commit() + + +async def filling_user_in_db( + session: async_sessionmaker[AsyncSession], +) -> None: + """Filling the database with test data: Users. + The fields telegram_id, username, email, external_id, first_name, + last_name, has_mailing, external_signup_date, banned. + """ + user_fake = Faker(locale="ru_RU") + external_id_fake = Faker() + days_period = 90 + for id in range(1, USERS_TABLE_ROWS + 1): + email = choice([None, user_fake.unique.email()]) + external_id = choice([None, external_id_fake.unique.random_int(min=1, max=USERS_TABLE_ROWS)]) + created_at = user_fake.date_between(datetime.now() - timedelta(days=days_period), datetime.now()) + user = User( + telegram_id=user_fake.unique.random_int(min=1, max=USERS_TABLE_ROWS), + username=user_fake.unique.user_name(), + email=email, + external_id=external_id, + first_name=user_fake.first_name(), + last_name=user_fake.last_name(), + has_mailing=False if email is None else True, + external_signup_date=None if external_id is None else created_at, + banned=user_fake.boolean(), + id=id, + created_at=created_at, + ) + session.add(user) + await session.commit() + + +async def filling_unsubscribe_reason_in_db( + session: async_sessionmaker[AsyncSession], +) -> None: + """Filling the database with test data: UnsubscribeReason. + The fields telegram_id, username, email, external_id, first_name, + last_name, has_mailing, external_signup_date, banned. 
+ """ + user_fake = Faker() + days_period = 60 + for _ in range(1, int(USERS_TABLE_ROWS / 3) + 1): + unsubscribe_reason = UnsubscribeReason( + user_id=user_fake.unique.random_int(min=1, max=USERS_TABLE_ROWS), + unsubscribe_reason=choice(TEST_UNSUBSCRIBE_REASON), + created_at=user_fake.date_between(datetime.now() - timedelta(days=days_period), datetime.now()), + ) + session.add(unsubscribe_reason) + await session.commit() + + +async def delete_all_data( + session: async_sessionmaker[AsyncSession], +) -> None: + """The function deletes data from the tables Category, Tasks.""" + await session.execute(text("""TRUNCATE TABLE tasks, categories, unsubscribe_reason, users CASCADE""")) + await session.commit() + + +async def run(): + session_manager = asynccontextmanager(get_session) + async with session_manager() as session: + await delete_all_data(session) + await filling_category_in_db(session) + await filling_subcategory_in_db(session) + await filling_task_in_db(session) + await filling_user_in_db(session) + await filling_unsubscribe_reason_in_db(session) + print("Тестовые данные загружены в БД.") + + +if __name__ == "__main__": + asyncio.run(run()) diff --git a/front b/front new file mode 160000 index 00000000..50ccf3b0 --- /dev/null +++ b/front @@ -0,0 +1 @@ +Subproject commit 50ccf3b0981195a09bf092309e38eab13ca57fd5 diff --git a/infra/Dockerfile_front b/infra/Dockerfile_front new file mode 100644 index 00000000..9998d52c --- /dev/null +++ b/infra/Dockerfile_front @@ -0,0 +1,9 @@ +FROM node:12.13.0-alpine + +COPY ../front/package.json ./ +COPY ../front/yarn.lock ./ +COPY ../front ./frontend +WORKDIR /frontend +RUN yarn install +RUN yarn build +COPY ../front/public/robots.txt ./build/robots.txt diff --git a/infra/docker-compose.local.yml b/infra/docker-compose.local.yml new file mode 100644 index 00000000..f5f331cf --- /dev/null +++ b/infra/docker-compose.local.yml @@ -0,0 +1,33 @@ +version: "3.8" +services: + backend: + build: + context: ../ + dockerfile: Dockerfile + container_name: procharity_bot_backend + restart: always + depends_on: + postgres: + condition: service_healthy + ports: + - "8000:8000" + env_file: + - ../.env + postgres: + image: postgres:13.2 + container_name: procharity_postgres + restart: always + ports: + - "5432:5432" + volumes: + - postgres_data:/var/lib/postgresql/data/ + env_file: + - ../.env + healthcheck: + test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-postgres}"] + interval: 3s + timeout: 3s + retries: 5 + +volumes: + postgres_data: diff --git a/infra/docker-compose.staging.yml b/infra/docker-compose.staging.yml new file mode 100644 index 00000000..3e548ffe --- /dev/null +++ b/infra/docker-compose.staging.yml @@ -0,0 +1,54 @@ +services: + backend: + image: ghcr.io/studio-yandex-practicum/procharity_back_2.0:stage + container_name: procharity_bot_backend + restart: always + volumes: + - ./logs/:/app/logs/ + - static:/app/templates + depends_on: + postgres: + condition: service_healthy + ports: + - "8000:8000" + env_file: + - ../.env + + postgres: + image: postgres:13.2 + container_name: procharity_postgres + restart: always + volumes: + - postgres_data:/var/lib/postgresql/data/ + env_file: + - ../.env + healthcheck: + test: [ "CMD-SHELL", "pg_isready", "-U", "{{ POSTGRES_USER | 'postgres' }}"] + interval: 3s + timeout: 3s + retries: 5 + + front: + image: "ghcr.io/studio-yandex-practicum/procharity_bot_front:prod" + env_file: + - ../.env + volumes: + - frontend_build:/frontend/build + + nginx: + image: nginx:1.23.3-alpine + container_name: procharity_nginx + 
restart: always + ports: + - "80:80" + volumes: + - ./nginx/nginx.local.conf:/etc/nginx/conf.d/default.conf + - frontend_build:/var/html/admin/ + - static:/var/html/static/:ro + env_file: + - ../.env + +volumes: + postgres_data: + static: + frontend_build: diff --git a/infra/docker-compose.swag.yml b/infra/docker-compose.swag.yml new file mode 100644 index 00000000..aa4b9c48 --- /dev/null +++ b/infra/docker-compose.swag.yml @@ -0,0 +1,71 @@ +services: + backend: + image: ghcr.io/studio-yandex-practicum/procharity_back_2.0:stage + container_name: procharity_bot_backend + restart: always + volumes: + - ./logs/:/app/logs/ + - static:/app/templates + depends_on: + postgres: + condition: service_healthy + ports: + - "8000:8000" + env_file: + - ../.env + + postgres: + image: postgres:13.2 + container_name: procharity_postgres + restart: always + volumes: + - postgres_data:/var/lib/postgresql/data/ + ports: + - "5432:5432" + env_file: + - ../.env + healthcheck: + test: [ "CMD-SHELL", "pg_isready -U ${POSTGRES_USER}" ] + interval: 3s + timeout: 3s + retries: 5 + + front: + image: "ghcr.io/studio-yandex-practicum/procharity_bot_front:prod" + env_file: + - ../.env + volumes: + - frontend_build:/frontend/build + + swag: + image: lscr.io/linuxserver/swag:2.4.0 + container_name: procharity_swag + cap_add: + - NET_ADMIN + environment: + - TZ=Europe/Moscow + - URL=${APPLICATION_URL} + - VALIDATION=http + - CERTPROVIDER=zerossl + - EMAIL=yandex-practicum@yandex.ru + env_file: + - ../.env + volumes: + - ../nginx_logs:/var/log/nginx + - ./nginx/swag.conf:/config/nginx/site-confs/default.conf + - frontend_build:/var/html/admin/ + - keys:/config/keys + - static:/var/html/static/:ro + ports: + - "443:443" + - "80:80" + restart: unless-stopped + depends_on: + - backend + - front + +volumes: + postgres_data: + keys: + static: + frontend_build: diff --git a/infra/docker-pg.yml b/infra/docker-pg.yml new file mode 100644 index 00000000..c00fb223 --- /dev/null +++ b/infra/docker-pg.yml @@ -0,0 +1,15 @@ +version: '3.7' + +services: + postgres: + image: postgres:13.2 + restart: always + volumes: + - postgres_data:/var/lib/postgresql/data + ports: + - "5432:5432" + env_file: + - ../.env + +volumes: + postgres_data: diff --git a/infra/nginx/nginx.local.conf b/infra/nginx/nginx.local.conf new file mode 100644 index 00000000..0faba82e --- /dev/null +++ b/infra/nginx/nginx.local.conf @@ -0,0 +1,22 @@ +server { + listen 80; + server_name localhost bot.procharity.ru; + + server_tokens off; + + root /var/www/; + + location /static/ { + root /; + try_files /var/html$uri /var/html/admin$uri =404; + } + location /admin/ { + root /var/html; + } + location / { + proxy_pass http://localhost:8000/; + proxy_set_header Host $host; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Real-IP $remote_addr; + } +} diff --git a/infra/nginx/swag.conf b/infra/nginx/swag.conf new file mode 100644 index 00000000..d2da1c10 --- /dev/null +++ b/infra/nginx/swag.conf @@ -0,0 +1,33 @@ +server { + listen 80; + listen [::]:80; + server_name _; + return 301 https://$host$request_uri; +} + +server { + listen 443 ssl; + listen [::]:443 ssl; + + server_name _; + + include /config/nginx/ssl.conf; + + client_max_body_size 25M; + + server_tokens off; + + location /static/ { + root /; + try_files /var/html$uri /var/html/admin$uri =404; + } + location /admin/ { + root /var/html; + } + location / { + proxy_pass http://backend:8000/; + proxy_set_header Host $host; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + 
proxy_set_header X-Real-IP $remote_addr; + } +} diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 00000000..467019be --- /dev/null +++ b/poetry.lock @@ -0,0 +1,1679 @@ +# This file is automatically @generated by Poetry 1.7.0 and should not be changed by hand. + +[[package]] +name = "aiolimiter" +version = "1.1.0" +description = "asyncio rate limiter, a leaky bucket implementation" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "aiolimiter-1.1.0-py3-none-any.whl", hash = "sha256:0b4997961fc58b8df40279e739f9cf0d3e255e63e9a44f64df567a8c17241e24"}, + {file = "aiolimiter-1.1.0.tar.gz", hash = "sha256:461cf02f82a29347340d031626c92853645c099cb5ff85577b831a7bd21132b5"}, +] + +[[package]] +name = "aiosmtplib" +version = "2.0.2" +description = "asyncio SMTP client" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "aiosmtplib-2.0.2-py3-none-any.whl", hash = "sha256:1e631a7a3936d3e11c6a144fb8ffd94bb4a99b714f2cb433e825d88b698e37bc"}, + {file = "aiosmtplib-2.0.2.tar.gz", hash = "sha256:138599a3227605d29a9081b646415e9e793796ca05322a78f69179f0135016a3"}, +] + +[package.extras] +docs = ["sphinx (>=5.3.0,<6.0.0)", "sphinx_autodoc_typehints (>=1.7.0,<2.0.0)"] +uvloop = ["uvloop (>=0.14,<0.15)", "uvloop (>=0.14,<0.15)", "uvloop (>=0.17,<0.18)"] + +[[package]] +name = "alembic" +version = "1.12.0" +description = "A database migration tool for SQLAlchemy." +optional = false +python-versions = ">=3.7" +files = [ + {file = "alembic-1.12.0-py3-none-any.whl", hash = "sha256:03226222f1cf943deee6c85d9464261a6c710cd19b4fe867a3ad1f25afda610f"}, + {file = "alembic-1.12.0.tar.gz", hash = "sha256:8e7645c32e4f200675e69f0745415335eb59a3663f5feb487abfa0b30c45888b"}, +] + +[package.dependencies] +Mako = "*" +SQLAlchemy = ">=1.3.0" +typing-extensions = ">=4" + +[package.extras] +tz = ["python-dateutil"] + +[[package]] +name = "annotated-types" +version = "0.5.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.7" +files = [ + {file = "annotated_types-0.5.0-py3-none-any.whl", hash = "sha256:58da39888f92c276ad970249761ebea80ba544b77acddaa1a4d6cf78287d45fd"}, + {file = "annotated_types-0.5.0.tar.gz", hash = "sha256:47cdc3490d9ac1506ce92c7aaa76c579dc3509ff11e098fc867e5130ab7be802"}, +] + +[[package]] +name = "anyio" +version = "4.0.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.8" +files = [ + {file = "anyio-4.0.0-py3-none-any.whl", hash = "sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f"}, + {file = "anyio-4.0.0.tar.gz", hash = "sha256:f7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a"}, +] + +[package.dependencies] +idna = ">=2.8" +sniffio = ">=1.1" + +[package.extras] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (>=0.22)"] + +[[package]] +name = "asgi-correlation-id" +version = "4.2.0" +description = "Middleware correlating project logs to individual requests" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "asgi_correlation_id-4.2.0-py3-none-any.whl", hash = "sha256:bd3321a4022f38179db775d27c40cb45130ac9429d569cd59b5474cb2f65fb1b"}, + {file = "asgi_correlation_id-4.2.0.tar.gz", hash = 
"sha256:9d7dcfc2ed016f6e594fffd553788ac70129db7f92bb4dc268ab0b1a3284de5a"}, +] + +[package.dependencies] +starlette = ">=0.18" + +[package.extras] +celery = ["celery"] + +[[package]] +name = "asyncpg" +version = "0.27.0" +description = "An asyncio PostgreSQL driver" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "asyncpg-0.27.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fca608d199ffed4903dce1bcd97ad0fe8260f405c1c225bdf0002709132171c2"}, + {file = "asyncpg-0.27.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:20b596d8d074f6f695c13ffb8646d0b6bb1ab570ba7b0cfd349b921ff03cfc1e"}, + {file = "asyncpg-0.27.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7a6206210c869ebd3f4eb9e89bea132aefb56ff3d1b7dd7e26b102b17e27bbb1"}, + {file = "asyncpg-0.27.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a7a94c03386bb95456b12c66026b3a87d1b965f0f1e5733c36e7229f8f137747"}, + {file = "asyncpg-0.27.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bfc3980b4ba6f97138b04f0d32e8af21d6c9fa1f8e6e140c07d15690a0a99279"}, + {file = "asyncpg-0.27.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9654085f2b22f66952124de13a8071b54453ff972c25c59b5ce1173a4283ffd9"}, + {file = "asyncpg-0.27.0-cp310-cp310-win32.whl", hash = "sha256:879c29a75969eb2722f94443752f4720d560d1e748474de54ae8dd230bc4956b"}, + {file = "asyncpg-0.27.0-cp310-cp310-win_amd64.whl", hash = "sha256:ab0f21c4818d46a60ca789ebc92327d6d874d3b7ccff3963f7af0a21dc6cff52"}, + {file = "asyncpg-0.27.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:18f77e8e71e826ba2d0c3ba6764930776719ae2b225ca07e014590545928b576"}, + {file = "asyncpg-0.27.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c2232d4625c558f2aa001942cac1d7952aa9f0dbfc212f63bc754277769e1ef2"}, + {file = "asyncpg-0.27.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9a3a4ff43702d39e3c97a8786314123d314e0f0e4dabc8367db5b665c93914de"}, + {file = "asyncpg-0.27.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccddb9419ab4e1c48742457d0c0362dbdaeb9b28e6875115abfe319b29ee225d"}, + {file = "asyncpg-0.27.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:768e0e7c2898d40b16d4ef7a0b44e8150db3dd8995b4652aa1fe2902e92c7df8"}, + {file = "asyncpg-0.27.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:609054a1f47292a905582a1cfcca51a6f3f30ab9d822448693e66fdddde27920"}, + {file = "asyncpg-0.27.0-cp311-cp311-win32.whl", hash = "sha256:8113e17cfe236dc2277ec844ba9b3d5312f61bd2fdae6d3ed1c1cdd75f6cf2d8"}, + {file = "asyncpg-0.27.0-cp311-cp311-win_amd64.whl", hash = "sha256:bb71211414dd1eeb8d31ec529fe77cff04bf53efc783a5f6f0a32d84923f45cf"}, + {file = "asyncpg-0.27.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4750f5cf49ed48a6e49c6e5aed390eee367694636c2dcfaf4a273ca832c5c43c"}, + {file = "asyncpg-0.27.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:eca01eb112a39d31cc4abb93a5aef2a81514c23f70956729f42fb83b11b3483f"}, + {file = "asyncpg-0.27.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5710cb0937f696ce303f5eed6d272e3f057339bb4139378ccecafa9ee923a71c"}, + {file = "asyncpg-0.27.0-cp37-cp37m-win_amd64.whl", hash = "sha256:71cca80a056ebe19ec74b7117b09e650990c3ca535ac1c35234a96f65604192f"}, + {file = "asyncpg-0.27.0-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:4bb366ae34af5b5cabc3ac6a5347dfb6013af38c68af8452f27968d49085ecc0"}, + {file = "asyncpg-0.27.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:16ba8ec2e85d586b4a12bcd03e8d29e3d99e832764d6a1d0b8c27dbbe4a2569d"}, + {file = "asyncpg-0.27.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d20dea7b83651d93b1eb2f353511fe7fd554752844523f17ad30115d8b9c8cd6"}, + {file = "asyncpg-0.27.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e56ac8a8237ad4adec97c0cd4728596885f908053ab725e22900b5902e7f8e69"}, + {file = "asyncpg-0.27.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bf21ebf023ec67335258e0f3d3ad7b91bb9507985ba2b2206346de488267cad0"}, + {file = "asyncpg-0.27.0-cp38-cp38-win32.whl", hash = "sha256:69aa1b443a182b13a17ff926ed6627af2d98f62f2fe5890583270cc4073f63bf"}, + {file = "asyncpg-0.27.0-cp38-cp38-win_amd64.whl", hash = "sha256:62932f29cf2433988fcd799770ec64b374a3691e7902ecf85da14d5e0854d1ea"}, + {file = "asyncpg-0.27.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fddcacf695581a8d856654bc4c8cfb73d5c9df26d5f55201722d3e6a699e9629"}, + {file = "asyncpg-0.27.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7d8585707ecc6661d07367d444bbaa846b4e095d84451340da8df55a3757e152"}, + {file = "asyncpg-0.27.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:975a320baf7020339a67315284a4d3bf7460e664e484672bd3e71dbd881bc692"}, + {file = "asyncpg-0.27.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2232ebae9796d4600a7819fc383da78ab51b32a092795f4555575fc934c1c89d"}, + {file = "asyncpg-0.27.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:88b62164738239f62f4af92567b846a8ef7cf8abf53eddd83650603de4d52163"}, + {file = "asyncpg-0.27.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:eb4b2fdf88af4fb1cc569781a8f933d2a73ee82cd720e0cb4edabbaecf2a905b"}, + {file = "asyncpg-0.27.0-cp39-cp39-win32.whl", hash = "sha256:8934577e1ed13f7d2d9cea3cc016cc6f95c19faedea2c2b56a6f94f257cea672"}, + {file = "asyncpg-0.27.0-cp39-cp39-win_amd64.whl", hash = "sha256:1b6499de06fe035cf2fa932ec5617ed3f37d4ebbf663b655922e105a484a6af9"}, + {file = "asyncpg-0.27.0.tar.gz", hash = "sha256:720986d9a4705dd8a40fdf172036f5ae787225036a7eb46e704c45aa8f62c054"}, +] + +[package.extras] +dev = ["Cython (>=0.29.24,<0.30.0)", "Sphinx (>=4.1.2,<4.2.0)", "flake8 (>=5.0.4,<5.1.0)", "pytest (>=6.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)", "uvloop (>=0.15.3)"] +docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] +test = ["flake8 (>=5.0.4,<5.1.0)", "uvloop (>=0.15.3)"] + +[[package]] +name = "black" +version = "23.7.0" +description = "The uncompromising code formatter." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "black-23.7.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:5c4bc552ab52f6c1c506ccae05681fab58c3f72d59ae6e6639e8885e94fe2587"}, + {file = "black-23.7.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:552513d5cd5694590d7ef6f46e1767a4df9af168d449ff767b13b084c020e63f"}, + {file = "black-23.7.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:86cee259349b4448adb4ef9b204bb4467aae74a386bce85d56ba4f5dc0da27be"}, + {file = "black-23.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:501387a9edcb75d7ae8a4412bb8749900386eaef258f1aefab18adddea1936bc"}, + {file = "black-23.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb074d8b213749fa1d077d630db0d5f8cc3b2ae63587ad4116e8a436e9bbe995"}, + {file = "black-23.7.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b5b0ee6d96b345a8b420100b7d71ebfdd19fab5e8301aff48ec270042cd40ac2"}, + {file = "black-23.7.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:893695a76b140881531062d48476ebe4a48f5d1e9388177e175d76234ca247cd"}, + {file = "black-23.7.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:c333286dc3ddca6fdff74670b911cccedacb4ef0a60b34e491b8a67c833b343a"}, + {file = "black-23.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:831d8f54c3a8c8cf55f64d0422ee875eecac26f5f649fb6c1df65316b67c8926"}, + {file = "black-23.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:7f3bf2dec7d541b4619b8ce526bda74a6b0bffc480a163fed32eb8b3c9aed8ad"}, + {file = "black-23.7.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:f9062af71c59c004cd519e2fb8f5d25d39e46d3af011b41ab43b9c74e27e236f"}, + {file = "black-23.7.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:01ede61aac8c154b55f35301fac3e730baf0c9cf8120f65a9cd61a81cfb4a0c3"}, + {file = "black-23.7.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:327a8c2550ddc573b51e2c352adb88143464bb9d92c10416feb86b0f5aee5ff6"}, + {file = "black-23.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1c6022b86f83b632d06f2b02774134def5d4d4f1dac8bef16d90cda18ba28a"}, + {file = "black-23.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:27eb7a0c71604d5de083757fbdb245b1a4fae60e9596514c6ec497eb63f95320"}, + {file = "black-23.7.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:8417dbd2f57b5701492cd46edcecc4f9208dc75529bcf76c514864e48da867d9"}, + {file = "black-23.7.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:47e56d83aad53ca140da0af87678fb38e44fd6bc0af71eebab2d1f59b1acf1d3"}, + {file = "black-23.7.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:25cc308838fe71f7065df53aedd20327969d05671bac95b38fdf37ebe70ac087"}, + {file = "black-23.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:642496b675095d423f9b8448243336f8ec71c9d4d57ec17bf795b67f08132a91"}, + {file = "black-23.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:ad0014efc7acf0bd745792bd0d8857413652979200ab924fbf239062adc12491"}, + {file = "black-23.7.0-py3-none-any.whl", hash = "sha256:9fd59d418c60c0348505f2ddf9609c1e1de8e7493eab96198fc89d9f865e7a96"}, + {file = "black-23.7.0.tar.gz", hash = "sha256:022a582720b0d9480ed82576c920a8c1dde97cc38ff11d8d8859b3bd6ca9eedb"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop 
(>=0.15.2)"] + +[[package]] +name = "blinker" +version = "1.6.2" +description = "Fast, simple object-to-object and broadcast signaling" +optional = false +python-versions = ">=3.7" +files = [ + {file = "blinker-1.6.2-py3-none-any.whl", hash = "sha256:c3d739772abb7bc2860abf5f2ec284223d9ad5c76da018234f6f50d6f31ab1f0"}, + {file = "blinker-1.6.2.tar.gz", hash = "sha256:4afd3de66ef3a9f8067559fb7a1cbe555c17dcbe15971b05d1b625c3e7abe213"}, +] + +[[package]] +name = "certifi" +version = "2023.7.22" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, + {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, +] + +[[package]] +name = "cffi" +version = "1.15.1" +description = "Foreign Function Interface for Python calling C code." +optional = false +python-versions = "*" +files = [ + {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, + {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, + {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, + {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = 
"sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, + {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, + {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, + {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, + {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, + {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, + {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, + {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, + {file = 
"cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, + {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, + {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, + {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, + {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, + {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, + {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = 
"sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, + {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, + {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "cfgv" +version = "3.4.0" +description = "Validate configuration and produce human readable error messages." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, + {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "cryptography" +version = "41.0.4" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "cryptography-41.0.4-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:80907d3faa55dc5434a16579952ac6da800935cd98d14dbd62f6f042c7f5e839"}, + {file = "cryptography-41.0.4-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:35c00f637cd0b9d5b6c6bd11b6c3359194a8eba9c46d4e875a3660e3b400005f"}, + {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cecfefa17042941f94ab54f769c8ce0fe14beff2694e9ac684176a2535bf9714"}, + {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e40211b4923ba5a6dc9769eab704bdb3fbb58d56c5b336d30996c24fcf12aadb"}, + {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:23a25c09dfd0d9f28da2352503b23e086f8e78096b9fd585d1d14eca01613e13"}, + {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2ed09183922d66c4ec5fdaa59b4d14e105c084dd0febd27452de8f6f74704143"}, + {file = "cryptography-41.0.4-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:5a0f09cefded00e648a127048119f77bc2b2ec61e736660b5789e638f43cc397"}, + {file = "cryptography-41.0.4-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:9eeb77214afae972a00dee47382d2591abe77bdae166bda672fb1e24702a3860"}, + {file = "cryptography-41.0.4-cp37-abi3-win32.whl", hash = "sha256:3b224890962a2d7b57cf5eeb16ccaafba6083f7b811829f00476309bce2fe0fd"}, + {file = "cryptography-41.0.4-cp37-abi3-win_amd64.whl", hash = "sha256:c880eba5175f4307129784eca96f4e70b88e57aa3f680aeba3bab0e980b0f37d"}, + {file = "cryptography-41.0.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:004b6ccc95943f6a9ad3142cfabcc769d7ee38a3f60fb0dddbfb431f818c3a67"}, + {file = "cryptography-41.0.4-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:86defa8d248c3fa029da68ce61fe735432b047e32179883bdb1e79ed9bb8195e"}, + {file = "cryptography-41.0.4-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:37480760ae08065437e6573d14be973112c9e6dcaf5f11d00147ee74f37a3829"}, + {file = "cryptography-41.0.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b5f4dfe950ff0479f1f00eda09c18798d4f49b98f4e2006d644b3301682ebdca"}, + {file = "cryptography-41.0.4-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7e53db173370dea832190870e975a1e09c86a879b613948f09eb49324218c14d"}, + {file = "cryptography-41.0.4-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5b72205a360f3b6176485a333256b9bcd48700fc755fef51c8e7e67c4b63e3ac"}, + {file = "cryptography-41.0.4-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:93530900d14c37a46ce3d6c9e6fd35dbe5f5601bf6b3a5c325c7bffc030344d9"}, + {file = "cryptography-41.0.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:efc8ad4e6fc4f1752ebfb58aefece8b4e3c4cae940b0994d43649bdfce8d0d4f"}, + {file = "cryptography-41.0.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c3391bd8e6de35f6f1140e50aaeb3e2b3d6a9012536ca23ab0d9c35ec18c8a91"}, + {file = "cryptography-41.0.4-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:0d9409894f495d465fe6fda92cb70e8323e9648af912d5b9141d616df40a87b8"}, + {file = "cryptography-41.0.4-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8ac4f9ead4bbd0bc8ab2d318f97d85147167a488be0e08814a37eb2f439d5cf6"}, + {file = "cryptography-41.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:047c4603aeb4bbd8db2756e38f5b8bd7e94318c047cfe4efeb5d715e08b49311"}, + {file = "cryptography-41.0.4.tar.gz", hash = 
"sha256:7febc3094125fc126a7f6fb1f420d0da639f3f32cb15c8ff0dc3997c4549f51a"}, +] + +[package.dependencies] +cffi = ">=1.12" + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +nox = ["nox"] +pep8test = ["black", "check-sdist", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] + +[[package]] +name = "dependency-injector" +version = "4.41.0" +description = "Dependency injection framework for Python" +optional = false +python-versions = "*" +files = [ + {file = "dependency-injector-4.41.0.tar.gz", hash = "sha256:939dfc657104bc3e66b67afd3fb2ebb0850c9a1e73d0d26066f2bbdd8735ff9c"}, + {file = "dependency_injector-4.41.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a2381a251b04244125148298212550750e6e1403e9b2850cc62e0e829d050ad3"}, + {file = "dependency_injector-4.41.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75280dfa23f7c88e1bf56c3920d58a43516816de6f6ab2a6650bb8a0f27d5c2c"}, + {file = "dependency_injector-4.41.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63bfba21f8bff654a80e9b9d06dd6c43a442990b73bf89cd471314c11c541ec2"}, + {file = "dependency_injector-4.41.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3535d06416251715b45f8412482b58ec1c6196a4a3baa207f947f0b03a7c4b44"}, + {file = "dependency_injector-4.41.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d09c08c944a25dabfb454238c1a889acd85102b93ae497de523bf9ab7947b28a"}, + {file = "dependency_injector-4.41.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:586a0821720b15932addbefb00f7370fbcd5831d6ebbd6494d774b44ff96d23a"}, + {file = "dependency_injector-4.41.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7fa4970f12a3fc95d8796938b11c41276ad1ff4c447b0e589212eab3fc527a90"}, + {file = "dependency_injector-4.41.0-cp310-cp310-win32.whl", hash = "sha256:d557e40673de984f78dab13ebd68d27fbb2f16d7c4e3b663ea2fa2f9fae6765b"}, + {file = "dependency_injector-4.41.0-cp310-cp310-win_amd64.whl", hash = "sha256:3744c327d18408e74781bd6d8b7738745ee80ef89f2c8daecf9ebd098cb84972"}, + {file = "dependency_injector-4.41.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:89c67edffe7007cf33cee79ecbca38f48efcc2add5c280717af434db6c789377"}, + {file = "dependency_injector-4.41.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:786f7aac592e191c9caafc47732161d807bad65c62f260cd84cd73c7e2d67d6d"}, + {file = "dependency_injector-4.41.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8b61a15bc46a3aa7b29bd8a7384b650aa3a7ef943491e93c49a0540a0b3dda4"}, + {file = "dependency_injector-4.41.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4f113e5d4c3070973ad76e5bda7317e500abae6083d78689f0b6e37cf403abf"}, + {file = "dependency_injector-4.41.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5fa3ed8f0700e47a0e7363f949b4525ffa8277aa1c5b10ca5b41fce4dea61bb9"}, + {file = "dependency_injector-4.41.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:05e15ea0f2b14c1127e8b0d1597fef13f98845679f63bf670ba12dbfc12a16ef"}, + {file = "dependency_injector-4.41.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:3055b3fc47a0d6e5f27defb4166c0d37543a4967c279549b154afaf506ce6efc"}, + {file = "dependency_injector-4.41.0-cp311-cp311-win32.whl", hash = "sha256:37d5954026e3831663518d78bdf4be9c2dbfea691edcb73c813aa3093aa4363a"}, + {file = "dependency_injector-4.41.0-cp311-cp311-win_amd64.whl", hash = "sha256:f89a507e389b7e4d4892dd9a6f5f4da25849e24f73275478634ac594d621ab3f"}, + {file = "dependency_injector-4.41.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:ac79f3c05747f9724bd56c06985e78331fc6c85eb50f3e3f1a35e0c60f9977e9"}, + {file = "dependency_injector-4.41.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75e7a733b372db3144a34020c4233f6b94db2c6342d6d16bc5245b1b941ee2bd"}, + {file = "dependency_injector-4.41.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40936d9384363331910abd59dd244158ec3572abf9d37322f15095315ac99893"}, + {file = "dependency_injector-4.41.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a31d9d60be4b585585081109480cfb2ef564d3b851cb32a139bf8408411a93a"}, + {file = "dependency_injector-4.41.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:953bfac819d32dc72b963767589e0ed372e5e9e78b03fb6b89419d0500d34bbe"}, + {file = "dependency_injector-4.41.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:8f0090ff14038f17a026ca408a3a0b0e7affb6aa7498b2b59d670f40ac970fbe"}, + {file = "dependency_injector-4.41.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:6b29abac56ce347d2eb58a560723e1663ee2125cf5cc38866ed92b84319927ec"}, + {file = "dependency_injector-4.41.0-cp36-cp36m-win32.whl", hash = "sha256:059fbb48333148143e8667a5323d162628dfe27c386bd0ed3deeecfc390338bf"}, + {file = "dependency_injector-4.41.0-cp36-cp36m-win_amd64.whl", hash = "sha256:16de2797dcfcc2263b8672bf0751166f7c7b369ca2ff9246ceb67b65f8e1d802"}, + {file = "dependency_injector-4.41.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c71d30b6708438050675f338edb9a25bea6c258478dbe5ec8405286756a2d347"}, + {file = "dependency_injector-4.41.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d283aee588a72072439e6721cb64aa6cba5bc18c576ef0ab28285a6ec7a9d655"}, + {file = "dependency_injector-4.41.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc852da612c7e347f2fcf921df2eca2718697a49f648a28a63db3ab504fd9510"}, + {file = "dependency_injector-4.41.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:02620454ee8101f77a317f3229935ce687480883d72a40858ff4b0c87c935cce"}, + {file = "dependency_injector-4.41.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7a92680bea1c260e5c0d2d6cd60b0c913cba76a456a147db5ac047ecfcfcc758"}, + {file = "dependency_injector-4.41.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:168334cba3f1cbf55299ef38f0f2e31879115cc767b780c859f7814a52d80abb"}, + {file = "dependency_injector-4.41.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:48b6886a87b4ceb9b9f78550f77b2a5c7d2ce33bc83efd886556ad468cc9c85a"}, + {file = "dependency_injector-4.41.0-cp37-cp37m-win32.whl", hash = "sha256:87be84084a1b922c4ba15e2e5aa900ee24b78a5467997cb7aec0a1d6cdb4a00b"}, + {file = "dependency_injector-4.41.0-cp37-cp37m-win_amd64.whl", hash = "sha256:8b8cf1c6c56f5c18bdbd9f5e93b52ca29cb4d99606d4056e91f0c761eef496dc"}, + {file = "dependency_injector-4.41.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a8686fa330c83251c75c8238697686f7a0e0f6d40658538089165dc72df9bcff"}, + {file = 
"dependency_injector-4.41.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d670a844268dcd758195e58e9a5b39fc74bb8648aba99a13135a4a10ec9cfac"}, + {file = "dependency_injector-4.41.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e3b9d41e0eff4c8e16fea1e33de66ff0030fe51137ca530f3c52ce110447914"}, + {file = "dependency_injector-4.41.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a724e0a737baadb4378f5dc1b079867cc3a88552fcca719b3dba84716828b2"}, + {file = "dependency_injector-4.41.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3588bd887b051d16b8bcabaae1127eb14059a0719a8fe34c8a75ba59321b352c"}, + {file = "dependency_injector-4.41.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:409441122f40e1b4b8582845fdd76deb9dc5c9d6eb74a057b85736ef9e9c671f"}, + {file = "dependency_injector-4.41.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7dcba8665cafec825b7095d5dd80afb5cf14404450eca3fe8b66e1edbf4dbc10"}, + {file = "dependency_injector-4.41.0-cp38-cp38-win32.whl", hash = "sha256:8b51efeaebacaf79ef68edfc65e9687699ccffb3538c4a3ab30d0d77e2db7189"}, + {file = "dependency_injector-4.41.0-cp38-cp38-win_amd64.whl", hash = "sha256:1662e2ef60ac6e681b9e11b5d8b7c17a0f733688916cf695f9540f8f50a61b1e"}, + {file = "dependency_injector-4.41.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:51217cb384b468d7cc355544cec20774859f00812f9a1a71ed7fa701c957b2a7"}, + {file = "dependency_injector-4.41.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3890a12423ae3a9eade035093beba487f8d092ee6c6cb8706f4e7080a56e819"}, + {file = "dependency_injector-4.41.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99ed73b1521bf249e2823a08a730c9f9413a58f4b4290da022e0ad4fb333ba3d"}, + {file = "dependency_injector-4.41.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:300838e9d4f3fbf539892a5a4072851728e23b37a1f467afcf393edd994d88f0"}, + {file = "dependency_injector-4.41.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:56d37b9d2f50a18f059d9abdbea7669a7518bd42b81603c21a27910a2b3f1657"}, + {file = "dependency_injector-4.41.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4a44ca3ce5867513a70b31855b218be3d251f5068ce1c480cc3a4ad24ffd3280"}, + {file = "dependency_injector-4.41.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:67b369592c57549ccdcad0d5fef1ddb9d39af7fed8083d76e789ab0111fc6389"}, + {file = "dependency_injector-4.41.0-cp39-cp39-win32.whl", hash = "sha256:740a8e8106a04d3f44b52b25b80570fdac96a8a3934423de7c9202c5623e7936"}, + {file = "dependency_injector-4.41.0-cp39-cp39-win_amd64.whl", hash = "sha256:22b11dbf696e184f0b3d5ac4e5418aeac3c379ba4ea758c04a83869b7e5d1cbf"}, + {file = "dependency_injector-4.41.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b365a8548e9a49049fa6acb24d3cd939f619eeb8e300ca3e156e44402dcc07ec"}, + {file = "dependency_injector-4.41.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5168dc59808317dc4cdd235aa5d7d556d33e5600156acaf224cead236b48a3e8"}, + {file = "dependency_injector-4.41.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e3229d83e99e255451605d5276604386e06ad948e3d60f31ddd796781c77f76f"}, + {file = "dependency_injector-4.41.0-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:1baee908f21190bdc46a65ce4c417a5175e9397ca62354928694fce218f84487"}, + {file = "dependency_injector-4.41.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:b37f36ecb0c1227f697e1d4a029644e3eda8dd0f0716aa63ad04d96dbb15bbbb"}, + {file = "dependency_injector-4.41.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b0c9c966ff66c77364a2d43d08de9968aff7e3903938fe912ba49796b2133344"}, + {file = "dependency_injector-4.41.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12e91ac0333e7e589421943ff6c6bf9cf0d9ac9703301cec37ccff3723406332"}, + {file = "dependency_injector-4.41.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2440b32474d4e747209528ca3ae48f42563b2fbe3d74dbfe949c11dfbfef7c4"}, + {file = "dependency_injector-4.41.0-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54032d62610cf2f4421c9d92cef52957215aaa0bca403cda580c58eb3f726eda"}, + {file = "dependency_injector-4.41.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:76b94c8310929e54136f3cb3de3adc86d1a657b3984299f40bf1cd2ba0bae548"}, + {file = "dependency_injector-4.41.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6ee9810841c6e0599356cb884d16453bfca6ab739d0e4f0248724ed8f9ee0d79"}, + {file = "dependency_injector-4.41.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b98945edae88e777091bf0848f869fb94bd76dfa4066d7c870a5caa933391d0"}, + {file = "dependency_injector-4.41.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a2dee5d4abdd21f1a30a51d46645c095be9dcc404c7c6e9f81d0a01415a49e64"}, + {file = "dependency_injector-4.41.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d03f5fa0fa98a18bd0dfce846db80e2798607f0b861f1f99c97f441f7669d7a2"}, + {file = "dependency_injector-4.41.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f2842e15bae664a9f69932e922b02afa055c91efec959cb1896f6c499bf68180"}, +] + +[package.dependencies] +six = ">=1.7.0,<=1.16.0" + +[package.extras] +aiohttp = ["aiohttp"] +flask = ["flask"] +pydantic = ["pydantic"] +yaml = ["pyyaml"] + +[[package]] +name = "distlib" +version = "0.3.7" +description = "Distribution utilities" +optional = false +python-versions = "*" +files = [ + {file = "distlib-0.3.7-py2.py3-none-any.whl", hash = "sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057"}, + {file = "distlib-0.3.7.tar.gz", hash = "sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8"}, +] + +[[package]] +name = "dnspython" +version = "2.4.2" +description = "DNS toolkit" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "dnspython-2.4.2-py3-none-any.whl", hash = "sha256:57c6fbaaeaaf39c891292012060beb141791735dbb4004798328fc2c467402d8"}, + {file = "dnspython-2.4.2.tar.gz", hash = "sha256:8dcfae8c7460a2f84b4072e26f1c9f4101ca20c071649cb7c34e8b6a93d58984"}, +] + +[package.extras] +dnssec = ["cryptography (>=2.6,<42.0)"] +doh = ["h2 (>=4.1.0)", "httpcore (>=0.17.3)", "httpx (>=0.24.1)"] +doq = ["aioquic (>=0.9.20)"] +idna = ["idna (>=2.1,<4.0)"] +trio = ["trio (>=0.14,<0.23)"] +wmi = ["wmi (>=1.5.1,<2.0.0)"] + +[[package]] +name = "ecdsa" +version = "0.18.0" +description = "ECDSA cryptographic signature library (pure python)" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = 
"ecdsa-0.18.0-py2.py3-none-any.whl", hash = "sha256:80600258e7ed2f16b9aa1d7c295bd70194109ad5a30fdee0eaeefef1d4c559dd"}, + {file = "ecdsa-0.18.0.tar.gz", hash = "sha256:190348041559e21b22a1d65cee485282ca11a6f81d503fddb84d5017e9ed1e49"}, +] + +[package.dependencies] +six = ">=1.9.0" + +[package.extras] +gmpy = ["gmpy"] +gmpy2 = ["gmpy2"] + +[[package]] +name = "email-validator" +version = "2.0.0.post2" +description = "A robust email address syntax and deliverability validation library." +optional = false +python-versions = ">=3.7" +files = [ + {file = "email_validator-2.0.0.post2-py3-none-any.whl", hash = "sha256:2466ba57cda361fb7309fd3d5a225723c788ca4bbad32a0ebd5373b99730285c"}, + {file = "email_validator-2.0.0.post2.tar.gz", hash = "sha256:1ff6e86044200c56ae23595695c54e9614f4a9551e0e393614f764860b3d7900"}, +] + +[package.dependencies] +dnspython = ">=2.0.0" +idna = ">=2.0.0" + +[[package]] +name = "faker" +version = "19.13.0" +description = "Faker is a Python package that generates fake data for you." +optional = false +python-versions = ">=3.8" +files = [ + {file = "Faker-19.13.0-py3-none-any.whl", hash = "sha256:da880a76322db7a879c848a0771e129338e0a680a9f695fd9a3e7a6ac82b45e1"}, + {file = "Faker-19.13.0.tar.gz", hash = "sha256:14ccb0aec342d33aa3889a864a56e5b3c2d56bce1b89f9189f4fbc128b9afc1e"}, +] + +[package.dependencies] +python-dateutil = ">=2.4" + +[[package]] +name = "fastapi" +version = "0.100.1" +description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" +optional = false +python-versions = ">=3.7" +files = [ + {file = "fastapi-0.100.1-py3-none-any.whl", hash = "sha256:ec6dd52bfc4eff3063cfcd0713b43c87640fefb2687bbbe3d8a08d94049cdf32"}, + {file = "fastapi-0.100.1.tar.gz", hash = "sha256:522700d7a469e4a973d92321ab93312448fbe20fca9c8da97effc7e7bc56df23"}, +] + +[package.dependencies] +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<3.0.0" +starlette = ">=0.27.0,<0.28.0" +typing-extensions = ">=4.5.0" + +[package.extras] +all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.5)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] + +[[package]] +name = "fastapi-jwt" +version = "0.1.12" +description = "`FastAPI` extension for JTW Auth" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fastapi_jwt-0.1.12-py3-none-any.whl", hash = "sha256:d53c7a768c9410ad8ba0e554f948f8c55705be750b85d030c15b8038617718c2"}, +] + +[package.dependencies] +fastapi = ">=0.50.0" +python-jose = {version = ">=3.3.0", extras = ["cryptography"]} + +[package.extras] +docs = ["MkAutoDoc (>=0.2.0,<1.0.0)", "lazydocs (>=0.4.5,<1.0.0)", "mike (>=1.1.0,<2.0.0)", "mkdocs (>=1.4.0,<2.0.0)", "mkdocs-awesome-pages-plugin (>=2.8.0,<3.0.0)", "mkdocs-include-markdown-plugin (>=4.0.0,<5.0.0)", "mkdocs-material (>=9.0.0,<10.0.0)"] +test = ["black (==23.1.0)", "flake8 (>=6.0.0,<7.0.0)", "httpx (>=0.23.0,<1.0.0)", "isort (>=5.11.0,<6.0.0)", "mypy (>=1.0.0,<2.0.0)", "pytest (>=7.0.0,<8.0.0)", "pytest-cov (>=4.0.0,<5.0.0)", "pytest-mock (>=3.0.0,<4.0.0)", "requests (>=2.28.0,<3.0.0)", "types-python-jose (==3.3.4.5)"] + +[[package]] +name = "fastapi-mail" +version = "1.4.1" +description = "Simple lightweight mail library for FastApi" +optional = false +python-versions = ">=3.8.1,<4.0" +files = [ + {file = 
"fastapi_mail-1.4.1-py3-none-any.whl", hash = "sha256:fa5ef23b2dea4d3ba4587f4bbb53f8f15274124998fb4e40629b3b636c76c398"}, + {file = "fastapi_mail-1.4.1.tar.gz", hash = "sha256:9095b713bd9d3abb02fe6d7abb637502aaf680b52e177d60f96273ef6bc8bb70"}, +] + +[package.dependencies] +aiosmtplib = ">=2.0,<3.0" +blinker = ">=1.5,<2.0" +email-validator = ">=2.0,<3.0" +Jinja2 = ">=3.0,<4.0" +pydantic = ">=2.0,<3.0" +pydantic_settings = ">=2.0,<3.0" +starlette = ">=0.24,<1.0" + +[package.extras] +httpx = ["httpx[httpx] (>=0.23,<0.24)"] +redis = ["redis[redis] (>=4.3,<5.0)"] + +[[package]] +name = "filelock" +version = "3.12.3" +description = "A platform independent file lock." +optional = false +python-versions = ">=3.8" +files = [ + {file = "filelock-3.12.3-py3-none-any.whl", hash = "sha256:f067e40ccc40f2b48395a80fcbd4728262fab54e232e090a4063ab804179efeb"}, + {file = "filelock-3.12.3.tar.gz", hash = "sha256:0ecc1dd2ec4672a10c8550a8182f1bd0c0a5088470ecd5a125e45f49472fac3d"}, +] + +[package.extras] +docs = ["furo (>=2023.7.26)", "sphinx (>=7.1.2)", "sphinx-autodoc-typehints (>=1.24)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3)", "diff-cover (>=7.7)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "pytest-timeout (>=2.1)"] + +[[package]] +name = "flake8" +version = "6.1.0" +description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = ">=3.8.1" +files = [ + {file = "flake8-6.1.0-py2.py3-none-any.whl", hash = "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5"}, + {file = "flake8-6.1.0.tar.gz", hash = "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23"}, +] + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.11.0,<2.12.0" +pyflakes = ">=3.1.0,<3.2.0" + +[[package]] +name = "gitdb" +version = "4.0.10" +description = "Git Object Database" +optional = false +python-versions = ">=3.7" +files = [ + {file = "gitdb-4.0.10-py3-none-any.whl", hash = "sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7"}, + {file = "gitdb-4.0.10.tar.gz", hash = "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a"}, +] + +[package.dependencies] +smmap = ">=3.0.1,<6" + +[[package]] +name = "gitpython" +version = "3.1.34" +description = "GitPython is a Python library used to interact with Git repositories" +optional = false +python-versions = ">=3.7" +files = [ + {file = "GitPython-3.1.34-py3-none-any.whl", hash = "sha256:5d3802b98a3bae1c2b8ae0e1ff2e4aa16bcdf02c145da34d092324f599f01395"}, + {file = "GitPython-3.1.34.tar.gz", hash = "sha256:85f7d365d1f6bf677ae51039c1ef67ca59091c7ebd5a3509aa399d4eda02d6dd"}, +] + +[package.dependencies] +gitdb = ">=4.0.1,<5" + +[[package]] +name = "greenlet" +version = "2.0.2" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" +files = [ + {file = "greenlet-2.0.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:bdfea8c661e80d3c1c99ad7c3ff74e6e87184895bbaca6ee8cc61209f8b9b85d"}, + {file = "greenlet-2.0.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9d14b83fab60d5e8abe587d51c75b252bcc21683f24699ada8fb275d7712f5a9"}, + {file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"}, + {file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"}, + {file = 
"greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"}, + {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d967650d3f56af314b72df7089d96cda1083a7fc2da05b375d2bc48c82ab3f3c"}, + {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"}, + {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"}, + {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"}, + {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d75209eed723105f9596807495d58d10b3470fa6732dd6756595e89925ce2470"}, + {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a51c9751078733d88e013587b108f1b7a1fb106d402fb390740f002b6f6551a"}, + {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"}, + {file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"}, + {file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"}, + {file = "greenlet-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d4606a527e30548153be1a9f155f4e283d109ffba663a15856089fb55f933e47"}, + {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"}, + {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"}, + {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"}, + {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:eff4eb9b7eb3e4d0cae3d28c283dc16d9bed6b193c2e1ace3ed86ce48ea8df19"}, + {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5454276c07d27a740c5892f4907c86327b632127dd9abec42ee62e12427ff7e3"}, + {file = "greenlet-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:7cafd1208fdbe93b67c7086876f061f660cfddc44f404279c1585bbf3cdc64c5"}, + {file = "greenlet-2.0.2-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:910841381caba4f744a44bf81bfd573c94e10b3045ee00de0cbf436fe50673a6"}, + {file = "greenlet-2.0.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:18a7f18b82b52ee85322d7a7874e676f34ab319b9f8cce5de06067384aa8ff43"}, + {file = "greenlet-2.0.2-cp35-cp35m-win32.whl", hash = "sha256:03a8f4f3430c3b3ff8d10a2a86028c660355ab637cee9333d63d66b56f09d52a"}, + {file = "greenlet-2.0.2-cp35-cp35m-win_amd64.whl", hash = "sha256:4b58adb399c4d61d912c4c331984d60eb66565175cdf4a34792cd9600f21b394"}, + {file = "greenlet-2.0.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:703f18f3fda276b9a916f0934d2fb6d989bf0b4fb5a64825260eb9bfd52d78f0"}, + {file = "greenlet-2.0.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:32e5b64b148966d9cccc2c8d35a671409e45f195864560829f395a54226408d3"}, + {file = 
"greenlet-2.0.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dd11f291565a81d71dab10b7033395b7a3a5456e637cf997a6f33ebdf06f8db"}, + {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0f72c9ddb8cd28532185f54cc1453f2c16fb417a08b53a855c4e6a418edd099"}, + {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd021c754b162c0fb55ad5d6b9d960db667faad0fa2ff25bb6e1301b0b6e6a75"}, + {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:3c9b12575734155d0c09d6c3e10dbd81665d5c18e1a7c6597df72fd05990c8cf"}, + {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b9ec052b06a0524f0e35bd8790686a1da006bd911dd1ef7d50b77bfbad74e292"}, + {file = "greenlet-2.0.2-cp36-cp36m-win32.whl", hash = "sha256:dbfcfc0218093a19c252ca8eb9aee3d29cfdcb586df21049b9d777fd32c14fd9"}, + {file = "greenlet-2.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:9f35ec95538f50292f6d8f2c9c9f8a3c6540bbfec21c9e5b4b751e0a7c20864f"}, + {file = "greenlet-2.0.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:d5508f0b173e6aa47273bdc0a0b5ba055b59662ba7c7ee5119528f466585526b"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:f82d4d717d8ef19188687aa32b8363e96062911e63ba22a0cff7802a8e58e5f1"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9c59a2120b55788e800d82dfa99b9e156ff8f2227f07c5e3012a45a399620b7"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2780572ec463d44c1d3ae850239508dbeb9fed38e294c68d19a24d925d9223ca"}, + {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:937e9020b514ceedb9c830c55d5c9872abc90f4b5862f89c0887033ae33c6f73"}, + {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:36abbf031e1c0f79dd5d596bfaf8e921c41df2bdf54ee1eed921ce1f52999a86"}, + {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:18e98fb3de7dba1c0a852731c3070cf022d14f0d68b4c87a19cc1016f3bb8b33"}, + {file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"}, + {file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"}, + {file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"}, + {file = "greenlet-2.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1087300cf9700bbf455b1b97e24db18f2f77b55302a68272c56209d5587c12d1"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"}, + {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acd2162a36d3de67ee896c43effcd5ee3de247eb00354db411feb025aa319857"}, + {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0bf60faf0bc2468089bdc5edd10555bab6e85152191df713e2ab1fcc86382b5a"}, + {file = 
"greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"}, + {file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"}, + {file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"}, + {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8512a0c38cfd4e66a858ddd1b17705587900dd760c6003998e9472b77b56d417"}, + {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be4ed120b52ae4d974aa40215fcdfde9194d63541c7ded40ee12eb4dda57b76b"}, + {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94c817e84245513926588caf1152e3b559ff794d505555211ca041f032abbb6b"}, + {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1a819eef4b0e0b96bb0d98d797bef17dc1b4a10e8d7446be32d1da33e095dbb8"}, + {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7efde645ca1cc441d6dc4b48c0f7101e8d86b54c8530141b09fd31cef5149ec9"}, + {file = "greenlet-2.0.2-cp39-cp39-win32.whl", hash = "sha256:ea9872c80c132f4663822dd2a08d404073a5a9b5ba6155bea72fb2a79d1093b5"}, + {file = "greenlet-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:db1a39669102a1d8d12b57de2bb7e2ec9066a6f2b3da35ae511ff93b01b5d564"}, + {file = "greenlet-2.0.2.tar.gz", hash = "sha256:e7c8dc13af7db097bed64a051d2dd49e9f0af495c26995c00a9ee842690d34c0"}, +] + +[package.extras] +docs = ["Sphinx", "docutils (<0.18)"] +test = ["objgraph", "psutil"] + +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "httpcore" +version = "0.17.3" +description = "A minimal low-level HTTP client." +optional = false +python-versions = ">=3.7" +files = [ + {file = "httpcore-0.17.3-py3-none-any.whl", hash = "sha256:c2789b767ddddfa2a5782e3199b2b7f6894540b17b16ec26b2c4d8e103510b87"}, + {file = "httpcore-0.17.3.tar.gz", hash = "sha256:a6f30213335e34c1ade7be6ec7c47f19f50c56db36abef1a9dfa3815b1cb3888"}, +] + +[package.dependencies] +anyio = ">=3.0,<5.0" +certifi = "*" +h11 = ">=0.13,<0.15" +sniffio = "==1.*" + +[package.extras] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] + +[[package]] +name = "httpx" +version = "0.24.1" +description = "The next generation HTTP client." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "httpx-0.24.1-py3-none-any.whl", hash = "sha256:06781eb9ac53cde990577af654bd990a4949de37a28bdb4a230d434f3a30b9bd"}, + {file = "httpx-0.24.1.tar.gz", hash = "sha256:5853a43053df830c20f8110c5e69fe44d035d850b2dfe795e196f00fdb774bdd"}, +] + +[package.dependencies] +certifi = "*" +httpcore = ">=0.15.0,<0.18.0" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] + +[[package]] +name = "identify" +version = "2.5.27" +description = "File identification library for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "identify-2.5.27-py2.py3-none-any.whl", hash = "sha256:fdb527b2dfe24602809b2201e033c2a113d7bdf716db3ca8e3243f735dcecaba"}, + {file = "identify-2.5.27.tar.gz", hash = "sha256:287b75b04a0e22d727bc9a41f0d4f3c1bcada97490fa6eabb5b28f0e9097e733"}, +] + +[package.extras] +license = ["ukkonen"] + +[[package]] +name = "idna" +version = "3.4" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] + +[[package]] +name = "isort" +version = "5.12.0" +description = "A Python utility / library to sort Python imports." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, + {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, +] + +[package.extras] +colors = ["colorama (>=0.4.3)"] +pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] +plugins = ["setuptools"] +requirements-deprecated-finder = ["pip-api", "pipreqs"] + +[[package]] +name = "jinja2" +version = "3.1.2" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, + {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "mako" +version = "1.2.4" +description = "A super-fast templating language that borrows the best ideas from the existing templating languages." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Mako-1.2.4-py3-none-any.whl", hash = "sha256:c97c79c018b9165ac9922ae4f32da095ffd3c4e6872b45eded42926deea46818"}, + {file = "Mako-1.2.4.tar.gz", hash = "sha256:d60a3903dc3bb01a18ad6a89cdbe2e4eadc69c0bc8ef1e3773ba53d44c3f7a34"}, +] + +[package.dependencies] +MarkupSafe = ">=0.9.2" + +[package.extras] +babel = ["Babel"] +lingua = ["lingua"] +testing = ["pytest"] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "markupsafe" +version = "2.1.3" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, + {file = 
"MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = 
"sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, + {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, +] + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = 
false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "nodeenv" +version = "1.8.0" +description = "Node.js virtual environment builder" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +files = [ + {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, + {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, +] + +[package.dependencies] +setuptools = "*" + +[[package]] +name = "packaging" +version = "23.1" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, + {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, +] + +[[package]] +name = "passlib" +version = "1.7.4" +description = "comprehensive password hashing framework supporting over 30 schemes" +optional = false +python-versions = "*" +files = [ + {file = "passlib-1.7.4-py2.py3-none-any.whl", hash = "sha256:aa6bca462b8d8bda89c70b382f0c298a20b5560af6cbfa2dce410c0a2fb669f1"}, + {file = "passlib-1.7.4.tar.gz", hash = "sha256:defd50f72b65c5402ab2c573830a6978e5f202ad0d984793c8dde2c4152ebe04"}, +] + +[package.extras] +argon2 = ["argon2-cffi (>=18.2.0)"] +bcrypt = ["bcrypt (>=3.1.0)"] +build-docs = ["cloud-sptheme (>=1.10.1)", "sphinx (>=1.6)", "sphinxcontrib-fulltoc (>=1.2.0)"] +totp = ["cryptography"] + +[[package]] +name = "pathspec" +version = "0.11.2" +description = "Utility library for gitignore style pattern matching of file paths." +optional = false +python-versions = ">=3.7" +files = [ + {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, + {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, +] + +[[package]] +name = "platformdirs" +version = "3.10.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "platformdirs-3.10.0-py3-none-any.whl", hash = "sha256:d7c24979f292f916dc9cbf8648319032f551ea8c49a4c9bf2fb556a02070ec1d"}, + {file = "platformdirs-3.10.0.tar.gz", hash = "sha256:b45696dab2d7cc691a3226759c0d3b00c47c8b6e293d96f6436f733303f77f6d"}, +] + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] + +[[package]] +name = "pre-commit" +version = "3.4.0" +description = "A framework for managing and maintaining multi-language pre-commit hooks." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pre_commit-3.4.0-py2.py3-none-any.whl", hash = "sha256:96d529a951f8b677f730a7212442027e8ba53f9b04d217c4c67dc56c393ad945"}, + {file = "pre_commit-3.4.0.tar.gz", hash = "sha256:6bbd5129a64cad4c0dfaeeb12cd8f7ea7e15b77028d985341478c8af3c759522"}, +] + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +virtualenv = ">=20.10.0" + +[[package]] +name = "pyasn1" +version = "0.5.0" +description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "pyasn1-0.5.0-py2.py3-none-any.whl", hash = "sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57"}, + {file = "pyasn1-0.5.0.tar.gz", hash = "sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde"}, +] + +[[package]] +name = "pycodestyle" +version = "2.11.0" +description = "Python style guide checker" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycodestyle-2.11.0-py2.py3-none-any.whl", hash = "sha256:5d1013ba8dc7895b548be5afb05740ca82454fd899971563d2ef625d090326f8"}, + {file = "pycodestyle-2.11.0.tar.gz", hash = "sha256:259bcc17857d8a8b3b4a2327324b79e5f020a13c16074670f9c8c8f872ea76d0"}, +] + +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] + +[[package]] +name = "pydantic" +version = "2.3.0" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-2.3.0-py3-none-any.whl", hash = "sha256:45b5e446c6dfaad9444819a293b921a40e1db1aa61ea08aede0522529ce90e81"}, + {file = "pydantic-2.3.0.tar.gz", hash = "sha256:1607cc106602284cd4a00882986570472f193fde9cb1259bceeaedb26aa79a6d"}, +] + +[package.dependencies] +annotated-types = ">=0.4.0" +pydantic-core = "2.6.3" +typing-extensions = ">=4.6.1" + +[package.extras] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.6.3" +description = "" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic_core-2.6.3-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:1a0ddaa723c48af27d19f27f1c73bdc615c73686d763388c8683fe34ae777bad"}, + {file = "pydantic_core-2.6.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5cfde4fab34dd1e3a3f7f3db38182ab6c95e4ea91cf322242ee0be5c2f7e3d2f"}, + {file = 
"pydantic_core-2.6.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5493a7027bfc6b108e17c3383959485087d5942e87eb62bbac69829eae9bc1f7"}, + {file = "pydantic_core-2.6.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:84e87c16f582f5c753b7f39a71bd6647255512191be2d2dbf49458c4ef024588"}, + {file = "pydantic_core-2.6.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:522a9c4a4d1924facce7270c84b5134c5cabcb01513213662a2e89cf28c1d309"}, + {file = "pydantic_core-2.6.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaafc776e5edc72b3cad1ccedb5fd869cc5c9a591f1213aa9eba31a781be9ac1"}, + {file = "pydantic_core-2.6.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a750a83b2728299ca12e003d73d1264ad0440f60f4fc9cee54acc489249b728"}, + {file = "pydantic_core-2.6.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9e8b374ef41ad5c461efb7a140ce4730661aadf85958b5c6a3e9cf4e040ff4bb"}, + {file = "pydantic_core-2.6.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b594b64e8568cf09ee5c9501ede37066b9fc41d83d58f55b9952e32141256acd"}, + {file = "pydantic_core-2.6.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2a20c533cb80466c1d42a43a4521669ccad7cf2967830ac62c2c2f9cece63e7e"}, + {file = "pydantic_core-2.6.3-cp310-none-win32.whl", hash = "sha256:04fe5c0a43dec39aedba0ec9579001061d4653a9b53a1366b113aca4a3c05ca7"}, + {file = "pydantic_core-2.6.3-cp310-none-win_amd64.whl", hash = "sha256:6bf7d610ac8f0065a286002a23bcce241ea8248c71988bda538edcc90e0c39ad"}, + {file = "pydantic_core-2.6.3-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:6bcc1ad776fffe25ea5c187a028991c031a00ff92d012ca1cc4714087e575973"}, + {file = "pydantic_core-2.6.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:df14f6332834444b4a37685810216cc8fe1fe91f447332cd56294c984ecbff1c"}, + {file = "pydantic_core-2.6.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0b7486d85293f7f0bbc39b34e1d8aa26210b450bbd3d245ec3d732864009819"}, + {file = "pydantic_core-2.6.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a892b5b1871b301ce20d40b037ffbe33d1407a39639c2b05356acfef5536d26a"}, + {file = "pydantic_core-2.6.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:883daa467865e5766931e07eb20f3e8152324f0adf52658f4d302242c12e2c32"}, + {file = "pydantic_core-2.6.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d4eb77df2964b64ba190eee00b2312a1fd7a862af8918ec70fc2d6308f76ac64"}, + {file = "pydantic_core-2.6.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ce8c84051fa292a5dc54018a40e2a1926fd17980a9422c973e3ebea017aa8da"}, + {file = "pydantic_core-2.6.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:22134a4453bd59b7d1e895c455fe277af9d9d9fbbcb9dc3f4a97b8693e7e2c9b"}, + {file = "pydantic_core-2.6.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:02e1c385095efbd997311d85c6021d32369675c09bcbfff3b69d84e59dc103f6"}, + {file = "pydantic_core-2.6.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d79f1f2f7ebdb9b741296b69049ff44aedd95976bfee38eb4848820628a99b50"}, + {file = "pydantic_core-2.6.3-cp311-none-win32.whl", hash = "sha256:430ddd965ffd068dd70ef4e4d74f2c489c3a313adc28e829dd7262cc0d2dd1e8"}, + {file = "pydantic_core-2.6.3-cp311-none-win_amd64.whl", hash = "sha256:84f8bb34fe76c68c9d96b77c60cef093f5e660ef8e43a6cbfcd991017d375950"}, + 
{file = "pydantic_core-2.6.3-cp311-none-win_arm64.whl", hash = "sha256:5a2a3c9ef904dcdadb550eedf3291ec3f229431b0084666e2c2aa8ff99a103a2"}, + {file = "pydantic_core-2.6.3-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:8421cf496e746cf8d6b677502ed9a0d1e4e956586cd8b221e1312e0841c002d5"}, + {file = "pydantic_core-2.6.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bb128c30cf1df0ab78166ded1ecf876620fb9aac84d2413e8ea1594b588c735d"}, + {file = "pydantic_core-2.6.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37a822f630712817b6ecc09ccc378192ef5ff12e2c9bae97eb5968a6cdf3b862"}, + {file = "pydantic_core-2.6.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:240a015102a0c0cc8114f1cba6444499a8a4d0333e178bc504a5c2196defd456"}, + {file = "pydantic_core-2.6.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f90e5e3afb11268628c89f378f7a1ea3f2fe502a28af4192e30a6cdea1e7d5e"}, + {file = "pydantic_core-2.6.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:340e96c08de1069f3d022a85c2a8c63529fd88709468373b418f4cf2c949fb0e"}, + {file = "pydantic_core-2.6.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1480fa4682e8202b560dcdc9eeec1005f62a15742b813c88cdc01d44e85308e5"}, + {file = "pydantic_core-2.6.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f14546403c2a1d11a130b537dda28f07eb6c1805a43dae4617448074fd49c282"}, + {file = "pydantic_core-2.6.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a87c54e72aa2ef30189dc74427421e074ab4561cf2bf314589f6af5b37f45e6d"}, + {file = "pydantic_core-2.6.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f93255b3e4d64785554e544c1c76cd32f4a354fa79e2eeca5d16ac2e7fdd57aa"}, + {file = "pydantic_core-2.6.3-cp312-none-win32.whl", hash = "sha256:f70dc00a91311a1aea124e5f64569ea44c011b58433981313202c46bccbec0e1"}, + {file = "pydantic_core-2.6.3-cp312-none-win_amd64.whl", hash = "sha256:23470a23614c701b37252618e7851e595060a96a23016f9a084f3f92f5ed5881"}, + {file = "pydantic_core-2.6.3-cp312-none-win_arm64.whl", hash = "sha256:1ac1750df1b4339b543531ce793b8fd5c16660a95d13aecaab26b44ce11775e9"}, + {file = "pydantic_core-2.6.3-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:a53e3195f134bde03620d87a7e2b2f2046e0e5a8195e66d0f244d6d5b2f6d31b"}, + {file = "pydantic_core-2.6.3-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:f2969e8f72c6236c51f91fbb79c33821d12a811e2a94b7aa59c65f8dbdfad34a"}, + {file = "pydantic_core-2.6.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:672174480a85386dd2e681cadd7d951471ad0bb028ed744c895f11f9d51b9ebe"}, + {file = "pydantic_core-2.6.3-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:002d0ea50e17ed982c2d65b480bd975fc41086a5a2f9c924ef8fc54419d1dea3"}, + {file = "pydantic_core-2.6.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ccc13afee44b9006a73d2046068d4df96dc5b333bf3509d9a06d1b42db6d8bf"}, + {file = "pydantic_core-2.6.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:439a0de139556745ae53f9cc9668c6c2053444af940d3ef3ecad95b079bc9987"}, + {file = "pydantic_core-2.6.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d63b7545d489422d417a0cae6f9898618669608750fc5e62156957e609e728a5"}, + {file = "pydantic_core-2.6.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b44c42edc07a50a081672e25dfe6022554b47f91e793066a7b601ca290f71e42"}, + {file 
= "pydantic_core-2.6.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1c721bfc575d57305dd922e6a40a8fe3f762905851d694245807a351ad255c58"}, + {file = "pydantic_core-2.6.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5e4a2cf8c4543f37f5dc881de6c190de08096c53986381daebb56a355be5dfe6"}, + {file = "pydantic_core-2.6.3-cp37-none-win32.whl", hash = "sha256:d9b4916b21931b08096efed090327f8fe78e09ae8f5ad44e07f5c72a7eedb51b"}, + {file = "pydantic_core-2.6.3-cp37-none-win_amd64.whl", hash = "sha256:a8acc9dedd304da161eb071cc7ff1326aa5b66aadec9622b2574ad3ffe225525"}, + {file = "pydantic_core-2.6.3-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:5e9c068f36b9f396399d43bfb6defd4cc99c36215f6ff33ac8b9c14ba15bdf6b"}, + {file = "pydantic_core-2.6.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e61eae9b31799c32c5f9b7be906be3380e699e74b2db26c227c50a5fc7988698"}, + {file = "pydantic_core-2.6.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85463560c67fc65cd86153a4975d0b720b6d7725cf7ee0b2d291288433fc21b"}, + {file = "pydantic_core-2.6.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9616567800bdc83ce136e5847d41008a1d602213d024207b0ff6cab6753fe645"}, + {file = "pydantic_core-2.6.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9e9b65a55bbabda7fccd3500192a79f6e474d8d36e78d1685496aad5f9dbd92c"}, + {file = "pydantic_core-2.6.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f468d520f47807d1eb5d27648393519655eadc578d5dd862d06873cce04c4d1b"}, + {file = "pydantic_core-2.6.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9680dd23055dd874173a3a63a44e7f5a13885a4cfd7e84814be71be24fba83db"}, + {file = "pydantic_core-2.6.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a718d56c4d55efcfc63f680f207c9f19c8376e5a8a67773535e6f7e80e93170"}, + {file = "pydantic_core-2.6.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8ecbac050856eb6c3046dea655b39216597e373aa8e50e134c0e202f9c47efec"}, + {file = "pydantic_core-2.6.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:788be9844a6e5c4612b74512a76b2153f1877cd845410d756841f6c3420230eb"}, + {file = "pydantic_core-2.6.3-cp38-none-win32.whl", hash = "sha256:07a1aec07333bf5adebd8264047d3dc518563d92aca6f2f5b36f505132399efc"}, + {file = "pydantic_core-2.6.3-cp38-none-win_amd64.whl", hash = "sha256:621afe25cc2b3c4ba05fff53525156d5100eb35c6e5a7cf31d66cc9e1963e378"}, + {file = "pydantic_core-2.6.3-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:813aab5bfb19c98ae370952b6f7190f1e28e565909bfc219a0909db168783465"}, + {file = "pydantic_core-2.6.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:50555ba3cb58f9861b7a48c493636b996a617db1a72c18da4d7f16d7b1b9952b"}, + {file = "pydantic_core-2.6.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19e20f8baedd7d987bd3f8005c146e6bcbda7cdeefc36fad50c66adb2dd2da48"}, + {file = "pydantic_core-2.6.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b0a5d7edb76c1c57b95df719af703e796fc8e796447a1da939f97bfa8a918d60"}, + {file = "pydantic_core-2.6.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f06e21ad0b504658a3a9edd3d8530e8cea5723f6ea5d280e8db8efc625b47e49"}, + {file = "pydantic_core-2.6.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea053cefa008fda40f92aab937fb9f183cf8752e41dbc7bc68917884454c6362"}, + {file = 
"pydantic_core-2.6.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:171a4718860790f66d6c2eda1d95dd1edf64f864d2e9f9115840840cf5b5713f"}, + {file = "pydantic_core-2.6.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ed7ceca6aba5331ece96c0e328cd52f0dcf942b8895a1ed2642de50800b79d3"}, + {file = "pydantic_core-2.6.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:acafc4368b289a9f291e204d2c4c75908557d4f36bd3ae937914d4529bf62a76"}, + {file = "pydantic_core-2.6.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1aa712ba150d5105814e53cb141412217146fedc22621e9acff9236d77d2a5ef"}, + {file = "pydantic_core-2.6.3-cp39-none-win32.whl", hash = "sha256:44b4f937b992394a2e81a5c5ce716f3dcc1237281e81b80c748b2da6dd5cf29a"}, + {file = "pydantic_core-2.6.3-cp39-none-win_amd64.whl", hash = "sha256:9b33bf9658cb29ac1a517c11e865112316d09687d767d7a0e4a63d5c640d1b17"}, + {file = "pydantic_core-2.6.3-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d7050899026e708fb185e174c63ebc2c4ee7a0c17b0a96ebc50e1f76a231c057"}, + {file = "pydantic_core-2.6.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:99faba727727b2e59129c59542284efebbddade4f0ae6a29c8b8d3e1f437beb7"}, + {file = "pydantic_core-2.6.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fa159b902d22b283b680ef52b532b29554ea2a7fc39bf354064751369e9dbd7"}, + {file = "pydantic_core-2.6.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:046af9cfb5384f3684eeb3f58a48698ddab8dd870b4b3f67f825353a14441418"}, + {file = "pydantic_core-2.6.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:930bfe73e665ebce3f0da2c6d64455098aaa67e1a00323c74dc752627879fc67"}, + {file = "pydantic_core-2.6.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:85cc4d105747d2aa3c5cf3e37dac50141bff779545ba59a095f4a96b0a460e70"}, + {file = "pydantic_core-2.6.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b25afe9d5c4f60dcbbe2b277a79be114e2e65a16598db8abee2a2dcde24f162b"}, + {file = "pydantic_core-2.6.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e49ce7dc9f925e1fb010fc3d555250139df61fa6e5a0a95ce356329602c11ea9"}, + {file = "pydantic_core-2.6.3-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:2dd50d6a1aef0426a1d0199190c6c43ec89812b1f409e7fe44cb0fbf6dfa733c"}, + {file = "pydantic_core-2.6.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6595b0d8c8711e8e1dc389d52648b923b809f68ac1c6f0baa525c6440aa0daa"}, + {file = "pydantic_core-2.6.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ef724a059396751aef71e847178d66ad7fc3fc969a1a40c29f5aac1aa5f8784"}, + {file = "pydantic_core-2.6.3-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3c8945a105f1589ce8a693753b908815e0748f6279959a4530f6742e1994dcb6"}, + {file = "pydantic_core-2.6.3-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c8c6660089a25d45333cb9db56bb9e347241a6d7509838dbbd1931d0e19dbc7f"}, + {file = "pydantic_core-2.6.3-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:692b4ff5c4e828a38716cfa92667661a39886e71136c97b7dac26edef18767f7"}, + {file = "pydantic_core-2.6.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:f1a5d8f18877474c80b7711d870db0eeef9442691fcdb00adabfc97e183ee0b0"}, + {file = "pydantic_core-2.6.3-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:3796a6152c545339d3b1652183e786df648ecdf7c4f9347e1d30e6750907f5bb"}, + {file = 
"pydantic_core-2.6.3-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:b962700962f6e7a6bd77e5f37320cabac24b4c0f76afeac05e9f93cf0c620014"}, + {file = "pydantic_core-2.6.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56ea80269077003eaa59723bac1d8bacd2cd15ae30456f2890811efc1e3d4413"}, + {file = "pydantic_core-2.6.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75c0ebbebae71ed1e385f7dfd9b74c1cff09fed24a6df43d326dd7f12339ec34"}, + {file = "pydantic_core-2.6.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:252851b38bad3bfda47b104ffd077d4f9604a10cb06fe09d020016a25107bf98"}, + {file = "pydantic_core-2.6.3-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:6656a0ae383d8cd7cc94e91de4e526407b3726049ce8d7939049cbfa426518c8"}, + {file = "pydantic_core-2.6.3-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d9140ded382a5b04a1c030b593ed9bf3088243a0a8b7fa9f071a5736498c5483"}, + {file = "pydantic_core-2.6.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d38bbcef58220f9c81e42c255ef0bf99735d8f11edef69ab0b499da77105158a"}, + {file = "pydantic_core-2.6.3-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:c9d469204abcca28926cbc28ce98f28e50e488767b084fb3fbdf21af11d3de26"}, + {file = "pydantic_core-2.6.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:48c1ed8b02ffea4d5c9c220eda27af02b8149fe58526359b3c07eb391cb353a2"}, + {file = "pydantic_core-2.6.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b2b1bfed698fa410ab81982f681f5b1996d3d994ae8073286515ac4d165c2e7"}, + {file = "pydantic_core-2.6.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf9d42a71a4d7a7c1f14f629e5c30eac451a6fc81827d2beefd57d014c006c4a"}, + {file = "pydantic_core-2.6.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4292ca56751aebbe63a84bbfc3b5717abb09b14d4b4442cc43fd7c49a1529efd"}, + {file = "pydantic_core-2.6.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:7dc2ce039c7290b4ef64334ec7e6ca6494de6eecc81e21cb4f73b9b39991408c"}, + {file = "pydantic_core-2.6.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:615a31b1629e12445c0e9fc8339b41aaa6cc60bd53bf802d5fe3d2c0cda2ae8d"}, + {file = "pydantic_core-2.6.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:1fa1f6312fb84e8c281f32b39affe81984ccd484da6e9d65b3d18c202c666149"}, + {file = "pydantic_core-2.6.3.tar.gz", hash = "sha256:1508f37ba9e3ddc0189e6ff4e2228bd2d3c3a4641cbe8c07177162f76ed696c7"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pydantic-settings" +version = "2.0.3" +description = "Settings management using Pydantic" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic_settings-2.0.3-py3-none-any.whl", hash = "sha256:ddd907b066622bd67603b75e2ff791875540dc485b7307c4fffc015719da8625"}, + {file = "pydantic_settings-2.0.3.tar.gz", hash = "sha256:962dc3672495aad6ae96a4390fac7e593591e144625e5112d359f8f67fb75945"}, +] + +[package.dependencies] +pydantic = ">=2.0.1" +python-dotenv = ">=0.21.0" + +[[package]] +name = "pyflakes" +version = "3.1.0" +description = "passive checker of Python programs" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyflakes-3.1.0-py2.py3-none-any.whl", hash = "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774"}, + {file = "pyflakes-3.1.0.tar.gz", hash = 
"sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"}, +] + +[[package]] +name = "pygments" +version = "2.16.1" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, + {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, +] + +[package.extras] +plugins = ["importlib-metadata"] + +[[package]] +name = "pyngrok" +version = "6.0.0" +description = "A Python wrapper for ngrok." +optional = false +python-versions = ">=3.5" +files = [ + {file = "pyngrok-6.0.0.tar.gz", hash = "sha256:dd8a87944b8e28572e3d126bfb2c81a2948594575f5237eefd3651ae0b487155"}, +] + +[package.dependencies] +PyYAML = "*" + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-dotenv" +version = "1.0.0" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-dotenv-1.0.0.tar.gz", hash = "sha256:a8df96034aae6d2d50a4ebe8216326c61c3eb64836776504fcca410e5937a3ba"}, + {file = "python_dotenv-1.0.0-py3-none-any.whl", hash = "sha256:f5971a9226b701070a4bf2c38c89e5a3f0d64de8debda981d1db98583009122a"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "python-jose" +version = "3.3.0" +description = "JOSE implementation in Python" +optional = false +python-versions = "*" +files = [ + {file = "python-jose-3.3.0.tar.gz", hash = "sha256:55779b5e6ad599c6336191246e95eb2293a9ddebd555f796a65f838f07e5d78a"}, + {file = "python_jose-3.3.0-py2.py3-none-any.whl", hash = "sha256:9b1376b023f8b298536eedd47ae1089bcdb848f1535ab30555cd92002d78923a"}, +] + +[package.dependencies] +cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"cryptography\""} +ecdsa = "!=0.15" +pyasn1 = "*" +rsa = "*" + +[package.extras] +cryptography = ["cryptography (>=3.4.0)"] +pycrypto = ["pyasn1", "pycrypto (>=2.6.0,<2.7.0)"] +pycryptodome = ["pyasn1", "pycryptodome (>=3.3.1,<4.0.0)"] + +[[package]] +name = "python-telegram-bot" +version = "20.5" +description = "We have made you a wrapper you can't refuse" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-telegram-bot-20.5.tar.gz", hash = "sha256:2f45a94c861cbd40440ece2be176ef0fc69e10d84e6dfa17f9a456e32aeece13"}, + {file = "python_telegram_bot-20.5-py3-none-any.whl", hash = "sha256:fc9605a855794231c802cc3948e6f7c319a817b5cd1827371f170bc7ca0ca279"}, +] + +[package.dependencies] +aiolimiter = {version = ">=1.1.0,<1.2.0", optional = true, markers = "extra == \"rate-limiter\""} +httpx = ">=0.24.1,<0.25.0" + +[package.extras] +all = ["APScheduler (>=3.10.4,<3.11.0)", "aiolimiter (>=1.1.0,<1.2.0)", "cachetools (>=5.3.1,<5.4.0)", "cryptography (>=39.0.1)", "httpx[http2]", "httpx[socks]", "pytz (>=2018.6)", "tornado (>=6.2,<7.0)"] +callback-data = ["cachetools (>=5.3.1,<5.4.0)"] +ext = 
["APScheduler (>=3.10.4,<3.11.0)", "aiolimiter (>=1.1.0,<1.2.0)", "cachetools (>=5.3.1,<5.4.0)", "pytz (>=2018.6)", "tornado (>=6.2,<7.0)"] +http2 = ["httpx[http2]"] +job-queue = ["APScheduler (>=3.10.4,<3.11.0)", "pytz (>=2018.6)"] +passport = ["cryptography (>=39.0.1)"] +rate-limiter = ["aiolimiter (>=1.1.0,<1.2.0)"] +socks = ["httpx[socks]"] +webhooks = ["tornado (>=6.2,<7.0)"] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + 
{file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "rich" +version = "13.5.2" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "rich-13.5.2-py3-none-any.whl", hash = "sha256:146a90b3b6b47cac4a73c12866a499e9817426423f57c5a66949c086191a8808"}, + {file = "rich-13.5.2.tar.gz", hash = "sha256:fb9d6c0a0f643c99eed3875b5377a184132ba9be4d61516a55273d3554d75a39"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + +[[package]] +name = "rsa" +version = "4.9" +description = "Pure-Python RSA implementation" +optional = false +python-versions = ">=3.6,<4" +files = [ + {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, + {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, +] + +[package.dependencies] +pyasn1 = ">=0.1.3" + +[[package]] +name = "setuptools" +version = "68.1.2" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-68.1.2-py3-none-any.whl", hash = "sha256:3d8083eed2d13afc9426f227b24fd1659489ec107c0e86cec2ffdde5c92e790b"}, + {file = "setuptools-68.1.2.tar.gz", hash = "sha256:3d4dfa6d95f1b101d695a6160a7626e15583af71a5f52176efa5d39a054d475d"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5,<=7.1.2)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = 
">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "smmap" +version = "5.0.0" +description = "A pure Python implementation of a sliding window memory map manager" +optional = false +python-versions = ">=3.6" +files = [ + {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, + {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, +] + +[[package]] +name = "sniffio" +version = "1.3.0" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, + {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, +] + +[[package]] +name = "sqlalchemy" +version = "2.0.20" +description = "Database Abstraction Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "SQLAlchemy-2.0.20-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759b51346aa388c2e606ee206c0bc6f15a5299f6174d1e10cadbe4530d3c7a98"}, + {file = "SQLAlchemy-2.0.20-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1506e988ebeaaf316f183da601f24eedd7452e163010ea63dbe52dc91c7fc70e"}, + {file = "SQLAlchemy-2.0.20-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5768c268df78bacbde166b48be788b83dddaa2a5974b8810af422ddfe68a9bc8"}, + {file = "SQLAlchemy-2.0.20-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3f0dd6d15b6dc8b28a838a5c48ced7455c3e1fb47b89da9c79cc2090b072a50"}, + {file = "SQLAlchemy-2.0.20-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:243d0fb261f80a26774829bc2cee71df3222587ac789b7eaf6555c5b15651eed"}, + {file = "SQLAlchemy-2.0.20-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6eb6d77c31e1bf4268b4d61b549c341cbff9842f8e115ba6904249c20cb78a61"}, + {file = "SQLAlchemy-2.0.20-cp310-cp310-win32.whl", hash = "sha256:bcb04441f370cbe6e37c2b8d79e4af9e4789f626c595899d94abebe8b38f9a4d"}, + {file = "SQLAlchemy-2.0.20-cp310-cp310-win_amd64.whl", hash = "sha256:d32b5ffef6c5bcb452723a496bad2d4c52b346240c59b3e6dba279f6dcc06c14"}, + {file = "SQLAlchemy-2.0.20-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:dd81466bdbc82b060c3c110b2937ab65ace41dfa7b18681fdfad2f37f27acdd7"}, + {file = "SQLAlchemy-2.0.20-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6fe7d61dc71119e21ddb0094ee994418c12f68c61b3d263ebaae50ea8399c4d4"}, + {file = "SQLAlchemy-2.0.20-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4e571af672e1bb710b3cc1a9794b55bce1eae5aed41a608c0401885e3491179"}, + {file = "SQLAlchemy-2.0.20-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3364b7066b3c7f4437dd345d47271f1251e0cfb0aba67e785343cdbdb0fff08c"}, + {file = "SQLAlchemy-2.0.20-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1be86ccea0c965a1e8cd6ccf6884b924c319fcc85765f16c69f1ae7148eba64b"}, + {file = "SQLAlchemy-2.0.20-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1d35d49a972649b5080557c603110620a86aa11db350d7a7cb0f0a3f611948a0"}, + {file = "SQLAlchemy-2.0.20-cp311-cp311-win32.whl", hash = 
"sha256:27d554ef5d12501898d88d255c54eef8414576f34672e02fe96d75908993cf53"}, + {file = "SQLAlchemy-2.0.20-cp311-cp311-win_amd64.whl", hash = "sha256:411e7f140200c02c4b953b3dbd08351c9f9818d2bd591b56d0fa0716bd014f1e"}, + {file = "SQLAlchemy-2.0.20-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3c6aceebbc47db04f2d779db03afeaa2c73ea3f8dcd3987eb9efdb987ffa09a3"}, + {file = "SQLAlchemy-2.0.20-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d3f175410a6db0ad96b10bfbb0a5530ecd4fcf1e2b5d83d968dd64791f810ed"}, + {file = "SQLAlchemy-2.0.20-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea8186be85da6587456c9ddc7bf480ebad1a0e6dcbad3967c4821233a4d4df57"}, + {file = "SQLAlchemy-2.0.20-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c3d99ba99007dab8233f635c32b5cd24fb1df8d64e17bc7df136cedbea427897"}, + {file = "SQLAlchemy-2.0.20-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:76fdfc0f6f5341987474ff48e7a66c3cd2b8a71ddda01fa82fedb180b961630a"}, + {file = "SQLAlchemy-2.0.20-cp37-cp37m-win32.whl", hash = "sha256:d3793dcf5bc4d74ae1e9db15121250c2da476e1af8e45a1d9a52b1513a393459"}, + {file = "SQLAlchemy-2.0.20-cp37-cp37m-win_amd64.whl", hash = "sha256:79fde625a0a55220d3624e64101ed68a059c1c1f126c74f08a42097a72ff66a9"}, + {file = "SQLAlchemy-2.0.20-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:599ccd23a7146e126be1c7632d1d47847fa9f333104d03325c4e15440fc7d927"}, + {file = "SQLAlchemy-2.0.20-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1a58052b5a93425f656675673ef1f7e005a3b72e3f2c91b8acca1b27ccadf5f4"}, + {file = "SQLAlchemy-2.0.20-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79543f945be7a5ada9943d555cf9b1531cfea49241809dd1183701f94a748624"}, + {file = "SQLAlchemy-2.0.20-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63e73da7fb030ae0a46a9ffbeef7e892f5def4baf8064786d040d45c1d6d1dc5"}, + {file = "SQLAlchemy-2.0.20-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3ce5e81b800a8afc870bb8e0a275d81957e16f8c4b62415a7b386f29a0cb9763"}, + {file = "SQLAlchemy-2.0.20-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cb0d3e94c2a84215532d9bcf10229476ffd3b08f481c53754113b794afb62d14"}, + {file = "SQLAlchemy-2.0.20-cp38-cp38-win32.whl", hash = "sha256:8dd77fd6648b677d7742d2c3cc105a66e2681cc5e5fb247b88c7a7b78351cf74"}, + {file = "SQLAlchemy-2.0.20-cp38-cp38-win_amd64.whl", hash = "sha256:6f8a934f9dfdf762c844e5164046a9cea25fabbc9ec865c023fe7f300f11ca4a"}, + {file = "SQLAlchemy-2.0.20-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:26a3399eaf65e9ab2690c07bd5cf898b639e76903e0abad096cd609233ce5208"}, + {file = "SQLAlchemy-2.0.20-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4cde2e1096cbb3e62002efdb7050113aa5f01718035ba9f29f9d89c3758e7e4e"}, + {file = "SQLAlchemy-2.0.20-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1b09ba72e4e6d341bb5bdd3564f1cea6095d4c3632e45dc69375a1dbe4e26ec"}, + {file = "SQLAlchemy-2.0.20-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b74eeafaa11372627ce94e4dc88a6751b2b4d263015b3523e2b1e57291102f0"}, + {file = "SQLAlchemy-2.0.20-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:77d37c1b4e64c926fa3de23e8244b964aab92963d0f74d98cbc0783a9e04f501"}, + {file = "SQLAlchemy-2.0.20-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:eefebcc5c555803065128401a1e224a64607259b5eb907021bf9b175f315d2a6"}, + {file = "SQLAlchemy-2.0.20-cp39-cp39-win32.whl", hash = 
"sha256:3423dc2a3b94125094897118b52bdf4d37daf142cbcf26d48af284b763ab90e9"}, + {file = "SQLAlchemy-2.0.20-cp39-cp39-win_amd64.whl", hash = "sha256:5ed61e3463021763b853628aef8bc5d469fe12d95f82c74ef605049d810f3267"}, + {file = "SQLAlchemy-2.0.20-py3-none-any.whl", hash = "sha256:63a368231c53c93e2b67d0c5556a9836fdcd383f7e3026a39602aad775b14acf"}, + {file = "SQLAlchemy-2.0.20.tar.gz", hash = "sha256:ca8a5ff2aa7f3ade6c498aaafce25b1eaeabe4e42b73e25519183e4566a16fc6"}, +] + +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} +typing-extensions = ">=4.2.0" + +[package.extras] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx-oracle (>=7)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] +sqlcipher = ["sqlcipher3-binary"] + +[[package]] +name = "starlette" +version = "0.27.0" +description = "The little ASGI library that shines." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "starlette-0.27.0-py3-none-any.whl", hash = "sha256:918416370e846586541235ccd38a474c08b80443ed31c578a418e2209b3eef91"}, + {file = "starlette-0.27.0.tar.gz", hash = "sha256:6a6b0d042acb8d469a01eba54e9cda6cbd24ac602c4cd016723117d6a7e73b75"}, +] + +[package.dependencies] +anyio = ">=3.4.0,<5" + +[package.extras] +full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart", "pyyaml"] + +[[package]] +name = "structlog" +version = "23.1.0" +description = "Structured Logging for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "structlog-23.1.0-py3-none-any.whl", hash = "sha256:79b9e68e48b54e373441e130fa447944e6f87a05b35de23138e475c05d0f7e0e"}, + {file = "structlog-23.1.0.tar.gz", hash = "sha256:270d681dd7d163c11ba500bc914b2472d2b50a8ef00faa999ded5ff83a2f906b"}, +] + +[package.extras] +dev = ["structlog[docs,tests,typing]"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-mermaid", "twisted"] +tests = ["coverage[toml]", "freezegun (>=0.2.8)", "pretend", "pytest (>=6.0)", "pytest-asyncio (>=0.17)", "simplejson"] +typing = ["mypy", "rich", "twisted"] + +[[package]] +name = "typing-extensions" +version = "4.7.1" +description = "Backported and Experimental Type Hints for Python 3.7+" +optional = false +python-versions = ">=3.7" +files = [ + {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, + {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, +] + +[[package]] +name = "uvicorn" +version = "0.21.1" +description = "The lightning-fast ASGI server." +optional = false +python-versions = ">=3.7" +files = [ + {file = "uvicorn-0.21.1-py3-none-any.whl", hash = "sha256:e47cac98a6da10cd41e6fd036d472c6f58ede6c5dbee3dbee3ef7a100ed97742"}, + {file = "uvicorn-0.21.1.tar.gz", hash = "sha256:0fac9cb342ba099e0d582966005f3fdba5b0290579fed4a6266dc702ca7bb032"}, +] + +[package.dependencies] +click = ">=7.0" +h11 = ">=0.8" + +[package.extras] +standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] + +[[package]] +name = "virtualenv" +version = "20.24.4" +description = "Virtual Python Environment builder" +optional = false +python-versions = ">=3.7" +files = [ + {file = "virtualenv-20.24.4-py3-none-any.whl", hash = "sha256:29c70bb9b88510f6414ac3e55c8b413a1f96239b6b789ca123437d5e892190cb"}, + {file = "virtualenv-20.24.4.tar.gz", hash = "sha256:772b05bfda7ed3b8ecd16021ca9716273ad9f4467c801f27e83ac73430246dca"}, +] + +[package.dependencies] +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<4" + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.11" +content-hash = "904ac2248c35892f2c8c31fe705574513cb1aba88545372dfd65383b30e3573c" diff --git a/pyproject.toml b/pyproject.toml new file 
mode 100644 index 00000000..8e632b2a --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,68 @@ +[tool.poetry] +name = "procharity-back-2-0" +version = "0.1.0" +description = "" +authors = ["Your Name "] +readme = "README.md" +packages = [{include = "src"}] + +[tool.black] +line-length = 120 +include = '\.pyi?$' +exclude = ''' +( + /( + | .env + | .env.example + | .git + | __pycache__ + | venv/ + | env/ + | .*/tests/* + | .*/migrations/* + | .*/migrations/* + )/ +) +''' + +[tool.isort] +multi_line_output = 3 +include_trailing_comma = true +force_grid_wrap = 0 +use_parentheses = true +line_length = 120 +skip = ['src/core/db/migrations/',] + +[tool.poetry.dependencies] +python = "^3.11" +fastapi = "^0.100.0" +uvicorn = "^0.21.1" +sqlalchemy = "^2.0.9" +alembic = "^1.10.3" +asyncpg = "^0.27.0" +greenlet = "^2.0.2" +python-telegram-bot = {extras = ["rate-limiter"], version = "^20.2"} +structlog = "^23.1.0" +asgi-correlation-id = "^4.2.0" +rich = "^13.3.5" +fastapi-mail = "^1.4.0" +dependency-injector = "^4.41.0" +passlib = "^1.7.4" +python-jose = "^3.3.0" +fastapi-jwt = "^0.1.12" + + +[tool.poetry.group.dev.dependencies] +flake8 = "^6.0.0" +black = "^23.3.0" +isort = "^5.12.0" +pyngrok = "^6.0.0" +python-dotenv = "^1.0.0" +pre-commit = "^3.3.2" +gitpython = "^3.1.32" +faker = "19.13.0" + + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 00000000..58e662c3 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,51 @@ +aiolimiter==1.1.0 ; python_version >= "3.11" and python_version < "4.0" +aiosmtplib==2.0.2 ; python_version >= "3.11" and python_version < "4.0" +alembic==1.12.0 ; python_version >= "3.11" and python_version < "4.0" +annotated-types==0.5.0 ; python_version >= "3.11" and python_version < "4.0" +anyio==4.0.0 ; python_version >= "3.11" and python_version < "4.0" +asgi-correlation-id==4.2.0 ; python_version >= "3.11" and python_version < "4.0" +asyncpg==0.27.0 ; python_version >= "3.11" and python_version < "4.0" +blinker==1.6.2 ; python_version >= "3.11" and python_version < "4.0" +certifi==2023.7.22 ; python_version >= "3.11" and python_version < "4.0" +cffi==1.15.1 ; python_version >= "3.11" and python_version < "4.0" +click==8.1.7 ; python_version >= "3.11" and python_version < "4.0" +colorama==0.4.6 ; python_version >= "3.11" and python_version < "4.0" and platform_system == "Windows" +cryptography==41.0.4 ; python_version >= "3.11" and python_version < "4.0" +dependency-injector==4.41.0 ; python_version >= "3.11" and python_version < "4.0" +dnspython==2.4.2 ; python_version >= "3.11" and python_version < "4.0" +ecdsa==0.18.0 ; python_version >= "3.11" and python_version < "4.0" +email-validator==2.0.0.post2 ; python_version >= "3.11" and python_version < "4.0" +fastapi-jwt==0.1.12 ; python_version >= "3.11" and python_version < "4.0" +fastapi-mail==1.4.1 ; python_version >= "3.11" and python_version < "4.0" +fastapi==0.100.1 ; python_version >= "3.11" and python_version < "4.0" +faker==19.13.0 ; python_version >= "3.11" and python_version < "4.0" +greenlet==2.0.2 ; python_version >= "3.11" and python_version < "4.0" +h11==0.14.0 ; python_version >= "3.11" and python_version < "4.0" +httpcore==0.17.3 ; python_version >= "3.11" and python_version < "4.0" +httpx==0.24.1 ; python_version >= "3.11" and python_version < "4.0" +idna==3.4 ; python_version >= "3.11" and python_version < "4.0" +jinja2==3.1.2 ; python_version >= "3.11" and python_version < "4.0" +mako==1.2.4 ; python_version 
>= "3.11" and python_version < "4.0"
+markdown-it-py==3.0.0 ; python_version >= "3.11" and python_version < "4.0"
+markupsafe==2.1.3 ; python_version >= "3.11" and python_version < "4.0"
+mdurl==0.1.2 ; python_version >= "3.11" and python_version < "4.0"
+passlib==1.7.4 ; python_version >= "3.11" and python_version < "4.0"
+pyasn1==0.5.0 ; python_version >= "3.11" and python_version < "4.0"
+pycparser==2.21 ; python_version >= "3.11" and python_version < "4.0"
+pydantic-core==2.6.3 ; python_version >= "3.11" and python_version < "4.0"
+pydantic-settings==2.0.3 ; python_version >= "3.11" and python_version < "4.0"
+pydantic==2.3.0 ; python_version >= "3.11" and python_version < "4.0"
+pygments==2.16.1 ; python_version >= "3.11" and python_version < "4.0"
+python-dotenv==1.0.0 ; python_version >= "3.11" and python_version < "4.0"
+python-jose==3.3.0 ; python_version >= "3.11" and python_version < "4.0"
+python-jose[cryptography]==3.3.0 ; python_version >= "3.11" and python_version < "4.0"
+python-telegram-bot[rate-limiter]==20.5 ; python_version >= "3.11" and python_version < "4.0"
+rich==13.5.2 ; python_version >= "3.11" and python_version < "4.0"
+rsa==4.9 ; python_version >= "3.11" and python_version < "4"
+six==1.16.0 ; python_version >= "3.11" and python_version < "4.0"
+sniffio==1.3.0 ; python_version >= "3.11" and python_version < "4.0"
+sqlalchemy==2.0.20 ; python_version >= "3.11" and python_version < "4.0"
+starlette==0.27.0 ; python_version >= "3.11" and python_version < "4.0"
+structlog==23.1.0 ; python_version >= "3.11" and python_version < "4.0"
+typing-extensions==4.7.1 ; python_version >= "3.11" and python_version < "4.0"
+uvicorn==0.21.1 ; python_version >= "3.11" and python_version < "4.0"
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 00000000..8543fc7f
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,31 @@
+[flake8]
+ignore =
+    W503,
+    D
+exclude =
+    .env,
+    .env.example,
+    .git,
+    __pycache__,
+    venv/,
+    env/,
+    */tests/*,
+    */migrations/*,
+max-line-length = 120
+max-complexity = 10
+classmethod-decorators =
+    classmethod,
+    validator,
+    root_validator
+
+[isort]
+skip =
+    .env,
+    .env.example,
+    .git,
+    __pycache__,
+    venv/,
+    env/,
+    tests/*,
+    */migrations/*,
+profile = black
diff --git a/src/__init__.py b/src/__init__.py
new file mode 100644
index 00000000..7c989753
--- /dev/null
+++ b/src/__init__.py
@@ -0,0 +1,3 @@
+from src.main import main
+
+app = main
diff --git a/src/api/__init__.py b/src/api/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/src/api/auth/__init__.py b/src/api/auth/__init__.py
new file mode 100644
index 00000000..01b93484
--- /dev/null
+++ b/src/api/auth/__init__.py
@@ -0,0 +1,3 @@
+from .token import check_header_contains_token
+
+__all__ = ("check_header_contains_token",)
diff --git a/src/api/auth/token.py b/src/api/auth/token.py
new file mode 100644
index 00000000..bce44041
--- /dev/null
+++ b/src/api/auth/token.py
@@ -0,0 +1,33 @@
+import structlog
+from dependency_injector.wiring import Provide, inject
+from fastapi import Depends, Request
+
+from src.core.depends import Container
+from src.core.exceptions import InvalidToken, TokenNotProvided
+from src.settings import Settings
+
+log = structlog.get_logger()
+
+
+@inject
+async def check_header_contains_token(
+    request: Request,
+    settings: Settings = Depends(Provide[Container.settings]),
+):
+    """Проверяем, содержится ли в заголовке запроса token, и сравниваем его
+    со значением ACCESS_TOKEN_FOR_PROCHARITY из settings.py"""
+
+    if not settings.ACCESS_TOKEN_FOR_PROCHARITY:
settings.ACCESS_TOKEN_FOR_PROCHARITY: + await log.awarning( + "ACCESS_TOKEN_FOR_PROCHARITY не определен, возможны проблемы безопасности. Проверьте настройки проекта." + ) + return + match request.headers.get("token"): + case None: + await log.ainfo("В заголовке запроса не содержится токен.") + raise TokenNotProvided + case settings.ACCESS_TOKEN_FOR_PROCHARITY: + return + case _: + await log.ainfo("Токен в заголовке запроса неверный.") + raise InvalidToken diff --git a/src/api/constants.py b/src/api/constants.py new file mode 100644 index 00000000..a3b165fe --- /dev/null +++ b/src/api/constants.py @@ -0,0 +1,56 @@ +DATE_FORMAT = "%d-%m-%Y" +DATE_FORMAT_FOR_STATISTICS = "YYYY-MM-DD" +DATE_FORMAT_FOR_TASK_SCHEMA = "%d.%m.%Y" +DATE_TIME_FORMAT = "%d-%m-%Y %H:%M:%S" +DAYS_NUMBER_FOR_USERS_STATISTIC = 30 +API_DESCRIPTION = """ +## Content + +**Обновление категорий** + +Категории - разделы, помогающие отсортировать размещенные на сайте задания. + +Пользователь может подписаться на одну или несколько категорий, +чат-бот будет присылать только те задания, которые относятся к категориям, +на которые подписался пользователь. + +Категории бывают двух видов - родительские и дочерние. +Родительская категория может содержать несколько дочерних. +Дочерняя категория может относиться только к одной родительской. +Пользователь может подписаться только на дочерние категории. + +Всегда передается полный список актуальных категорий, получаемый от клиента. +Если в списке присутствует категория, которой раньше не было +(это определяется по ее `id`), то она добавляется на сайт. +При изменении данных категории +(имя или ее принадлежность к родительской или дочерней), +актуальные значения добавляются на сайт. +Если ранее существовавшая категория не передается, +то она архивируется и не показывается на сайте. +Если передается категория, которая была ранее заархивирована, +то она добавляется в список актуальных категорий. + +При передаче API-запроса необходимо дополнительно указывать токен авторизации +(webhooks token) в заголовке запроса. +Этот токен уникален для чат-бота (Фичи пока нет) + +**Обновление задач** + +Пользователь видит только задачи, которые относятся к тем категориям, +на которые он подписан. +Задача, которая единожды показывалась пользователю, +более для него не отображается, до тех пор, пока она не обновится. + +Всегда передается полный список актуальных задач, получаемый от клиента. +Если в списке присутствует задача, которой раньше не было +(это определяется по `id`), то она добавляется на сайт. +При изменении данных задачи (название, организация, дэдлайн, +принадлежность к категории, бонус, локация, ссылка на задачу), +актуальные значения добавляются в бот. +Еслиранее существовавшая задача не передается, +то она архивируется и не показывается в боте. +Если передается задача, которая была ранее заархивирована, +то она добавляется в список актуальных задач. + +Задачу можно добавить только к существующей дочерней категории. 
+"""
diff --git a/src/api/endpoints/__init__.py b/src/api/endpoints/__init__.py
new file mode 100644
index 00000000..ed52fe14
--- /dev/null
+++ b/src/api/endpoints/__init__.py
@@ -0,0 +1,19 @@
+from .admin import admin_user_router
+from .analytics import analytic_router
+from .categories import category_router
+from .external_site_user import site_user_router
+from .health_check import health_check_router
+from .notification import notification_router
+from .tasks import task_router
+from .telegram_webhook import telegram_webhook_router
+
+__all__ = (
+    "analytic_router",
+    "category_router",
+    "health_check_router",
+    "task_router",
+    "telegram_webhook_router",
+    "notification_router",
+    "site_user_router",
+    "admin_user_router",
+)
diff --git a/src/api/endpoints/admin.py b/src/api/endpoints/admin.py
new file mode 100644
index 00000000..9ddc654a
--- /dev/null
+++ b/src/api/endpoints/admin.py
@@ -0,0 +1,33 @@
+from http import HTTPStatus
+
+from dependency_injector.wiring import Provide, inject
+from fastapi import APIRouter, Depends, HTTPException
+
+from src.api.schemas import AdminUserRequest
+from src.api.services.admin_service import AdminService
+from src.core.depends import Container
+
+admin_user_router = APIRouter()
+
+
+@admin_user_router.post("/login/", description="Логин для админа")
+@inject
+async def auth(
+    admin_data: AdminUserRequest,
+    admin_service: AdminService = Depends(
+        Provide[Container.api_services_container.admin_service],
+    ),
+    access_security=Depends(
+        Provide[Container.jwt_services_container.access_security],
+    ),
+    refresh_security=Depends(
+        Provide[Container.jwt_services_container.refresh_security],
+    ),
+):
+    user = await admin_service.authenticate_user(admin_data.email, admin_data.password)
+    if user is None:
+        raise HTTPException(status_code=HTTPStatus.BAD_REQUEST, detail="Неверный почтовый адрес или пароль.")
+    data = {"email": admin_data.email, "password": admin_data.password}
+    access_token = access_security.create_access_token(subject=data)
+    refresh_token = refresh_security.create_refresh_token(subject=data)
+    return {"access_token": access_token, "refresh_token": refresh_token}
diff --git a/src/api/endpoints/analytics.py b/src/api/endpoints/analytics.py
new file mode 100644
index 00000000..f260bded
--- /dev/null
+++ b/src/api/endpoints/analytics.py
@@ -0,0 +1,41 @@
+from datetime import date
+
+from dependency_injector.wiring import Provide, inject
+from fastapi import APIRouter, Depends, Query
+
+from src.api.schemas import ActiveTasks, AllUsersStatistic, Analytic, DBStatus, ReasonCancelingStatistics
+from src.api.services import HealthCheckService
+from src.api.services.analytics import AnalyticsService
+from src.core.depends import Container
+
+analytic_router = APIRouter()
+
+
+@analytic_router.get("/", description="Возвращает статистику сервиса.")
+@inject
+async def get_analytics(
+    date_limit: date = Query(..., example="2023-10-11"),
+    analytic_service: AnalyticsService = Depends(Provide[Container.api_services_container.analytic_service]),
+) -> Analytic:
+    return Analytic(
+        number_users=await analytic_service.get_user_number(),
+        reasons_canceling=ReasonCancelingStatistics(**await analytic_service.get_reason_cancelling_statistics()),
+        all_users_statistic=AllUsersStatistic(
+            added_users=await analytic_service.get_added_users_statistic(date_limit),
+            added_external_users=await analytic_service.get_added_external_users_statistic(date_limit),
+            users_unsubscribed=await analytic_service.get_unsubscribed_users_statistic(date_limit),
+        ),
+        tasks=await get_active_tasks_analytic(),
+    )
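For illustration only, a minimal sketch of how an external client might drive the token-protected content endpoints documented in API_DESCRIPTION above, assuming the API is reachable at http://localhost:8000 with an empty ROOT_PATH; the `token` header is the one checked by check_header_contains_token, the payload shape follows CategoryRequest, and the host and token value are placeholders.

import asyncio

import httpx

# Placeholders: a real deployment supplies its own host and ACCESS_TOKEN_FOR_PROCHARITY value.
BASE_URL = "http://localhost:8000"
TOKEN = "<ACCESS_TOKEN_FOR_PROCHARITY>"


async def push_categories() -> None:
    # The full current list of categories: per the actualization protocol above,
    # any previously sent category omitted from this list is archived by the service.
    payload = [
        {"id": 1, "name": "IT", "parent_id": None},
        {"id": 2, "name": "Backend", "parent_id": 1},
    ]
    async with httpx.AsyncClient(base_url=BASE_URL, headers={"token": TOKEN}) as client:
        response = await client.post("/categories/", json=payload)
        response.raise_for_status()


asyncio.run(push_categories())

Because omitting an item archives it on the next call, the client always sends the complete current list rather than a delta; the same convention applies to the tasks endpoint.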
+ + +@inject +async def get_active_tasks_analytic( + health_check_service: HealthCheckService = Depends(Provide[Container.api_services_container.health_check_service]), +) -> ActiveTasks: + db_status: DBStatus = await health_check_service.check_db_connection() + last_update = db_status["last_update"] + if not (db_status["status"] or db_status["last_update"]): + last_update = "Unable to get last_update" + active_tasks = db_status["active_tasks"] + return ActiveTasks(last_update=last_update, active_tasks=active_tasks) diff --git a/src/api/endpoints/categories.py b/src/api/endpoints/categories.py new file mode 100644 index 00000000..8be60490 --- /dev/null +++ b/src/api/endpoints/categories.py @@ -0,0 +1,32 @@ +from dependency_injector.wiring import Provide, inject +from fastapi import APIRouter, Depends + +from src.api.auth import check_header_contains_token +from src.api.schemas import CategoryRequest, CategoryResponse +from src.api.services import CategoryService +from src.core.db.models import Category +from src.core.depends import Container + +category_router = APIRouter(dependencies=[Depends(check_header_contains_token)]) + + +@category_router.get( + "/", + response_model=list[CategoryResponse], + response_model_exclude_none=True, + description="Получает список всех категорий.", +) +@inject +async def get_categories( + category_service: CategoryService = Depends(Provide[Container.api_services_container.category_service]), +) -> list[CategoryResponse]: + return await category_service.get_all() + + +@category_router.post("/", description="Актуализирует список категорий.") +@inject +async def actualize_categories( + categories: list[CategoryRequest], + category_service: CategoryService = Depends(Provide[Container.api_services_container.category_service]), +) -> None: + await category_service.actualize_objects(categories, Category) diff --git a/src/api/endpoints/external_site_user.py b/src/api/endpoints/external_site_user.py new file mode 100644 index 00000000..557f0b72 --- /dev/null +++ b/src/api/endpoints/external_site_user.py @@ -0,0 +1,18 @@ +from dependency_injector.wiring import Provide, inject +from fastapi import APIRouter, Depends + +from src.api.auth import check_header_contains_token +from src.api.schemas import ExternalSiteUserRequest +from src.api.services import ExternalSiteUserService +from src.core.depends import Container + +site_user_router = APIRouter(dependencies=[Depends(check_header_contains_token)]) + + +@site_user_router.post("/external_user_registration/", description="Актуализирует пользователя с сайта ProCharity.") +@inject +async def external_user_registration( + site_user: ExternalSiteUserRequest, + site_user_service: ExternalSiteUserService = Depends(Provide[Container.api_services_container.site_user_service]), +) -> None: + await site_user_service.register(site_user) diff --git a/src/api/endpoints/health_check.py b/src/api/endpoints/health_check.py new file mode 100644 index 00000000..065a6c9a --- /dev/null +++ b/src/api/endpoints/health_check.py @@ -0,0 +1,23 @@ +from dependency_injector.wiring import Provide, inject +from fastapi import APIRouter, Depends + +from src.api.auth import check_header_contains_token +from src.api.schemas import HealthCheck +from src.api.services.health_check import HealthCheckService +from src.core.depends import Container +from src.core.logging.utils import logger_decor + +health_check_router = APIRouter(dependencies=[Depends(check_header_contains_token)]) + + +@logger_decor +@health_check_router.get("/", description="Проверяет 
соединение с БД, ботом и выводит информацию о последнем коммите.") +@inject +async def get_health_check( + health_check_service: HealthCheckService = Depends(Provide[Container.api_services_container.health_check_service]), +) -> HealthCheck: + return HealthCheck( + db=await health_check_service.check_db_connection(), + bot=await health_check_service.check_bot(), + git=await health_check_service.get_last_commit(), + ) diff --git a/src/api/endpoints/notification.py b/src/api/endpoints/notification.py new file mode 100644 index 00000000..690954ed --- /dev/null +++ b/src/api/endpoints/notification.py @@ -0,0 +1,70 @@ +import structlog +from dependency_injector.wiring import Provide, inject +from fastapi import APIRouter, Depends + +from src.api.auth import check_header_contains_token +from src.api.schemas import InfoRate, MessageList, TelegramNotificationRequest, TelegramNotificationUsersRequest +from src.api.services.messages import TelegramNotificationService +from src.core.depends import Container + +notification_router = APIRouter(dependencies=[Depends(check_header_contains_token)]) +log = structlog.get_logger() + + +@notification_router.post( + "/", + response_model=InfoRate, + description="Сообщение для группы пользователей", +) +@inject +async def send_telegram_notification( + notifications: TelegramNotificationUsersRequest, + telegram_notification_service: TelegramNotificationService = Depends( + Provide[Container.api_services_container.message_service] + ), +) -> InfoRate: + """Отправляет сообщение указанной группе пользователей""" + result = await telegram_notification_service.send_messages_to_group_of_users(notifications) + rate = InfoRate() + rate = telegram_notification_service.collect_respond_and_status(result, rate) + return rate + + +@notification_router.post( + "/group", + response_model=InfoRate, + description="Сообщения для разных пользователей", +) +@inject +async def send_messages_to_group_of_users( + message_list: MessageList, + telegram_notification_service: TelegramNotificationService = Depends( + Provide[Container.api_services_container.message_service] + ), +): + await log.ainfo("Начало отправки сообщений для группы пользователей") + rate = InfoRate() + for message in message_list.messages: + status, msg = await telegram_notification_service.send_message_to_user(message.telegram_id, message) + rate = telegram_notification_service.count_rate(status, msg, rate) + await log.ainfo("Конец отправки сообщений для группы пользователей") + return rate + + +@notification_router.post( + "/{telegram_id}", + response_model=InfoRate, + description="Отправляет сообщение определенному пользователю.", +) +@inject +async def send_user_message( + telegram_id: int, + notifications: TelegramNotificationRequest, + telegram_notification_service: TelegramNotificationService = Depends( + Provide[Container.api_services_container.message_service] + ), +) -> InfoRate: + rate = InfoRate() + status, notifications.message = await telegram_notification_service.send_message_to_user(telegram_id, notifications) + rate = telegram_notification_service.count_rate(status, notifications.message, rate) + return rate diff --git a/src/api/endpoints/tasks.py b/src/api/endpoints/tasks.py new file mode 100644 index 00000000..25ba44cd --- /dev/null +++ b/src/api/endpoints/tasks.py @@ -0,0 +1,57 @@ +from dependency_injector.wiring import Provide, inject +from fastapi import APIRouter, Depends + +from src.api.auth import check_header_contains_token +from src.api.schemas import TaskRequest, TaskResponse +from 
src.api.services import TaskService
+from src.api.services.messages import TelegramNotificationService
+from src.core.db.models import Task
+from src.core.depends import Container
+from src.core.utils import display_tasks
+from src.settings import Settings
+
+task_router = APIRouter(dependencies=[Depends(check_header_contains_token)])
+
+
+@task_router.post("/", description="Актуализирует список задач.")
+@inject
+async def actualize_tasks(
+    tasks: list[TaskRequest],
+    task_service: TaskService = Depends(Provide[Container.api_services_container.task_service]),
+    telegram_notification_service: TelegramNotificationService = Depends(
+        Provide[Container.api_services_container.message_service]
+    ),
+    settings: Settings = Depends(Provide[Container.settings]),
+) -> None:
+    new_tasks_ids = await task_service.actualize_objects(tasks, Task)
+    new_category_tasks = await task_service.get_user_tasks_ids(new_tasks_ids)
+    for task in new_category_tasks:
+        message = display_tasks(task, settings.HELP_PROCHARITY_URL)
+        await telegram_notification_service.send_messages_to_subscribed_users(message, task.category_id)
+
+
+@task_router.get(
+    "/{user_id}",
+    response_model=list[TaskResponse],
+    response_model_exclude_none=True,
+    description="Получает список всех задач из категорий, на которые подписан юзер.",
+)
+@inject
+async def get_tasks_for_user(
+    user_id: int,
+    task_service: TaskService = Depends(Provide[Container.api_services_container.task_service]),
+) -> list[TaskResponse]:
+    return await task_service.get_tasks_for_user(user_id)
+
+
+@task_router.get(
+    "/",
+    response_model=list[TaskResponse],
+    response_model_exclude_none=True,
+    description="Получает список всех задач.",
+)
+@inject
+async def get_all_tasks(
+    task_service: TaskService = Depends(Provide[Container.api_services_container.task_service]),
+) -> list[TaskResponse]:
+    return await task_service.get_all()
diff --git a/src/api/endpoints/telegram_webhook.py b/src/api/endpoints/telegram_webhook.py
new file mode 100644
index 00000000..17be1ea5
--- /dev/null
+++ b/src/api/endpoints/telegram_webhook.py
@@ -0,0 +1,23 @@
+from fastapi import APIRouter, Request
+from telegram import Update
+
+from src.core.exceptions.exceptions import UnauthorizedError, WebhookOnError
+from src.settings import settings
+
+telegram_webhook_router = APIRouter()
+
+
+@telegram_webhook_router.post(
+    "/webhook",
+    description="Получить обновления telegram.",
+)
+async def get_telegram_bot_updates(request: Request) -> None:
+    """Получение обновлений telegram в режиме работы бота webhook."""
+    if not settings.BOT_WEBHOOK_MODE:
+        raise WebhookOnError
+    secret_token = request.headers.get("x-telegram-bot-api-secret-token")
+    if secret_token != settings.SECRET_KEY:
+        raise UnauthorizedError
+    bot_instance = request.app.state.bot_instance
+    request_json_data = await request.json()
+    await bot_instance.update_queue.put(Update.de_json(data=request_json_data, bot=bot_instance.bot))
diff --git a/src/api/events.py b/src/api/events.py
new file mode 100644
index 00000000..957a9e76
--- /dev/null
+++ b/src/api/events.py
@@ -0,0 +1,39 @@
+from dependency_injector.wiring import Provide, inject
+from fastapi import FastAPI
+from telegram.ext import Application
+
+from src.bot import shutdown_bot, startup_bot
+from src.core.depends import Container
+from src.core.utils import set_ngrok
+from src.settings import Settings
+
+
+@inject
+async def startup(
+    fastapi_app: FastAPI,
+    run_bot: bool,
+    bot: Application = Provide[Container.applications_container.telegram_bot],
+    settings: Settings = 
Provide[Container.settings], +): + if settings.USE_NGROK is True: + set_ngrok() + if run_bot: + fastapi_app.state.bot_instance = await startup_bot( + bot=bot, + bot_webhook_mode=settings.BOT_WEBHOOK_MODE, + telegram_webhook_url=settings.telegram_webhook_url, + secret_key=settings.SECRET_KEY, + ) + + +@inject +async def shutdown( + fastapi_app: FastAPI, + run_bot: bool, + settings: Settings = Provide[Container.settings], +): + if run_bot: + await shutdown_bot( + fastapi_app.state.bot_instance, + bot_webhook_mode=settings.BOT_WEBHOOK_MODE, + ) diff --git a/src/api/main.py b/src/api/main.py new file mode 100644 index 00000000..456ba354 --- /dev/null +++ b/src/api/main.py @@ -0,0 +1,66 @@ +from asgi_correlation_id import CorrelationIdMiddleware +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware +from fastapi.staticfiles import StaticFiles + +from src.api.constants import API_DESCRIPTION +from src.core.logging.middleware import LoggingMiddleware +from src.core.logging.setup import setup_logging +from src.settings import Settings + + +def add_middleware(fastapi_app: FastAPI): + origins = ["*"] + fastapi_app.add_middleware( + CORSMiddleware, + allow_origins=origins, + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], + ) + setup_logging() + fastapi_app.add_middleware(LoggingMiddleware) + fastapi_app.add_middleware(CorrelationIdMiddleware) + + +def include_router(fastapi_app: FastAPI): + from src.api.router import api_router + + fastapi_app.include_router(api_router) + + +def set_events(fastapi_app: FastAPI, run_bot: bool): + @fastapi_app.on_event("startup") + async def on_startup(): + from .events import startup + + await startup(fastapi_app, run_bot) + + @fastapi_app.on_event("shutdown") + async def on_shutdown(): + """Действия после остановки сервера.""" + from .events import shutdown + + await shutdown(fastapi_app, run_bot) + + +def init_fastapi( + fastapi_app: FastAPI, + settings: Settings, + run_bot: bool, +) -> FastAPI: + """Инициализация приложения FastAPI.""" + + add_middleware(fastapi_app) + include_router(fastapi_app) + set_events(fastapi_app, run_bot) + + fastapi_app.description = API_DESCRIPTION + + if settings.DEBUG: + fastapi_app.mount( + "/static", + StaticFiles(directory=settings.STATIC_DIR, html=True), + name="static", + ) + return fastapi_app diff --git a/src/api/router.py b/src/api/router.py new file mode 100644 index 00000000..6634d558 --- /dev/null +++ b/src/api/router.py @@ -0,0 +1,23 @@ +from fastapi import APIRouter + +from src.api.endpoints import ( + admin_user_router, + analytic_router, + category_router, + health_check_router, + notification_router, + site_user_router, + task_router, + telegram_webhook_router, +) +from src.settings import settings + +api_router = APIRouter(prefix=settings.ROOT_PATH) +api_router.include_router(analytic_router, prefix="/analytics", tags=["Analytic"]) +api_router.include_router(category_router, prefix="/categories", tags=["Content"]) +api_router.include_router(health_check_router, prefix="/health_check", tags=["Healthcheck"]) +api_router.include_router(notification_router, prefix="/messages", tags=["Messages"]) +api_router.include_router(task_router, prefix="/tasks", tags=["Content"]) +api_router.include_router(telegram_webhook_router, prefix="/telegram", tags=["Telegram"]) +api_router.include_router(admin_user_router, prefix="/auth", tags=["AdminUser"]) +api_router.include_router(site_user_router, prefix="/auth", tags=["ExternalSiteUser"]) diff --git a/src/api/schemas/__init__.py 
b/src/api/schemas/__init__.py new file mode 100644 index 00000000..4e46cb2d --- /dev/null +++ b/src/api/schemas/__init__.py @@ -0,0 +1,45 @@ +from .admin import AdminUserRequest +from .analytics import ActiveTasks, AllUsersStatistic, Analytic, ReasonCancelingStatistics +from .base import RequestBase, ResponseBase +from .categories import CategoryRequest, CategoryResponse +from .external_site_user import ExternalSiteUser, ExternalSiteUserRequest +from .health_check import BotStatus, CommitStatus, DBStatus, HealthCheck +from .notification import ( + ErrorsSending, + FeedbackFormQueryParams, + InfoRate, + Message, + MessageList, + TelegramNotificationRequest, + TelegramNotificationUsersGroups, + TelegramNotificationUsersRequest, +) +from .tasks import TaskRequest, TaskResponse + +__all__ = ( + "ActiveTasks", + "AdminUserRequest", + "AllUsersStatistic", + "Analytic", + "RequestBase", + "ResponseBase", + "CategoryRequest", + "CategoryResponse", + "ExternalSiteUser", + "ExternalSiteUserRequest", + "BotStatus", + "CommitStatus", + "DBStatus", + "HealthCheck", + "ErrorsSending", + "FeedbackFormQueryParams", + "InfoRate", + "Message", + "MessageList", + "ReasonCancelingStatistics", + "TelegramNotificationRequest", + "TelegramNotificationUsersGroups", + "TelegramNotificationUsersRequest", + "TaskRequest", + "TaskResponse", +) diff --git a/src/api/schemas/admin.py b/src/api/schemas/admin.py new file mode 100644 index 00000000..faee305f --- /dev/null +++ b/src/api/schemas/admin.py @@ -0,0 +1,18 @@ +from pydantic import Field + +from src.api.schemas.base import RequestBase + + +class AdminUserRequest(RequestBase): + """Класс модели запроса для AdminUser.""" + + email: str = Field(..., max_length=48) + password: str = Field(..., max_length=48) + + class Config: + json_schema_extra = { + "example": { + "email": "email", + "password": "password", + } + } diff --git a/src/api/schemas/analytics.py b/src/api/schemas/analytics.py new file mode 100644 index 00000000..9ce86a5e --- /dev/null +++ b/src/api/schemas/analytics.py @@ -0,0 +1,36 @@ +from pydantic import BaseModel + + +class ActiveTasks(BaseModel): + """Класс ответа для аналитики по задачам.""" + + last_update: str + active_tasks: int + + +class ReasonCancelingStatistics(BaseModel): + to_much_messages: int = 0 + no_time: int = 0 + no_match: int = 0 + uncomfortable: int = 0 + funds_dont_choose: int = 0 + other: int = 0 + + +class AllUsersStatistic(BaseModel): + """Класс ответа для подробной аналитики по пользователям.""" + + added_users: dict[str, int] = {} + added_external_users: dict[str, int] = {} + users_unsubscribed: dict[str, int] = {} + + +class Analytic(BaseModel): + """Класс модели запроса для статистики.""" + + command_stats: dict[str, str] = {} + reasons_canceling: ReasonCancelingStatistics = {} + number_users: int = 0 + all_users_statistic: AllUsersStatistic + active_users_statistic: dict[str, str] = {} + tasks: ActiveTasks = {} diff --git a/src/api/schemas/base.py b/src/api/schemas/base.py new file mode 100644 index 00000000..c1c61265 --- /dev/null +++ b/src/api/schemas/base.py @@ -0,0 +1,15 @@ +from pydantic import BaseModel, Extra + + +class ResponseBase(BaseModel): + """Базовый класс для модели ответа.""" + + class Config: + from_attributes = True + + +class RequestBase(BaseModel): + """Базовый класс для модели запроса.""" + + class Config: + extra = Extra.forbid diff --git a/src/api/schemas/categories.py b/src/api/schemas/categories.py new file mode 100644 index 00000000..1b195342 --- /dev/null +++ b/src/api/schemas/categories.py @@ 
-0,0 +1,50 @@ +from typing import Optional + +from pydantic import Field, root_validator + +from src.api.schemas.base import RequestBase, ResponseBase + + +class CategoryRequest(RequestBase): + """Класс модели запроса для Category.""" + + id: int = Field(..., example=1, description="Уникальный идентификатор категории.") + name: str = Field(..., min_length=2, max_length=100, example="Category Name", description="Название категории.") + parent_id: Optional[int] = Field( + None, + example=1, + description="Принадлежность к родительской категории. Если null, то это родительская категория.", + ) + + @root_validator(skip_on_failure=True) + def validate_self_parent(cls, values): + if values["parent_id"] and values["parent_id"] == values["id"]: + raise ValueError("Категория не может быть дочерней для самой себя.") + return values + + class Config: + json_schema_extra = { + "example": { + "id": 1, + "name": "Category Name", + "parent_id": 1, + } + } + + +class CategoryResponse(ResponseBase): + """Класс модели ответа для Category.""" + + id: int = Field(..., example=1, description="Уникальный идентификатор категории.") + name: str = Field(..., min_length=2, max_length=100, example="Category Name", description="Название категории.") + parent_id: Optional[int] = Field( + None, + example=1, + description="Принадлежность к родительской категории. Если null, то это родительская категория.", + ) + is_archived: bool = Field( + example=False, description="Статус категории. Если True, то эта категория заархивирована." + ) + + class Config: + json_schema_extra = {"example": {"id": 1, "name": "Category Name", "parent_id": 1, "is_archived": False}} diff --git a/src/api/schemas/external_site_user.py b/src/api/schemas/external_site_user.py new file mode 100644 index 00000000..996044e2 --- /dev/null +++ b/src/api/schemas/external_site_user.py @@ -0,0 +1,35 @@ +from pydantic import Field, field_validator + +from src.api.schemas.base import RequestBase +from src.core.db.models import ExternalSiteUser + + +class ExternalSiteUserRequest(RequestBase): + """Класс модели запроса для ExternalSiteUser.""" + + id: int = Field(...) 
+ id_hash: str = Field(..., max_length=256) + first_name: str | None = Field(None, max_length=64) + last_name: str | None = Field(None, max_length=64) + email: str = Field(..., max_length=48) + specializations: list[int] | None = None + + def to_orm(self) -> ExternalSiteUser: + return ExternalSiteUser( + id=self.id, + id_hash=self.id_hash, + email=self.email, + first_name=self.first_name, + last_name=self.last_name, + specializations=self.specializations, + ) + + @field_validator("specializations", mode="before") + def specializations_str_validation(cls, value: str): + if not isinstance(value, str): + return value + try: + new_value = [int(value) for value in value.split(", ")] + return new_value + except ValueError: + raise ValueError("Для передачи строки с числами в поле specializations " 'используйте формат: "1, 2, 3" ') diff --git a/src/api/schemas/health_check.py b/src/api/schemas/health_check.py new file mode 100644 index 00000000..3650d862 --- /dev/null +++ b/src/api/schemas/health_check.py @@ -0,0 +1,38 @@ +from typing_extensions import NotRequired, TypedDict + +from src.api.schemas.base import ResponseBase + + +class DBStatus(TypedDict): + """Класс ответа для проверки работы базы данных.""" + + status: bool + last_update: NotRequired[str] + active_tasks: NotRequired[int] + db_connection_error: NotRequired[str] + + +class BotStatus(TypedDict): + """Класс ответа для проверки работы бота.""" + + status: bool + method: NotRequired[str] + url: NotRequired[str] + error: NotRequired[str] + + +class CommitStatus(TypedDict): + """Класс ответа для git коммита.""" + + last_commit: str + commit_date: str + git_tags: list[str] + commit_error: NotRequired[str] + + +class HealthCheck(ResponseBase): + """Класс модели запроса для проверки работы бота.""" + + db: DBStatus = {} + bot: BotStatus = {} + git: CommitStatus = {} diff --git a/src/api/schemas/notification.py b/src/api/schemas/notification.py new file mode 100644 index 00000000..7c0c1806 --- /dev/null +++ b/src/api/schemas/notification.py @@ -0,0 +1,87 @@ +import urllib + +from pydantic import BaseModel, Extra, Field + +from src.api.schemas.base import RequestBase +from src.core.enums import TelegramNotificationUsersGroups + + +class FeedbackFormQueryParams(BaseModel): + """Класс формирования параметров запроса для формы обратной связи.""" + + name: str | None + surname: str | None + email: str | None + + def as_url_query(self): + return f"?{urllib.parse.urlencode(self.dict())}" + + +class TelegramNotificationRequest(RequestBase): + """ + Класс формирования параметров запроса для отправки + сообщения определенному пользователю. 
+ """ + + message: str = Field(..., min_length=2) + + class Config: + json_schema_extra = { + "example": { + "message": "Type here your message for user", + } + } + + +class TelegramNotificationUsersRequest(TelegramNotificationRequest): + """Класс формирования параметров запроса для отправки + сообщения определенной группе пользователей.""" + + mode: TelegramNotificationUsersGroups + + class Config: + json_schema_extra = { + "example": { + "message": "Type here your message for user", + "mode": "all", + } + } + + +class Message(TelegramNotificationRequest): + telegram_id: int + + +class MessageList(RequestBase): + messages: list[Message] + + class Config: + extra = Extra.forbid + json_schema_extra = { + "example": { + "messages": [ + {"telegram_id": 000000000, "message": "hi there"}, + {"telegram_id": 000000000, "message": "hi there"}, + ] + } + } + + +class ErrorsSending(BaseModel): + """ + Класс для вывода ошибок при отправке сообщения. + """ + + type: str = "TelegramError" + message: str = "" + + +class InfoRate(BaseModel): + """ + Класс для вывода информации о количестве успешных и неуспешных отправлений + """ + + messages: list[str] = [] + errors: list[ErrorsSending] = [] + successful_rate: int = 0 + unsuccessful_rate: int = 0 diff --git a/src/api/schemas/tasks.py b/src/api/schemas/tasks.py new file mode 100644 index 00000000..02d258c8 --- /dev/null +++ b/src/api/schemas/tasks.py @@ -0,0 +1,81 @@ +from datetime import date, datetime + +from pydantic import Extra, Field, NonNegativeInt, StrictStr, field_validator + +from src.api.constants import DATE_FORMAT, DATE_FORMAT_FOR_TASK_SCHEMA +from src.api.schemas.base import RequestBase, ResponseBase + + +class TaskRequest(RequestBase): + """Класс модели запроса для Task.""" + + id: NonNegativeInt = Field(..., ge=1, example=1, description="Уникальный идентификатор задачи.") + title: StrictStr = Field(..., example="Task Title", description="Название задачи.") + name_organization: StrictStr = Field( + ..., example="My Organization", description="Название организации, оставившей задачу." + ) + deadline: date = Field(..., example="31.12.2025", description="Время, до которого нужно выполнить задачу.") + category_id: NonNegativeInt = Field( + ..., example=1, description="ID дочерней категории, к которой относится задача." 
+ ) + bonus: NonNegativeInt = Field(..., ge=1, lt=10, example=5, description="Величина бонуса за выполнение задачи.") + location: StrictStr = Field(..., example="My Location", description="Локация, в которой находится заказчик задачи.") + link: StrictStr = Field(..., example="https://example.com", description="Ссылка на сайт, где размещена задача.") + description: StrictStr = Field(None, example="Task description", description="Описание задачи.") + + @field_validator("deadline", mode="before") + def str_to_date(cls, v: object) -> object: + if isinstance(v, str): + return datetime.strptime(v, DATE_FORMAT_FOR_TASK_SCHEMA).date() + return v + + class Config: + extra = Extra.ignore + json_schema_extra = { + "example": { + "id": 1, + "title": "Task Title", + "name_organization": "My Organization", + "deadline": "31.12.2025", + "category_id": 1, + "bonus": 5, + "location": "My Location", + "link": "https://example.com", + "description": "Task description", + } + } + + +class TaskResponse(ResponseBase): + """Класс модели ответа для Task.""" + + title: StrictStr = Field(..., example="Task Title", description="Название задачи.") + name_organization: StrictStr = Field( + ..., example="My Organization", description="Название организации, оставившей задачу." + ) + deadline: date = Field( + ..., format=DATE_FORMAT, example="31-12-2025", description="Время, до которого нужно выполнить задачу." + ) + category_id: NonNegativeInt = Field( + ..., example=1, description="Показывает, к какой дочерней категории относится задача." + ) + bonus: NonNegativeInt = Field(..., ge=1, lt=10, example=5, description="Величина бонуса за выполнение задачи.") + location: StrictStr = Field(..., example="My Location", description="Локация, в которой находится заказчик задачи.") + link: StrictStr = Field(..., example="https://example.com", description="Ссылка на сайт, где размещена задача.") + description: StrictStr = Field(None, example="Task description", description="Описание задачи.") + is_archived: bool = Field(example=False, description="Статус задачи. 
Если True, то эта задача заархивирована.") + + class Config: + json_schema_extra = { + "example": { + "title": "Task Title", + "name_organization": "My Organization", + "deadline": "31-12-2025", + "category_id": 1, + "bonus": 5, + "location": "My Location", + "link": "https://example.com", + "description": "Task description", + "is_archived": False, + } + } diff --git a/src/api/services/__init__.py b/src/api/services/__init__.py new file mode 100644 index 00000000..3d81011a --- /dev/null +++ b/src/api/services/__init__.py @@ -0,0 +1,19 @@ +from .admin_service import AdminService +from .analytics import AnalyticsService +from .base import ContentService +from .category import CategoryService +from .external_site_user import ExternalSiteUserService +from .health_check import HealthCheckService +from .messages import TelegramNotificationService +from .task import TaskService + +__all__ = ( + "ContentService", + "CategoryService", + "TaskService", + "ExternalSiteUserService", + "AdminService", + "AnalyticsService", + "HealthCheckService", + "TelegramNotificationService", +) diff --git a/src/api/services/admin_service.py b/src/api/services/admin_service.py new file mode 100644 index 00000000..989ae8bf --- /dev/null +++ b/src/api/services/admin_service.py @@ -0,0 +1,32 @@ +from jose import JWTError, jwt + +from src.core.db.models import AdminUser +from src.core.db.repository.admin_repository import AdminUserRepository +from src.core.exceptions.exceptions import CredentialsException +from src.settings import settings + + +class AdminService: + """Сервис для работы с моделью AdminUser.""" + + def __init__(self, admin_repository: AdminUserRepository) -> None: + self._repository: AdminUserRepository = admin_repository + + async def authenticate_user(self, email: str, password: str) -> AdminUser | None: + user = await self._repository.get_by_email(email) + if user and user.check_password(password): + return user + return None + + async def get_current_user(self, token: str) -> AdminUser: + try: + payload = jwt.decode(token, settings.SECRET_KEY, algorithms=[settings.ALGORITHM]) + email: str = payload.get("email") + if email is None: + raise CredentialsException("Don't have an email in the token") + except JWTError: + raise CredentialsException("Could not validate credentials(token)") + user = await self._repository.get_by_email(email) + if not user: + raise CredentialsException("There is no user in db with such email") + return user diff --git a/src/api/services/analytics.py b/src/api/services/analytics.py new file mode 100644 index 00000000..e77a8c92 --- /dev/null +++ b/src/api/services/analytics.py @@ -0,0 +1,42 @@ +from datetime import timedelta + +from src.api.constants import DAYS_NUMBER_FOR_USERS_STATISTIC +from src.core.db.repository import UnsubscribeReasonRepository, UserRepository + + +class AnalyticsService: + """Сервис для работы с моделью Analytics.""" + + def __init__( + self, + user_repository: UserRepository, + unsubscribe_reason_repository: UnsubscribeReasonRepository, + ) -> None: + self._user_repository: UserRepository = user_repository + self._unsubscribe_reason_repository: UnsubscribeReasonRepository = unsubscribe_reason_repository + + async def get_user_number(self) -> None: + return await self._user_repository.count_all() + + async def get_reason_cancelling_statistics(self) -> dict[str, int]: + reasons_statistic_from_db = await self._unsubscribe_reason_repository.get_reason_cancelling_statistics() + return dict(reasons_statistic_from_db) + + async def 
get_added_users_statistic(self, date_limit) -> dict[str, int]: + date_begin = date_limit - timedelta(days=DAYS_NUMBER_FOR_USERS_STATISTIC - 1) + added_users = await self._user_repository.get_statistics_by_days(date_begin, date_limit, "created_at") + return added_users + + async def get_added_external_users_statistic(self, date_limit) -> dict[str, int]: + date_begin = date_limit - timedelta(days=DAYS_NUMBER_FOR_USERS_STATISTIC - 1) + added_external_users = await self._user_repository.get_statistics_by_days( + date_begin, date_limit, "external_signup_date" + ) + return added_external_users + + async def get_unsubscribed_users_statistic(self, date_limit) -> dict[str, int]: + date_begin = date_limit - timedelta(days=DAYS_NUMBER_FOR_USERS_STATISTIC - 1) + users_unsubscribed = await self._unsubscribe_reason_repository.get_statistics_by_days( + date_begin, date_limit, "created_at" + ) + return users_unsubscribed diff --git a/src/api/services/base.py b/src/api/services/base.py new file mode 100644 index 00000000..1d8ea561 --- /dev/null +++ b/src/api/services/base.py @@ -0,0 +1,34 @@ +import abc + +from sqlalchemy.ext.asyncio import AsyncSession + +from src.core.db.repository import ContentRepository + + +class ContentService(abc.ABC): + """Абстрактный класс для контента.""" + + def __init__(self, repository: ContentRepository, session: AsyncSession): + self._repository: ContentRepository = repository + self._session: AsyncSession = session + + async def actualize_objects(self, objects: list[any], model_class: any) -> list[any]: + to_create, to_update = [], [] + ids = [obj.id for obj in objects] + async with self._session as session: + await self._repository.archive_by_ids(ids, commit=False) + already_have = await self._repository.get_by_ids(ids) + for obj in objects: + if obj.id not in already_have: + to_create.append(model_class(**obj.dict(), is_archived=False)) + else: + to_update.append({**obj.dict(), "is_archived": False}) + if to_create: + await self._repository.create_all(to_create, commit=False) + if to_update: + await self._repository.update_all(to_update, commit=False) + await session.commit() + return [obj.id for obj in to_create] + + async def get_all(self) -> list[any]: + return await self._repository.get_all() diff --git a/src/api/services/category.py b/src/api/services/category.py new file mode 100644 index 00000000..38a3fccf --- /dev/null +++ b/src/api/services/category.py @@ -0,0 +1,11 @@ +from sqlalchemy.ext.asyncio import AsyncSession + +from src.api.services.base import ContentService +from src.core.db.repository.category import CategoryRepository + + +class CategoryService(ContentService): + """Сервис для работы с моделью Category.""" + + def __init__(self, category_repository: CategoryRepository, session: AsyncSession) -> None: + super().__init__(category_repository, session) diff --git a/src/api/services/external_site_user.py b/src/api/services/external_site_user.py new file mode 100644 index 00000000..e79a9dc6 --- /dev/null +++ b/src/api/services/external_site_user.py @@ -0,0 +1,19 @@ +from sqlalchemy.ext.asyncio import AsyncSession + +from src.api.schemas import ExternalSiteUserRequest +from src.core.db.repository.external_site_user import ExternalSiteUserRepository + + +class ExternalSiteUserService: + """Сервис для работы с моделью ExternalSiteUser.""" + + def __init__(self, site_user_repository: ExternalSiteUserRepository, session: AsyncSession) -> None: + self._repository: ExternalSiteUserRepository = site_user_repository + self._session: AsyncSession = session + + 
async def register(self, site_user_schema: ExternalSiteUserRequest) -> None: + site_user = await self._repository.get_or_none(site_user_schema.id) + if site_user: + await self._repository.update(site_user.id, site_user_schema.to_orm()) + else: + await self._repository.create(site_user_schema.to_orm()) diff --git a/src/api/services/health_check.py b/src/api/services/health_check.py new file mode 100644 index 00000000..844857c8 --- /dev/null +++ b/src/api/services/health_check.py @@ -0,0 +1,71 @@ +import datetime +import os + +from sqlalchemy.exc import SQLAlchemyError +from telegram.ext import Application + +from src.api.constants import DATE_TIME_FORMAT +from src.api.schemas import BotStatus, CommitStatus, DBStatus +from src.core.db.repository import TaskRepository +from src.settings import settings + + +class HealthCheckService: + """Сервис для проверки работы бота.""" + + def __init__(self, task_repository: TaskRepository, telegram_bot: Application) -> None: + self._repository = task_repository + self._bot = telegram_bot + + async def check_bot(self) -> BotStatus: + try: + webhook_info = await self._bot.bot.get_webhook_info() + except Exception as exc: + bot_status: BotStatus = {"status": False, "error": f"{exc.__class__.__name__}: {exc}"} + return bot_status + if settings.BOT_WEBHOOK_MODE: + method = "webhooks" + bot_status: BotStatus = {"status": True, "method": method, "url": webhook_info.url} + return bot_status + method = "pulling" + bot_status: BotStatus = {"status": True, "method": method} + return bot_status + + async def get_last_commit(self) -> CommitStatus: + """В режиме dev - возвращает сведения о последнем коммите, или берет данные из переменных окружения.""" + try: + from git import Repo + + repo = Repo(os.getcwd()) + except (ImportError, NameError) as exc: + commit_status: CommitStatus = { + "last_commit": settings.LAST_COMMIT, + "commit_date": settings.COMMIT_DATE, + "git_tags": settings.TAGS, + "commit_error": f"{exc.__class__.__name__}: {exc}", + } + return commit_status + master = repo.head.reference + commit_date = datetime.datetime.fromtimestamp(master.commit.committed_date) + commit_status: CommitStatus = { + "last_commit": str(master.commit)[:7], + "commit_date": commit_date.strftime(DATE_TIME_FORMAT), + "git_tags": repo.tags, + } + return commit_status + + async def check_db_connection(self) -> DBStatus: + try: + active_tasks = await self._repository.count_active_all() + get_last_update = await self._repository.get_last_update() + except SQLAlchemyError as exc: + db_status: DBStatus = {"status": False, "db_connection_error": f"{exc.__class__.__name__}: {exc}"} + return db_status + if get_last_update is None: + get_last_update = "Unable to get last_update" + db_status: DBStatus = { + "status": True, + "last_update": get_last_update, + "active_tasks": active_tasks, + } + return db_status diff --git a/src/api/services/messages.py b/src/api/services/messages.py new file mode 100644 index 00000000..fed427e8 --- /dev/null +++ b/src/api/services/messages.py @@ -0,0 +1,64 @@ +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import joinedload +from telegram.ext import Application + +from src.api.schemas import ErrorsSending, InfoRate +from src.core.db.models import Category, User +from src.core.enums import TelegramNotificationUsersGroups +from src.core.services.notification import TelegramNotification + + +class TelegramNotificationService: + """Класс описывающий функционал передачи сообщения + определенному 
пользователю""" + + def __init__( + self, + telegram_bot: Application, + session: AsyncSession, + ) -> None: + self._session = session + self.telegram_notification = TelegramNotification(telegram_bot) + + async def send_messages_to_group_of_users(self, notifications): + """Отправляет сообщение указанной группе пользователей""" + match notifications.mode.upper(): + case TelegramNotificationUsersGroups.ALL.name: + users = await self._session.scalars(select(User)) + case TelegramNotificationUsersGroups.SUBSCRIBED.name: + users = await self._session.scalars(select(User).where(User.has_mailing == True)) # noqa + case TelegramNotificationUsersGroups.UNSUBSCRIBED.name: + users = await self._session.scalars(select(User).where(User.has_mailing == False)) # noqa + return await self.telegram_notification.send_messages(message=notifications.message, users=users) + + async def send_message_to_user(self, telegram_id, notifications): + """Отправляет сообщение указанному по telegram_id пользователю""" + return await self.telegram_notification.send_message(user_id=telegram_id, message=notifications.message) + + async def send_messages_to_subscribed_users(self, notifications, category_id): + """Отправляет сообщение пользователям, подписанным на определенные категории""" + category = await self._session.scalars( + select(Category).options(joinedload(Category.users)).where(Category.id == category_id) + ) + category = category.first() + await self.telegram_notification.send_messages(message=notifications, users=category.users) + + def count_rate(self, respond: bool, msg: str, rate: InfoRate): + errors_sending = ErrorsSending() + if respond: + rate.successful_rate += 1 + rate.messages.append(msg) + else: + rate.unsuccessful_rate += 1 + errors_sending.message = msg + rate.errors.append(errors_sending) + return rate + + def collect_respond_and_status(self, result, rate): + """ + Функция для формирования отчета об отправке + """ + for res in result: + rate = self.count_rate(res[0], res[1], rate) + return rate diff --git a/src/api/services/task.py b/src/api/services/task.py new file mode 100644 index 00000000..f0179549 --- /dev/null +++ b/src/api/services/task.py @@ -0,0 +1,21 @@ +from sqlalchemy.ext.asyncio import AsyncSession + +from src.api.services.base import ContentService +from src.core.db.models import Task +from src.core.db.repository.task import TaskRepository + + +class TaskService(ContentService): + """Сервис для работы с моделью Task.""" + + def __init__(self, task_repository: TaskRepository, session: AsyncSession) -> None: + super().__init__(task_repository, session) + + async def get_tasks_for_user(self, user_id: int) -> list[Task]: + return await self._repository.get_tasks_for_user(user_id) + + async def get_user_task_id(self, task_id: int) -> list[Task]: + return await self._repository.get_user_task_id(task_id) + + async def get_user_tasks_ids(self, ids: list[int]) -> list[Task]: + return await self._repository.get_user_tasks_ids(ids) diff --git a/src/bot/__init__.py b/src/bot/__init__.py new file mode 100644 index 00000000..126275f9 --- /dev/null +++ b/src/bot/__init__.py @@ -0,0 +1,8 @@ +from .bot import create_bot, shutdown_bot, start_bot, startup_bot + +__all__ = ( + "start_bot", + "create_bot", + "shutdown_bot", + "startup_bot", +) diff --git a/src/bot/bot.py b/src/bot/bot.py new file mode 100644 index 00000000..5cfaed4b --- /dev/null +++ b/src/bot/bot.py @@ -0,0 +1,54 @@ +import structlog +from telegram.ext import AIORateLimiter, Application + +log = structlog.get_logger() + + +def 
create_bot(bot_token) -> Application:
+    bot = Application.builder().token(bot_token).rate_limiter(AIORateLimiter()).build()
+    return bot
+
+
+async def start_bot(
+    bot: Application, bot_webhook_mode: bool, telegram_webhook_url: str, secret_key: str
+) -> Application:
+    """Запуск бота в `Background` режиме."""
+    await bot.initialize()
+    if bot_webhook_mode is True:
+        bot.updater = None
+        await bot.bot.set_webhook(
+            url=telegram_webhook_url,
+            secret_token=secret_key,
+        )
+    else:
+        await bot.updater.start_polling()  # type: ignore
+    await bot.start()
+    await log.ainfo("Bot started")
+    return bot
+
+
+async def startup_bot(
+    bot: Application, bot_webhook_mode: bool, telegram_webhook_url: str, secret_key: str
+) -> Application:
+    bot_instance = await start_bot(bot, bot_webhook_mode, telegram_webhook_url, secret_key)
+    result = await bot_instance.bot.setMyCommands(
+        [
+            [
+                "start",
+                "Запустить бота",
+            ],
+            [
+                "menu",
+                "Открыть меню",
+            ],
+        ]
+    )
+    await log.ainfo(result)
+    return bot_instance
+
+
+async def shutdown_bot(bot_instance, bot_webhook_mode: bool):
+    if bot_webhook_mode is False:
+        await bot_instance.updater.stop()
+    await bot_instance.stop()
+    await bot_instance.shutdown()
diff --git a/src/bot/constants/__init__.py b/src/bot/constants/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/src/bot/constants/callback_data.py b/src/bot/constants/callback_data.py
new file mode 100644
index 00000000..e479111d
--- /dev/null
+++ b/src/bot/constants/callback_data.py
@@ -0,0 +1,8 @@
+VIEW_TASKS = "view_tasks"
+CHANGE_CATEGORY = "change_category"
+ABOUT_PROJECT = "about_project"
+JOB_SUBSCRIPTION = "job_subscription"
+GET_CATEGORIES = "categories_callback"
+ADD_CATEGORIES = "add_categories"
+CONFIRM_CATEGORIES = "confirm_categories"
+MENU = "menu_callback"
diff --git a/src/bot/constants/commands.py b/src/bot/constants/commands.py
new file mode 100644
index 00000000..3df5b93b
--- /dev/null
+++ b/src/bot/constants/commands.py
@@ -0,0 +1,4 @@
+GREETING = "choose_category_after_start"
+GREETING_REGISTERED_USER = "before_confirm_categories"
+START = "start"
+MENU = "menu"
diff --git a/src/bot/constants/enum.py b/src/bot/constants/enum.py
new file mode 100644
index 00000000..149ab13e
--- /dev/null
+++ b/src/bot/constants/enum.py
@@ -0,0 +1,10 @@
+import enum
+
+
+class REASONS(enum.StrEnum):
+    to_much_messages = "Слишком много уведомлений"
+    no_time = "Нет времени на волонтёрство"
+    no_match = "Нет подходящих заданий"
+    uncomfortable = "Бот мне неудобен"
+    funds_dont_choose = "Фонды меня не выбирают"
+    other = "Другое"
diff --git a/src/bot/constants/patterns.py b/src/bot/constants/patterns.py
new file mode 100644
index 00000000..1376d2e7
--- /dev/null
+++ b/src/bot/constants/patterns.py
@@ -0,0 +1,5 @@
+SUBCATEGORIES = r"category_(\d+)"
+SELECT_CATEGORY = r"select_category_(\d+)"
+BACK_SUBCATEGORY = r"back_to_(\d+)"
+NO_MAILING_REASON = r"reason_(\w+)"
+TASK_DETAILS = r"task_details_(\d+$)"
diff --git a/src/bot/constants/states.py b/src/bot/constants/states.py
new file mode 100644
index 00000000..da8f6bf4
--- /dev/null
+++ b/src/bot/constants/states.py
@@ -0,0 +1 @@
+GREETING = "greeting"
diff --git a/src/bot/handlers/__init__.py b/src/bot/handlers/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/src/bot/handlers/categories.py b/src/bot/handlers/categories.py
new file mode 100644
index 00000000..fe12deec
--- /dev/null
+++ b/src/bot/handlers/categories.py
@@ -0,0 +1,139 @@
+from dependency_injector.wiring import Provide
+from telegram import Update
+from 
telegram.constants import ParseMode +from telegram.ext import Application, CallbackQueryHandler, ContextTypes + +from src.bot.constants import callback_data, patterns +from src.bot.keyboards import ( + get_checked_categories_keyboard, + get_open_tasks_and_menu_keyboard, + get_subcategories_keyboard, +) +from src.bot.services.category import CategoryService +from src.bot.services.user import UserService +from src.bot.utils import delete_previous_message +from src.core.depends import Container +from src.core.logging.utils import logger_decor + + +@logger_decor +@delete_previous_message +async def categories_callback( + update: Update, + context: ContextTypes.DEFAULT_TYPE, + category_service: CategoryService = Provide[Container.bot_services_container.bot_category_service], + user_service: UserService = Provide[Container.bot_services_container.bot_user_service], +): + context.user_data["parent_id"] = None + categories = await category_service.get_unarchived_parents_with_children_count() + selected_categories_with_parents = await user_service.get_user_categories_with_parents(update.effective_user.id) + await context.bot.send_message( + chat_id=update.effective_chat.id, + text="Чтобы я знал, с какими задачами ты готов помогать, " + "выбери свои профессиональные компетенции (можно выбрать " + 'несколько). После этого, нажми на пункт "Готово 👌"', + reply_markup=await get_checked_categories_keyboard(categories, selected_categories_with_parents), + ) + + +async def confirm_categories_callback( + update: Update, + context: ContextTypes.DEFAULT_TYPE, + user_service: UserService = Provide[Container.bot_services_container.bot_user_service], +): + """Записывает выбранные категории в базу данных и отправляет пользователю отчет о выбранных категориях.""" + query = update.callback_query + telegram_id = update.effective_user.id + + categories = await user_service.get_user_categories(telegram_id) + if not categories: + await query.message.edit_text( + text="Категории не выбраны.", + reply_markup=await get_open_tasks_and_menu_keyboard(), + ) + else: + await query.message.edit_text( + text="Отлично! Теперь я буду присылать тебе уведомления о новых " + f"заданиях в категориях: *{', '.join(categories.values())}*.\n\n", + parse_mode=ParseMode.MARKDOWN, + reply_markup=await get_open_tasks_and_menu_keyboard(), + ) + await user_service.check_and_set_has_mailing_atribute(telegram_id) + + +@logger_decor +async def subcategories_callback( + update: Update, + context: ContextTypes.DEFAULT_TYPE, + category_service: CategoryService = Provide[Container.bot_services_container.bot_category_service], + user_service: UserService = Provide[Container.bot_services_container.bot_user_service], +): + query = update.callback_query + parent_id = int(context.match.group(1)) + context.user_data["parent_id"] = parent_id + subcategories = await category_service.get_unarchived_subcategories(parent_id) + selected_categories = await user_service.get_user_categories(update.effective_user.id) + + await query.message.edit_text( + "Чтобы я знал, с какими задачами ты готов помогать, " + "выбери свои профессиональные компетенции (можно выбрать " + 'несколько). 
После этого, нажми на пункт "Готово 👌"', + reply_markup=await get_subcategories_keyboard(parent_id, subcategories, selected_categories), + ) + + +@logger_decor +async def select_subcategory_callback( + update: Update, + context: ContextTypes.DEFAULT_TYPE, + category_service: CategoryService = Provide[Container.bot_services_container.bot_category_service], + user_service: UserService = Provide[Container.bot_services_container.bot_user_service], +): + query = update.callback_query + subcategory_id = int(context.match.group(1)) + selected_categories = await user_service.get_user_categories(update.effective_user.id) + + if subcategory_id not in selected_categories: + selected_categories[subcategory_id] = None + await user_service.add_category_to_user(update.effective_user.id, subcategory_id) + else: + del selected_categories[subcategory_id] + await user_service.delete_category_from_user(update.effective_user.id, subcategory_id) + + parent_id = context.user_data["parent_id"] + subcategories = await category_service.get_unarchived_subcategories(parent_id) + + await query.message.edit_text( + "Чтобы я знал, с какими задачами ты готов помогать, " + "выбери свои профессиональные компетенции (можно выбрать " + 'несколько). После этого, нажми на пункт "Готово 👌"', + reply_markup=await get_subcategories_keyboard(parent_id, subcategories, selected_categories), + ) + + +@logger_decor +async def back_subcategory_callback( + update: Update, + context: ContextTypes.DEFAULT_TYPE, + category_service: CategoryService = Provide[Container.bot_services_container.bot_category_service], + user_service: UserService = Provide[Container.bot_services_container.bot_user_service], +): + query = update.callback_query + categories = await category_service.get_unarchived_parents_with_children_count() + selected_categories_with_parents = await user_service.get_user_categories_with_parents(update.effective_user.id) + + await query.message.edit_text( + "Чтобы я знал, с какими задачами ты готов помогать, " + "выбери свои профессиональные компетенции (можно выбрать " + 'несколько). 
После этого, нажми на пункт "Готово 👌"', + reply_markup=await get_checked_categories_keyboard(categories, selected_categories_with_parents), + ) + + +def registration_handlers(app: Application): + app.add_handler(CallbackQueryHandler(subcategories_callback, pattern=patterns.SUBCATEGORIES)) + app.add_handler(CallbackQueryHandler(select_subcategory_callback, pattern=patterns.SELECT_CATEGORY)) + app.add_handler(CallbackQueryHandler(back_subcategory_callback, pattern=patterns.BACK_SUBCATEGORY)) + app.add_handler(CallbackQueryHandler(categories_callback, pattern=callback_data.CHANGE_CATEGORY)) + app.add_handler(CallbackQueryHandler(categories_callback, pattern=callback_data.GET_CATEGORIES)) + app.add_handler(CallbackQueryHandler(confirm_categories_callback, pattern=callback_data.CONFIRM_CATEGORIES)) diff --git a/src/bot/handlers/feedback_form.py b/src/bot/handlers/feedback_form.py new file mode 100644 index 00000000..349735a5 --- /dev/null +++ b/src/bot/handlers/feedback_form.py @@ -0,0 +1,36 @@ +import json + +from telegram import InlineKeyboardButton, InlineKeyboardMarkup, Update +from telegram.ext import Application, ContextTypes, MessageHandler +from telegram.ext.filters import StatusUpdate + +from src.bot.constants import callback_data +from src.bot.schemas import FeedbackModel +from src.core.logging.utils import logger_decor +from src.core.services.email import EmailProvider +from src.settings import settings + + +@logger_decor +async def web_app_data_handler(update: Update, context: ContextTypes.DEFAULT_TYPE): + user_data = json.loads(update.effective_message.web_app_data.data) + buttons = [ + [InlineKeyboardButton(text="Открыть меню", callback_data=callback_data.MENU)], + [InlineKeyboardButton(text="Посмотреть открытые задания", callback_data=callback_data.VIEW_TASKS)], + ] + keyboard = InlineKeyboardMarkup(buttons) + await update.message.reply_text( + text="Спасибо, я передал информацию команде ProCharity!", + reply_markup=keyboard, + ) + email_provider = EmailProvider() + feedback = FeedbackModel.model_validate(user_data) + await email_provider.send_question_feedback( + telegram_id=update.effective_user.id, + message=feedback.to_message(), + email=settings.EMAIL_ADMIN, + ) + + +def registration_handlers(app: Application): + app.add_handler(MessageHandler(StatusUpdate.WEB_APP_DATA, web_app_data_handler)) diff --git a/src/bot/handlers/menu.py b/src/bot/handlers/menu.py new file mode 100644 index 00000000..1771e3cc --- /dev/null +++ b/src/bot/handlers/menu.py @@ -0,0 +1,112 @@ +import structlog +from dependency_injector.wiring import Provide +from telegram import Update +from telegram.constants import ParseMode +from telegram.ext import Application, CallbackQueryHandler, CommandHandler, ContextTypes + +from src.bot.constants import callback_data, commands, enum, patterns +from src.bot.keyboards import get_back_menu, get_menu_keyboard, get_no_mailing_keyboard +from src.bot.services.unsubscribe_reason import UnsubscribeReasonService +from src.bot.services.user import UserService +from src.bot.utils import delete_previous_message +from src.core.depends import Container +from src.core.logging.utils import logger_decor +from src.settings import Settings + +log = structlog.get_logger() + + +@logger_decor +@delete_previous_message +async def menu_callback( + update: Update, + context: ContextTypes.DEFAULT_TYPE, + user_service: UserService = Provide[Container.bot_services_container.bot_user_service], +): + """Возвращает в меню.""" + await context.bot.send_message( + 
chat_id=update.effective_chat.id, + text="Выбери, что тебя интересует:", + reply_markup=await get_menu_keyboard(await user_service.get_by_telegram_id(update.effective_user.id)), + ) + + +@logger_decor +@delete_previous_message +async def set_mailing( + update: Update, + context: ContextTypes.DEFAULT_TYPE, + user_service: UserService = Provide[Container.bot_services_container.bot_user_service], + settings: Settings = Provide[Container.settings], +): + """Включение/выключение подписки пользователя на почтовую рассылку.""" + telegram_id = update.effective_user.id + has_mailing = await user_service.set_mailing(telegram_id) + if has_mailing: + text = "Отлично! Теперь я буду присылать тебе уведомления о новых заданиях на почту." + keyboard = await get_back_menu() + parse_mode = ParseMode.MARKDOWN + else: + text = ( + "Ты больше не будешь получать новые задания от фондов, но всегда сможешь найти их на сайте " + f'ProCharity.\n\n' + "Поделись, пожалуйста, почему ты решил отписаться?" + ) + keyboard = get_no_mailing_keyboard() + parse_mode = ParseMode.HTML + await context.bot.send_message( + chat_id=update.effective_user.id, + text=text, + reply_markup=keyboard, + parse_mode=parse_mode, + disable_web_page_preview=True, + ) + + +@logger_decor +async def reason_handler( + update: Update, + context: ContextTypes.DEFAULT_TYPE, + unsubscribe_reason_service: UnsubscribeReasonService = Provide[ + Container.bot_services_container.unsubscribe_reason_service + ], +): + query = update.callback_query + reason = enum.REASONS[context.match.group(1)] + await unsubscribe_reason_service.save_reason(telegram_id=context._user_id, reason=reason.name) + await log.ainfo( + f"Пользователь {update.effective_user.username} ({update.effective_user.id}) отписался от " + f"рассылки по причине: {reason}" + ) + await query.message.edit_text( + text="Спасибо, я передал информацию команде ProCharity!", + reply_markup=await get_back_menu(), + parse_mode=ParseMode.MARKDOWN, + ) + + +@logger_decor +@delete_previous_message +async def about_project( + update: Update, context: ContextTypes.DEFAULT_TYPE, settings: Settings = Provide[Container.settings] +): + await context.bot.send_message( + chat_id=update.effective_chat.id, + text="С ProCharity профессионалы могут помочь некоммерческим " + "организациям в вопросах, которые требуют специальных знаний и " + "опыта.\n\nИнтеллектуальный волонтёр безвозмездно дарит фонду своё " + "время и профессиональные навыки, позволяя решать задачи, " + "которые трудно закрыть силами штатных сотрудников.\n\n" + f'Сделано студентами Яндекс.Практикума.', + reply_markup=await get_back_menu(), + parse_mode=ParseMode.HTML, + disable_web_page_preview=True, + ) + + +def registration_handlers(app: Application): + app.add_handler(CommandHandler(commands.MENU, menu_callback)) + app.add_handler(CallbackQueryHandler(menu_callback, pattern=callback_data.MENU)) + app.add_handler(CallbackQueryHandler(about_project, pattern=callback_data.ABOUT_PROJECT)) + app.add_handler(CallbackQueryHandler(set_mailing, pattern=callback_data.JOB_SUBSCRIPTION)) + app.add_handler(CallbackQueryHandler(reason_handler, pattern=patterns.NO_MAILING_REASON)) diff --git a/src/bot/handlers/registration.py b/src/bot/handlers/registration.py new file mode 100644 index 00000000..20a81c49 --- /dev/null +++ b/src/bot/handlers/registration.py @@ -0,0 +1,87 @@ +from dependency_injector.wiring import Provide, inject +from telegram import Update +from telegram.constants import ParseMode +from telegram.ext import Application, CallbackQueryHandler, 
CommandHandler, ContextTypes + +from src.bot.constants import callback_data, commands +from src.bot.keyboards import feedback_buttons, get_confirm_keyboard, get_start_keyboard +from src.bot.services.external_site_user import ExternalSiteUserService +from src.bot.services.user import UserService +from src.bot.utils import delete_previous_message, get_connection_url +from src.core.depends import Container +from src.core.logging.utils import logger_decor +from src.settings import Settings + + +@logger_decor +@inject +async def start_command( + update: Update, + context: ContextTypes.DEFAULT_TYPE, + ext_user_service: ExternalSiteUserService = Provide[Container.bot_services_container.bot_site_user_service], + user_service: UserService = Provide[Container.bot_services_container.bot_user_service], + settings: Settings = Provide[Container.settings], +): + ext_user = await ext_user_service.get_ext_user_by_args(context.args) + if ext_user is not None: + await user_service.register_user( + telegram_id=update.effective_user.id, + username=update.effective_user.username, + first_name=ext_user.first_name, + last_name=ext_user.last_name, + email=ext_user.email, + external_id=ext_user.id, + ) + await user_service.set_categories_to_user(update.effective_user.id, ext_user.specializations) + url_connect = get_connection_url(update.effective_user.id, ext_user.id) + else: + await user_service.register_user( + telegram_id=update.effective_user.id, + username=update.effective_user.username, + first_name=update.effective_user.first_name, + last_name=update.effective_user.last_name, + ) + url_connect = get_connection_url(update.effective_user.id) + categories = await user_service.get_user_categories(update.effective_user.id) + callback_data_on_start = commands.GREETING_REGISTERED_USER if categories else callback_data.CHANGE_CATEGORY + keyboard = await get_start_keyboard(callback_data_on_start=callback_data_on_start, url_for_connection=url_connect) + keyboard_feedback = await feedback_buttons(update.effective_user) + await context.bot.send_message( + chat_id=update.effective_user.id, + text="Привет! 👋 \n\n", + reply_markup=keyboard_feedback, + ) + await context.bot.send_message( + chat_id=update.effective_user.id, + text=f'Я бот платформы интеллектуального волонтерства ProCharity. 
' + "Буду держать тебя в курсе новых задач и помогу " + "оперативно связаться с командой поддержки.\n\n", + reply_markup=keyboard, + parse_mode=ParseMode.HTML, + disable_web_page_preview=True, + ) + + +@logger_decor +@delete_previous_message +async def confirm_chosen_categories( + update: Update, + context: ContextTypes.DEFAULT_TYPE, + user_service: UserService = Provide[Container.bot_services_container.bot_user_service], +): + keyboard = get_confirm_keyboard() + categories = await user_service.get_user_categories(update.effective_user.id) + context.user_data["selected_categories"] = {category: None for category in categories} + text = ", ".join(categories.values()) + + await context.bot.send_message( + chat_id=update.effective_user.id, + text=f"Вот список твоих профессиональных компетенций: *{text}* Все верно?", + reply_markup=keyboard, + parse_mode=ParseMode.MARKDOWN, + ) + + +def registration_handlers(app: Application): + app.add_handler(CommandHandler(commands.START, start_command)) + app.add_handler(CallbackQueryHandler(confirm_chosen_categories, pattern=commands.GREETING_REGISTERED_USER)) diff --git a/src/bot/handlers/tasks.py b/src/bot/handlers/tasks.py new file mode 100644 index 00000000..f3927767 --- /dev/null +++ b/src/bot/handlers/tasks.py @@ -0,0 +1,84 @@ +from dependency_injector.wiring import Provide +from telegram import InlineKeyboardButton, InlineKeyboardMarkup, Update +from telegram.constants import ParseMode +from telegram.ext import Application, CallbackContext, CallbackQueryHandler + +from src.bot.constants import callback_data, patterns +from src.bot.keyboards import get_back_menu, view_more_tasks_keyboard +from src.bot.services.task import TaskService +from src.bot.utils import delete_previous_message +from src.core.depends import Container +from src.core.logging.utils import logger_decor +from src.core.utils import display_task_verbosely, display_tasks +from src.settings import Settings + + +@logger_decor +async def task_details_callback( + update: Update, + context: CallbackContext, + task_service: TaskService = Provide[Container.bot_services_container.bot_task_service], + settings: Settings = Provide[Container.settings], +): + query = update.callback_query + task_id = int(context.match.group(1)) + task = await task_service.get_task_by_id(task_id) + detailed_text = display_task_verbosely(task, settings.HELP_PROCHARITY_URL) + await query.message.edit_text( + detailed_text, + parse_mode=ParseMode.HTML, + disable_web_page_preview=True, + ) + + +@logger_decor +@delete_previous_message +async def view_task_callback( + update: Update, + context: CallbackContext, + limit: int = 3, + task_service: TaskService = Provide[Container.bot_services_container.bot_task_service], + settings: Settings = Provide[Container.settings], +): + telegram_id = context._user_id + tasks_to_show, offset, page_number = await task_service.get_user_tasks_by_page( + context.user_data.get("page_number", 1), + limit, + telegram_id, + ) + + for task in tasks_to_show: + message = display_tasks(task, settings.HELP_PROCHARITY_URL) + inline_keyboard = [[InlineKeyboardButton("ℹ️ Подробнее", callback_data=f"task_details_{task.id}")]] + reply_markup = InlineKeyboardMarkup(inline_keyboard) + await context.bot.send_message( + chat_id=update.effective_chat.id, + text=message, + parse_mode=ParseMode.HTML, + disable_web_page_preview=True, + reply_markup=reply_markup, + ) + remaining_tasks = await task_service.get_remaining_user_tasks_count(limit, offset, telegram_id) + await show_next_tasks(update, context, 
page_number, remaining_tasks) + + +@delete_previous_message +async def show_next_tasks(update: Update, context: CallbackContext, page_number: int, remaining_tasks: int): + if remaining_tasks > 0: + text = f"Есть ещё задания, показать? Осталось: {remaining_tasks}" + context.user_data["page_number"] = page_number + 1 + keyboard = await view_more_tasks_keyboard() + else: + text = "Заданий больше нет." + keyboard = await get_back_menu() + + await context.bot.send_message( + chat_id=update.effective_chat.id, + text=text, + reply_markup=keyboard, + ) + + +def registration_handlers(app: Application): + app.add_handler(CallbackQueryHandler(view_task_callback, pattern=callback_data.VIEW_TASKS)) + app.add_handler(CallbackQueryHandler(task_details_callback, pattern=patterns.TASK_DETAILS)) diff --git a/src/bot/keyboards.py b/src/bot/keyboards.py new file mode 100644 index 00000000..778f8010 --- /dev/null +++ b/src/bot/keyboards.py @@ -0,0 +1,141 @@ +from urllib.parse import urljoin + +from dependency_injector.wiring import Provide +from telegram import InlineKeyboardButton, InlineKeyboardMarkup, KeyboardButton, ReplyKeyboardMarkup, WebAppInfo + +from src.api.schemas import FeedbackFormQueryParams +from src.bot.constants import callback_data, enum +from src.core.db.models import Category, User +from src.core.depends import Container +from src.settings import Settings, settings + +MENU_KEYBOARD = [ + [InlineKeyboardButton("🔎 Посмотреть открытые задания", callback_data=callback_data.VIEW_TASKS)], + [InlineKeyboardButton("✏️ Изменить компетенции", callback_data=callback_data.CHANGE_CATEGORY)], + [InlineKeyboardButton("ℹ️ О платформе", callback_data=callback_data.ABOUT_PROJECT)], +] +UNSUBSCRIBE_BUTTON = [ + InlineKeyboardButton("⏹️ Остановить подписку на задания", callback_data=callback_data.JOB_SUBSCRIPTION) +] +SUBSCRIBE_BUTTON = [ + InlineKeyboardButton("▶️ Включить подписку на задания", callback_data=callback_data.JOB_SUBSCRIPTION) +] +SUGGESTION_BUTTON_TITLE = "✉️ Отправить предложение/ошибку" +QUESTION_BUTTON_TITLE = "❓ Задать вопрос" + + +async def get_checked_categories_keyboard( + categories: dict[str, int, int], selected_categories: dict[Category] = {} +) -> InlineKeyboardButton: + keyboard = [] + + for category_name, category_id, category_children_count in categories: + if category_id in selected_categories: + if category_children_count == len(selected_categories[category_id]): + button = InlineKeyboardButton(f"✅ {category_name}", callback_data=f"category_{category_id}") + else: + button = InlineKeyboardButton(f"☑️ {category_name}", callback_data=f"category_{category_id}") + else: + button = InlineKeyboardButton(category_name, callback_data=f"category_{category_id}") + keyboard.append([button]) + + keyboard.extend( + [ + [InlineKeyboardButton("Нет моих компетенций 😕", callback_data=callback_data.ADD_CATEGORIES)], + [InlineKeyboardButton("Готово 👌", callback_data=callback_data.CONFIRM_CATEGORIES)], + ] + ) + return InlineKeyboardMarkup(keyboard) + + +async def get_subcategories_keyboard( + parent_id: int, subcategories: list[Category], selected_categories: dict[Category] = {} +) -> InlineKeyboardMarkup: + keyboard = [] + + for category in subcategories: + if category.id not in selected_categories: + button = InlineKeyboardButton(category.name, callback_data=f"select_category_{category.id}") + else: + button = InlineKeyboardButton(f"✅ {category.name}", callback_data=f"select_category_{category.id}") + keyboard.append([button]) + + keyboard.append([InlineKeyboardButton("Назад ⬅️", 
callback_data=f"back_to_{parent_id}")]) + return InlineKeyboardMarkup(keyboard) + + +async def get_menu_keyboard(user: User) -> InlineKeyboardMarkup: + keyboard = [] + keyboard.extend(MENU_KEYBOARD) + # Кнопка включения/выключения подписки на новые заказы + if user.has_mailing: + keyboard.extend([UNSUBSCRIBE_BUTTON]) + else: + keyboard.extend([SUBSCRIBE_BUTTON]) + return InlineKeyboardMarkup(keyboard) + + +async def feedback_buttons(user: User) -> ReplyKeyboardMarkup: + if hasattr(user, "email"): + email = user.email + else: + email = None + web_app = WebAppInfo( + url=urljoin( + settings.feedback_form_template_url, + FeedbackFormQueryParams(name=user.first_name, surname=user.last_name, email=email).as_url_query(), + ) + ) + keyboard = [ + [KeyboardButton(QUESTION_BUTTON_TITLE, web_app=web_app)], + [KeyboardButton(SUGGESTION_BUTTON_TITLE, web_app=web_app)], + ] + return ReplyKeyboardMarkup(keyboard, resize_keyboard=True) + + +async def get_back_menu() -> InlineKeyboardMarkup: + keyboard = [[InlineKeyboardButton(text="Вернуться в меню", callback_data=callback_data.MENU)]] + return InlineKeyboardMarkup(keyboard) + + +async def get_start_keyboard( + callback_data_on_start: str, + url_for_connection: str, + settings: Settings = Provide[Container.settings], +) -> InlineKeyboardMarkup: + keyboard = [ + [InlineKeyboardButton("Начнём", callback_data=callback_data_on_start)], + [InlineKeyboardButton("Перейти на сайт ProCharity", url=settings.PROCHARITY_URL)], + [InlineKeyboardButton("Связать аккаунт с ботом", url=url_for_connection)], + ] + return InlineKeyboardMarkup(keyboard) + + +async def get_open_tasks_and_menu_keyboard() -> InlineKeyboardMarkup: + keyboard = [ + [InlineKeyboardButton("Посмотреть открытые задачи", callback_data=callback_data.VIEW_TASKS)], + [InlineKeyboardButton("Открыть меню", callback_data=callback_data.MENU)], + ] + return InlineKeyboardMarkup(keyboard) + + +async def view_more_tasks_keyboard() -> InlineKeyboardMarkup: + keyboard = [ + [InlineKeyboardButton(text="Показать ещё задания", callback_data=callback_data.VIEW_TASKS)], + [InlineKeyboardButton(text="Открыть меню", callback_data=callback_data.MENU)], + ] + return InlineKeyboardMarkup(keyboard) + + +def get_confirm_keyboard() -> InlineKeyboardMarkup: + keyboard = [ + [InlineKeyboardButton("Да", callback_data=callback_data.CONFIRM_CATEGORIES)], + [InlineKeyboardButton("Нет, хочу изменить", callback_data=callback_data.CHANGE_CATEGORY)], + ] + return InlineKeyboardMarkup(keyboard) + + +def get_no_mailing_keyboard() -> InlineKeyboardMarkup: + """Клавиатура с причинами отписки от рассылки на почту""" + keyboard = [[InlineKeyboardButton(reason, callback_data=f"reason_{reason.name}")] for reason in enum.REASONS] + return InlineKeyboardMarkup(keyboard) diff --git a/src/bot/main.py b/src/bot/main.py new file mode 100644 index 00000000..ed3a3fa7 --- /dev/null +++ b/src/bot/main.py @@ -0,0 +1,18 @@ +from telegram import Update +from telegram.ext import Application, TypeHandler + +from src.core.logging.utils import logging_updates + + +def init_bot(telegram_bot: Application) -> Application: + """Инициализация телеграм бота.""" + + from .handlers import categories, feedback_form, menu, registration, tasks + + registration.registration_handlers(telegram_bot) + categories.registration_handlers(telegram_bot) + tasks.registration_handlers(telegram_bot) + menu.registration_handlers(telegram_bot) + feedback_form.registration_handlers(telegram_bot) + telegram_bot.add_handler(TypeHandler(Update, logging_updates)) + return telegram_bot diff 
--git a/src/bot/schemas.py b/src/bot/schemas.py new file mode 100644 index 00000000..46098f87 --- /dev/null +++ b/src/bot/schemas.py @@ -0,0 +1,18 @@ +from pydantic import BaseModel + + +class FeedbackModel(BaseModel): + """Класс модели для обратной связи.""" + + surname: str + name: str + email: str + feedback: str + + def to_message(self): + return f"""Получено сообщение от пользователя телеграмм бота. + Фамилия: {self.surname}, + Имя: {self.name}, + email: {self.email}, + Отзыв: {self.feedback} + """ diff --git a/src/bot/services/__init__.py b/src/bot/services/__init__.py new file mode 100644 index 00000000..cf79dd36 --- /dev/null +++ b/src/bot/services/__init__.py @@ -0,0 +1,3 @@ +from .unsubscribe_reason import UnsubscribeReasonService + +__all__ = ("UnsubscribeReasonService",) diff --git a/src/bot/services/category.py b/src/bot/services/category.py new file mode 100644 index 00000000..23852b0e --- /dev/null +++ b/src/bot/services/category.py @@ -0,0 +1,15 @@ +from src.core.db.models import Category +from src.core.db.repository.category import CategoryRepository + + +class CategoryService: + """Сервис бота для работы с моделью Category.""" + + def __init__(self, category_repository: CategoryRepository) -> None: + self._category_repository = category_repository + + async def get_unarchived_subcategories(self, parent_id) -> list[Category]: + return await self._category_repository.get_unarchived_subcategories(parent_id) + + async def get_unarchived_parents_with_children_count(self): # -> dict[Category, int]: + return await self._category_repository.get_unarchived_parents_with_children_count() diff --git a/src/bot/services/external_site_user.py b/src/bot/services/external_site_user.py new file mode 100644 index 00000000..22fde3d3 --- /dev/null +++ b/src/bot/services/external_site_user.py @@ -0,0 +1,16 @@ +from src.core.db.models import ExternalSiteUser +from src.core.db.repository.external_site_user import ExternalSiteUserRepository + + +class ExternalSiteUserService: + """Сервис бота для работы с моделью ExternalSiteUser.""" + + def __init__(self, site_user_repository: ExternalSiteUserRepository): + self._site_user_repository = site_user_repository + + async def get_ext_user_by_args(self, args) -> ExternalSiteUser | None: + """Возвращает пользователя (или None) по арументам.""" + if args: + id_hash = args[0] + return await self._site_user_repository.get_by_id_hash(id_hash) + return None diff --git a/src/bot/services/task.py b/src/bot/services/task.py new file mode 100644 index 00000000..81551b2a --- /dev/null +++ b/src/bot/services/task.py @@ -0,0 +1,27 @@ +from typing import Optional + +from src.core.db.models import Task +from src.core.db.repository.task import TaskRepository +from src.core.db.repository.user import UserRepository + + +class TaskService: + """Сервис бота для работы с моделью Task.""" + + def __init__(self, task_repository: TaskRepository, user_repository: UserRepository): + self._task_repository = task_repository + self._user_repository = user_repository + + async def get_user_tasks_by_page(self, page_number: int, limit: int, telegram_id: int) -> list[Task]: + offset = (page_number - 1) * limit + user = await self._user_repository.get_by_telegram_id(telegram_id) + return await self._task_repository.get_tasks_limit_for_user(limit, offset, user), offset, page_number + + async def get_remaining_user_tasks_count(self, limit: int, offset: int, telegram_id: int) -> int: + user = await self._user_repository.get_by_telegram_id(telegram_id) + total_tasks = await 
self._task_repository.get_user_tasks_count(user) + remaining_tasks = total_tasks - (offset + limit) + return remaining_tasks + + async def get_task_by_id(self, task_id: int) -> Optional[Task]: + return await self._task_repository.get_user_task_id(task_id) diff --git a/src/bot/services/unsubscribe_reason.py b/src/bot/services/unsubscribe_reason.py new file mode 100644 index 00000000..87a55b81 --- /dev/null +++ b/src/bot/services/unsubscribe_reason.py @@ -0,0 +1,24 @@ +from src.core.db.models import UnsubscribeReason +from src.core.db.repository import UnsubscribeReasonRepository, UserRepository + + +class UnsubscribeReasonService: + """Сервис для работы с моделью UnsubscribeReason.""" + + def __init__( + self, user_repository: UserRepository, unsubscribe_reason_repository: UnsubscribeReasonRepository + ) -> None: + self._user_repository = user_repository + self._unsubscribe_reason_repository = unsubscribe_reason_repository + + async def save_reason(self, telegram_id: int, reason: str) -> None: + user = await self._user_repository.get_by_telegram_id(telegram_id) + reason_obj = await self._unsubscribe_reason_repository.get_by_user(user) + if reason_obj is not None: + await self._unsubscribe_reason_repository.update( + reason_obj.id, UnsubscribeReason(user_id=user.id, unsubscribe_reason=reason) + ) + else: + await self._unsubscribe_reason_repository.create( + UnsubscribeReason(user_id=user.id, unsubscribe_reason=reason) + ) diff --git a/src/bot/services/user.py b/src/bot/services/user.py new file mode 100644 index 00000000..883f3acd --- /dev/null +++ b/src/bot/services/user.py @@ -0,0 +1,103 @@ +from src.core.db.models import User, UsersCategories +from src.core.db.repository.user import UserRepository + + +class UserService: + def __init__(self, user_repository: UserRepository) -> None: + self._user_repository = user_repository + + async def register_user( + self, + telegram_id: int, + username: str = "", + first_name: str = "", + last_name: str = "", + email: str | None = None, + external_id: int | None = None, + ) -> User: + """Регистрирует нового пользователя по telegram_id. + + Если пользователь найден, обновляет имя и флаг "заблокирован". 
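+        Возвращает созданного или обновлённого пользователя.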
+ """ + user = await self._user_repository.get_by_telegram_id(telegram_id) + if user is not None: + return await self._user_repository.restore_existing_user( + user=user, + username=username, + first_name=first_name, + last_name=last_name, + ) + return await self._user_repository.create( + User( + telegram_id=telegram_id, + username=username, + first_name=first_name, + last_name=last_name, + email=email, + external_id=external_id, + ) + ) + + async def set_categories_to_user(self, telegram_id: int, categories_ids: list[int]) -> None: + """Присваивает пользователю список категорий.""" + await self._user_repository.set_categories_to_user(telegram_id, categories_ids) + + async def add_category_to_user(self, telegram_id: int, category_id: int) -> None: + """Добавляет пользователю указанную категорию""" + user = await self._user_repository.get_by_telegram_id(telegram_id) + await self._user_repository.create(UsersCategories(user_id=user.id, category_id=category_id)) + + async def delete_category_from_user(self, telegram_id: int, category_id: int) -> None: + """Удаляет у пользователя указанную категорию""" + user = await self._user_repository.get_by_telegram_id(telegram_id) + await self._user_repository.delete_category_from_user(user, category_id) + + async def get_user_categories(self, telegram_id: int) -> dict[int, str]: + """Возвращает словарь с id и name категорий пользователя по его telegram_id.""" + user = await self._user_repository.get_by_telegram_id(telegram_id) + categories = await self._user_repository.get_user_categories(user) + return {category.id: category.name for category in categories} + + async def get_user_categories_with_parents(self, telegram_id: int) -> dict[int, dict[int, str]]: + """Возвращает словарь с id родительской группы словарей с id и name категорий пользователя + по его telegram_id.""" + repository = self._user_repository + user = await repository.get_by_telegram_id(telegram_id) + categories = await repository.get_user_categories(user) + result = {} + for category in categories: + if category.parent_id in result: + result[category.parent_id].update({category.id: category.name}) + else: + result[category.parent_id] = {category.id: category.name} + return result + + async def get_mailing(self, telegram_id: int) -> bool: + """Возвращает статус подписки пользователя на почтовую рассылку.""" + user = await self._user_repository.get_by_telegram_id(telegram_id) + return user.has_mailing + + async def set_mailing(self, telegram_id: int) -> bool: + """ + Присваивает пользователю получение почтовой рассылки на задания. + Возвращает статус подписки пользователя на почтовую рассылку. + """ + user = await self._user_repository.get_by_telegram_id(telegram_id) + await self._user_repository.set_mailing(user, not user.has_mailing) + return user.has_mailing + + async def check_and_set_has_mailing_atribute(self, telegram_id: int) -> None: + """ + Присваивает пользователю атрибут has_mailing, для получения почтовой + рассылки на задания после выбора категорий. Предварительно + осуществляется проверка, установлен ли этот атрибут у пользователя + ранее. 
+ """ + user = await self._user_repository.get_by_telegram_id(telegram_id) + if not user.has_mailing: + await self._user_repository.set_mailing(user, True) + + async def get_by_telegram_id(self, telegram_id: int) -> User: + """Оборачивает одноименную функцию из UserRepository.""" + user = await self._user_repository.get_by_telegram_id(telegram_id) + return user diff --git a/src/bot/utils.py b/src/bot/utils.py new file mode 100644 index 00000000..d45651a3 --- /dev/null +++ b/src/bot/utils.py @@ -0,0 +1,40 @@ +from collections.abc import Awaitable, Callable +from functools import wraps +from typing import ParamSpec, TypeVar + +from dependency_injector.wiring import Provide +from telegram import Update + +from src.core.depends import Container +from src.settings import Settings + +ReturnType = TypeVar("ReturnType") +ParameterTypes = ParamSpec("ParameterTypes") + + +def delete_previous_message( + coroutine: Callable[ParameterTypes, Awaitable[ReturnType]] +) -> Callable[ParameterTypes, Awaitable[ReturnType]]: + """Декоратор для функций, отправляющих новые сообщения с inline-кнопками. + После выполнения оборачиваемой функции удаляет сообщение с inline-кнопкой, + нажатие на которую повлекло вызов оборачиваемой функции.""" + + @wraps(coroutine) + async def wrapper(update: Update, *args: ParameterTypes.args, **kwargs: ParameterTypes.kwargs) -> ReturnType: + result = await coroutine(update, *args, **kwargs) + await update.callback_query.message.delete() + return result + + return wrapper + + +def get_connection_url( + telegram_id: int, + external_id: int = None, + settings: Settings = Provide[Container.settings], +) -> str: + """Получение ссылки для связи аккаунта с ботом по external_id и telegram_id. + В случае отсутствия external_id возвращает ссылку на страницу авторизации""" + if external_id: + return f"{settings.PROCHARITY_URL}auth/bot_procharity.php?user_id={external_id}&telegram_id={telegram_id}" + return f"{settings.PROCHARITY_URL}auth/" diff --git a/src/core/__init__.py b/src/core/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/core/db/__init__.py b/src/core/db/__init__.py new file mode 100644 index 00000000..745ee58e --- /dev/null +++ b/src/core/db/__init__.py @@ -0,0 +1,3 @@ +from src.core.db.db import get_session + +__all__ = ("get_session",) diff --git a/src/core/db/db.py b/src/core/db/db.py new file mode 100644 index 00000000..ddc02858 --- /dev/null +++ b/src/core/db/db.py @@ -0,0 +1,14 @@ +from typing import Generator + +from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine + +from src.settings import settings + +engine = create_async_engine(settings.database_url) + + +async def get_session( + sessionmaker=async_sessionmaker(engine, expire_on_commit=False) +) -> Generator[AsyncSession, None, None]: + async with sessionmaker() as session: + yield session diff --git a/src/core/db/migrations/env.py b/src/core/db/migrations/env.py new file mode 100644 index 00000000..9e84fc45 --- /dev/null +++ b/src/core/db/migrations/env.py @@ -0,0 +1,82 @@ +import asyncio +from logging.config import fileConfig + +from alembic import context +from sqlalchemy import engine_from_config, pool +from sqlalchemy.ext.asyncio import AsyncEngine + +from src.core.db.models import Base +from src.settings import settings + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. 
+config = context.config +config.set_main_option("sqlalchemy.url", settings.database_url) + + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +fileConfig(config.config_file_name) + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +target_metadata = Base.metadata + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def run_migrations_offline(): + """Run migrations in 'offline' mode. + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + Calls to context.execute() here emit the given string to the + script output. + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def do_run_migrations(connection): + context.configure(connection=connection, target_metadata=target_metadata) + + with context.begin_transaction(): + context.run_migrations() + + +async def run_migrations_online(): + """Run migrations in 'online' mode. + In this scenario we need to create an Engine + and associate a connection with the context. + """ + connectable = AsyncEngine( + engine_from_config( + config.get_section(config.config_ini_section), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + future=True, + ) + ) + + async with connectable.connect() as connection: + await connection.run_sync(do_run_migrations) + + +if context.is_offline_mode(): + run_migrations_offline() +else: + asyncio.run(run_migrations_online()) diff --git a/src/core/db/migrations/script.py.mako b/src/core/db/migrations/script.py.mako new file mode 100644 index 00000000..55df2863 --- /dev/null +++ b/src/core/db/migrations/script.py.mako @@ -0,0 +1,24 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade() -> None: + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + ${downgrades if downgrades else "pass"} diff --git a/src/core/db/migrations/versions/.gitkeep b/src/core/db/migrations/versions/.gitkeep new file mode 100644 index 00000000..e69de29b diff --git a/src/core/db/migrations/versions/2023-11-11_12.18.35_initial.py b/src/core/db/migrations/versions/2023-11-11_12.18.35_initial.py new file mode 100644 index 00000000..a2e04c9e --- /dev/null +++ b/src/core/db/migrations/versions/2023-11-11_12.18.35_initial.py @@ -0,0 +1,241 @@ +"""initial + +Revision ID: 6c4739908b74 +Revises: +Create Date: 2023-11-09 14:46:01.346164 + +""" +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. 
+revision = "6c4739908b74" +down_revision = None +branch_labels = None +depends_on = None + + +def column_exists(table_name, column_name): + bind = op.get_context().bind + insp = sa.inspect(bind) + columns = insp.get_columns(table_name) + return any(c["name"] == column_name for c in columns) + + +def upgrade() -> None: + connection = op.get_bind() + inspector = sa.inspect(connection) + + if not inspector.has_table("users"): + op.create_table( + "users", + sa.Column("telegram_id", sa.BigInteger(), nullable=False), + sa.Column("username", sa.String(length=32), nullable=True), + sa.Column("email", sa.String(length=48), nullable=True), + sa.Column("external_id", sa.Integer(), nullable=True), + sa.Column("first_name", sa.String(length=64), nullable=True), + sa.Column("last_name", sa.String(length=64), nullable=True), + sa.Column("has_mailing", sa.Boolean(), nullable=False), + sa.Column("external_signup_date", sa.Date(), nullable=True), + sa.Column("banned", sa.Boolean(), server_default=sa.text("false"), nullable=False), + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("created_at", sa.Date(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False), + sa.Column("updated_at", sa.Date(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("email"), + sa.UniqueConstraint("external_id"), + sa.UniqueConstraint("telegram_id"), + sa.UniqueConstraint("username"), + ) + else: + if column_exists("users", "date_registration"): + op.alter_column("users", "date_registration", new_column_name="created_at") + op.add_column( + "users", sa.Column("updated_at", sa.Date(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False) + ) + op.add_column("users", sa.Column("id", sa.Integer())) + op.execute("CREATE SEQUENCE users_id_seq") + op.execute("UPDATE users SET id = nextval('users_id_seq')") + op.alter_column("users", "id", nullable=False) + op.create_unique_constraint("users_id_key", "users", ["id"]) + + if not inspector.has_table("admin_users"): + op.create_table( + "admin_users", + sa.Column("email", sa.String(length=48), nullable=False), + sa.Column("first_name", sa.String(length=64), nullable=True), + sa.Column("last_name", sa.String(length=64), nullable=True), + sa.Column("password", sa.String(length=128), nullable=False), + sa.Column("last_login", sa.Date(), nullable=True), + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("created_at", sa.Date(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False), + sa.Column("updated_at", sa.Date(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("email"), + ) + else: + if not column_exists("admin_users", "created_at"): + op.add_column( + "admin_users", + sa.Column("created_at", sa.Date(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False), + ) + op.add_column( + "admin_users", + sa.Column("updated_at", sa.Date(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False), + ) + op.alter_column("admin_users", "last_logon", new_column_name="last_login") + + if not inspector.has_table("categories"): + op.create_table( + "categories", + sa.Column("name", sa.String(length=100), nullable=False), + sa.Column("parent_id", sa.Integer(), nullable=True), + sa.Column("is_archived", sa.Boolean(), server_default=sa.text("false"), nullable=False), + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("created_at", sa.Date(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False), + 
sa.Column("updated_at", sa.Date(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False), + sa.ForeignKeyConstraint( + ["parent_id"], + ["categories.id"], + ), + sa.PrimaryKeyConstraint("id"), + ) + else: + if not column_exists("categories", "created_at"): + op.add_column( + "categories", + sa.Column("created_at", sa.Date(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False), + ) + op.add_column( + "categories", + sa.Column("updated_at", sa.Date(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False), + ) + op.alter_column("categories", "archive", new_column_name="is_archived") + + if not inspector.has_table("external_site_users"): + op.create_table( + "external_site_users", + sa.Column("id_hash", sa.String(length=256), nullable=False), + sa.Column("email", sa.String(length=48), nullable=False), + sa.Column("first_name", sa.String(length=64), nullable=True), + sa.Column("last_name", sa.String(length=64), nullable=True), + sa.Column("specializations", sa.ARRAY(sa.Integer()), nullable=True), + sa.Column("source", sa.String(), nullable=True), + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("created_at", sa.Date(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False), + sa.Column("updated_at", sa.Date(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("email"), + ) + else: + if column_exists("external_site_users", "external_id"): + op.alter_column("external_site_users", "external_id", new_column_name="id") + op.alter_column("external_site_users", "external_id_hash", new_column_name="id_hash") + op.alter_column("external_site_users", "created_date", new_column_name="created_at") + op.alter_column("external_site_users", "updated_date", new_column_name="updated_at") + + if not inspector.has_table("notifications"): + op.create_table( + "notifications", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("message", sa.String(length=4096), nullable=False), + sa.Column("was_sent", sa.Boolean()), + sa.Column("sent_date", sa.TIMESTAMP()), + sa.Column("sent_by", sa.String(length=48)), + sa.PrimaryKeyConstraint("id"), + ) + else: + if not column_exists("notifications", "created_at"): + op.add_column( + "notifications", + sa.Column("created_at", sa.Date(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False), + ) + op.add_column( + "notifications", + sa.Column("updated_at", sa.Date(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False), + ) + + if not inspector.has_table("reasons_canceling") and not inspector.has_table("unsubscribe_reason"): + op.create_table( + "unsubscribe_reason", + sa.Column("id", sa.Integer(), unique=True, nullable=False), + sa.Column("telegram_id", sa.BigInteger(), nullable=True), + sa.Column("unsubscribe_reason", sa.String(length=48), nullable=False), + sa.Column("created_at", sa.TIMESTAMP(), nullable=False, server_default=sa.func.current_timestamp()), + sa.Column("updated_at", sa.TIMESTAMP(), nullable=False, server_default=sa.func.current_timestamp()), + sa.Column("archive", sa.Boolean(), nullable=True, server_default=sa.sql.expression.false()), + sa.Column("user_id", sa.Integer(), sa.ForeignKey("users.id")), + sa.PrimaryKeyConstraint("id"), + ) + else: + if inspector.has_table("reasons_canceling"): + op.rename_table("reasons_canceling", "unsubscribe_reason") + op.alter_column("unsubscribe_reason", "reason_canceling", new_column_name="unsubscribe_reason") + op.alter_column("unsubscribe_reason", "added_date", new_column_name="created_at") + 
op.alter_column("unsubscribe_reason", "updated_date", new_column_name="updated_at") + op.add_column( + "unsubscribe_reason", sa.Column("user_id", sa.Integer(), sa.ForeignKey("users.id"), nullable=True) + ) + + if not inspector.has_table("tasks"): + op.create_table( + "tasks", + sa.Column("id", sa.Integer(), primary_key=True), + sa.Column("title", sa.String(), nullable=True), + sa.Column("name_organization", sa.String(), nullable=True), + sa.Column("deadline", sa.Date(), nullable=True), + sa.Column("category_id", sa.Integer(), sa.ForeignKey("categories.id")), + sa.Column("bonus", sa.Integer()), + sa.Column("location", sa.String()), + sa.Column("link", sa.String()), + sa.Column("description", sa.String()), + sa.Column("is_archived", sa.Boolean(), nullable=True, server_default=sa.sql.expression.false()), + sa.Column("created_at", sa.TIMESTAMP(), nullable=False, server_default=sa.func.current_timestamp()), + sa.Column("updated_at", sa.TIMESTAMP(), nullable=False, server_default=sa.func.current_timestamp()), + ) + else: + if column_exists("tasks", "created_date"): + op.alter_column("tasks", "created_date", new_column_name="created_at") + op.alter_column("tasks", "updated_date", new_column_name="updated_at") + op.alter_column("tasks", "archive", new_column_name="is_archived") + + if not inspector.has_table("users_categories"): + op.create_table( + "users_categories", + sa.Column("category_id", sa.Integer(), sa.ForeignKey("categories.id"), primary_key=True), + sa.Column("user_id", sa.Integer(), sa.ForeignKey("users.id"), primary_key=True), + sa.Column("telegram_id", sa.BigInteger()), + sa.Column("created_at", sa.TIMESTAMP(), nullable=False, server_default=sa.func.current_timestamp()), + sa.Column("updated_at", sa.TIMESTAMP(), nullable=False, server_default=sa.func.current_timestamp()), + ) + else: + if not column_exists("users_categories", "user_id"): + op.add_column("users_categories", sa.Column("user_id", sa.Integer())) + op.execute( + "UPDATE users_categories SET user_id = users.id FROM users WHERE users_categories.telegram_id = users.telegram_id" + ) + op.create_foreign_key("users_categories_user_id_fkey", "users_categories", "users", ["user_id"], ["id"]) + op.drop_constraint("users_categories_telegram_id_fkey", "users_categories", type_="foreignkey") + op.drop_constraint("users_pkey", "users", type_="primary") + op.create_primary_key("users_pkey", "users", ["id"]) + op.add_column( + "users_categories", + sa.Column("created_at", sa.Date(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False), + ) + op.add_column( + "users_categories", + sa.Column("updated_at", sa.Date(), server_default=sa.text("CURRENT_TIMESTAMP"), nullable=False), + ) + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_table("users_categories") + op.drop_table("unsubscribe_reason") + op.drop_table("tasks") + op.drop_table("users") + op.drop_table("external_site_users") + op.drop_table("categories") + op.drop_table("admin_users") + op.drop_table("admin_token_requests") + # ### end Alembic commands ### diff --git a/src/core/db/migrations/versions/2023-11-24_18.43.29_rename_password_in_hashed_pasword.py b/src/core/db/migrations/versions/2023-11-24_18.43.29_rename_password_in_hashed_pasword.py new file mode 100644 index 00000000..d760493e --- /dev/null +++ b/src/core/db/migrations/versions/2023-11-24_18.43.29_rename_password_in_hashed_pasword.py @@ -0,0 +1,37 @@ +"""rename password in hashed_pasword + +Revision ID: efd08301014a +Revises: 6c4739908b74 +Create Date: 2023-11-24 18:43:29.141884 + +""" +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "efd08301014a" +down_revision = "6c4739908b74" +branch_labels = None +depends_on = None + + +def column_exists(table_name, column_name): + bind = op.get_context().bind + insp = sa.inspect(bind) + columns = insp.get_columns(table_name) + return any(c["name"] == column_name for c in columns) + + +def upgrade() -> None: + if column_exists("admin_users", "password"): + op.alter_column("admin_users", "password", new_column_name="hashed_password") + + if not column_exists("admin_users", "is_superuser"): + op.add_column("admin_users", sa.Column("is_superuser", sa.Boolean(), server_default="false", nullable=False)) + op.add_column("admin_users", sa.Column("is_verified", sa.Boolean(), server_default="false", nullable=False)) + op.add_column("admin_users", sa.Column("is_active", sa.Boolean(), server_default="false", nullable=False)) + + +def downgrade() -> None: + if column_exists("admin_users", "hashed_password"): + op.alter_column("admin_users", "hashed_password", new_column_name="password") diff --git a/src/core/db/models.py b/src/core/db/models.py new file mode 100644 index 00000000..add29544 --- /dev/null +++ b/src/core/db/models.py @@ -0,0 +1,171 @@ +from datetime import date + +from passlib.context import CryptContext +from sqlalchemy import ARRAY, BigInteger, ForeignKey, Integer, String +from sqlalchemy.ext.declarative import AbstractConcreteBase +from sqlalchemy.orm import DeclarativeBase, Mapped, backref, mapped_column, relationship +from sqlalchemy.sql import expression, func + +pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto") + + +class Base(DeclarativeBase): + """Основа для базового класса.""" + + id: Mapped[int] = mapped_column(primary_key=True) + created_at: Mapped[date] = mapped_column(server_default=func.current_timestamp()) + updated_at: Mapped[date] = mapped_column( + server_default=func.current_timestamp(), + onupdate=func.current_timestamp(), + ) + __name__: Mapped[str] + + +class ContentBase(AbstractConcreteBase, Base): + """Базовый класс для контента (категорий и задач).""" + + is_archived: Mapped[bool] = mapped_column(server_default=expression.false()) + + +class UsersCategories(Base): + """Модель отношений пользователь-категория.""" + + __tablename__ = "users_categories" + + id = None + category_id: Mapped[int] = mapped_column(ForeignKey("categories.id"), primary_key=True) + user_id: Mapped[int] = mapped_column(ForeignKey("users.id"), primary_key=True) + + def __repr__(self): + return f"" + + +class User(Base): + """Модель пользователя.""" + + __tablename__ = "users" + telegram_id: Mapped[int] = mapped_column(BigInteger, unique=True) + username: Mapped[str] = 
mapped_column(String(32), unique=True, nullable=True) + email: Mapped[str] = mapped_column(String(48), unique=True, nullable=True) + external_id: Mapped[int] = mapped_column(unique=True, nullable=True) + first_name: Mapped[str] = mapped_column(String(64), nullable=True) + last_name: Mapped[str] = mapped_column(String(64), nullable=True) + has_mailing: Mapped[bool] = mapped_column(default=False) + external_signup_date: Mapped[date] = mapped_column(nullable=True) + banned: Mapped[bool] = mapped_column(server_default=expression.false()) + + categories: Mapped[list["Category"]] = relationship( + "Category", secondary="users_categories", back_populates="users" + ) + unsubscribe_reason: Mapped["UnsubscribeReason"] = relationship(back_populates="user") + + def __repr__(self): + return f"" + + +class ExternalSiteUser(Base): + """Модель пользователя с сайта ProCharity.""" + + __tablename__ = "external_site_users" + + id_hash: Mapped[str] = mapped_column(String(256)) + email: Mapped[str] = mapped_column(String(48), unique=True) + first_name: Mapped[str] = mapped_column(String(64), nullable=True) + last_name: Mapped[str] = mapped_column(String(64), nullable=True) + specializations: Mapped[list[int]] = mapped_column(ARRAY(Integer), nullable=True) + source: Mapped[str] = mapped_column(nullable=True) + + def __repr__(self): + return f"" + + +class Task(ContentBase): + """Модель задач.""" + + __tablename__ = "tasks" + title: Mapped[str] + name_organization: Mapped[str] = mapped_column(nullable=True) + deadline: Mapped[date] = mapped_column(nullable=True) + + category_id: Mapped[int] = mapped_column(ForeignKey("categories.id")) + category: Mapped["Category"] = relationship(back_populates="tasks") + + bonus: Mapped[int] + location: Mapped[str] + link: Mapped[str] + description: Mapped[str] + + def __repr__(self): + return f"" + + +class Category(ContentBase): + """Модель категорий.""" + + __tablename__ = "categories" + name: Mapped[str] = mapped_column(String(100)) + + users: Mapped[list["User"]] = relationship("User", secondary="users_categories", back_populates="categories") + + tasks: Mapped[list["Task"]] = relationship(back_populates="category") + + parent_id: Mapped[int] = mapped_column(ForeignKey("categories.id"), nullable=True) + children: Mapped["Category"] = relationship("Category", backref=backref("parent", remote_side="Category.id")) + + def __repr__(self): + return f"" + + +class AdminUser(Base): + __tablename__ = "admin_users" + + email: Mapped[str] = mapped_column(String(48), unique=True) + first_name: Mapped[str] = mapped_column(String(64), nullable=True) + last_name: Mapped[str] = mapped_column(String(64), nullable=True) + password: Mapped[str] = mapped_column(String(128)) + last_login: Mapped[date] = mapped_column(nullable=True) + + def __repr__(self): + return f"" + + def set_password(self, password): + self.password = pwd_context.hash(password) + + def check_password(self, password): + return pwd_context.verify(password, self.password) + + +class AdminTokenRequest(Base): + __tablename__ = "admin_token_requests" + + email: Mapped[str] = mapped_column(String(48), unique=True) + token: Mapped[str] = mapped_column(String(128)) + token_expiration_date: Mapped[date] + + def __repr__(self): + return f"" + + +class UnsubscribeReason(Base): + __tablename__ = "unsubscribe_reason" + + user_id: Mapped[int] = mapped_column(Integer(), ForeignKey("users.id")) + user: Mapped["User"] = relationship("User", back_populates="unsubscribe_reason") + unsubscribe_reason: Mapped[str] = mapped_column(String(128), 
nullable=True) + + def __repr__(self): + return f"" + + +class Notification(Base): + """Модель уведомления.""" + + __tablename__ = "notifications" + + message: Mapped[str] = mapped_column(String(length=4096), nullable=False) + was_sent: Mapped[bool] = mapped_column(server_default=expression.false()) + sent_date: Mapped[date] + sent_by: Mapped[str] = mapped_column(String(length=48)) + + def __repr__(self): + return f"" diff --git a/src/core/db/repository/__init__.py b/src/core/db/repository/__init__.py new file mode 100644 index 00000000..a1a85d38 --- /dev/null +++ b/src/core/db/repository/__init__.py @@ -0,0 +1,18 @@ +from .admin_repository import AdminUserRepository +from .base import AbstractRepository, ContentRepository +from .category import CategoryRepository +from .external_site_user import ExternalSiteUserRepository +from .task import TaskRepository +from .unsubscribe_reason import UnsubscribeReasonRepository +from .user import UserRepository + +__all__ = ( + "AbstractRepository", + "ContentRepository", + "CategoryRepository", + "TaskRepository", + "UserRepository", + "ExternalSiteUserRepository", + "UnsubscribeReasonRepository", + "AdminUserRepository", +) diff --git a/src/core/db/repository/admin_repository.py b/src/core/db/repository/admin_repository.py new file mode 100644 index 00000000..0cf6c4b0 --- /dev/null +++ b/src/core/db/repository/admin_repository.py @@ -0,0 +1,16 @@ +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +from src.core.db.models import AdminUser +from src.core.db.repository.base import AbstractRepository + + +class AdminUserRepository(AbstractRepository): + """Репозиторий для работы с моделью AdminUser.""" + + def __init__(self, session: AsyncSession) -> None: + super().__init__(session, AdminUser) + + async def get_by_email(self, email: str) -> AdminUser | None: + """Возвращает пользователя (или None) по email.""" + return await self._session.scalar(select(AdminUser).where(AdminUser.email == email)) diff --git a/src/core/db/repository/base.py b/src/core/db/repository/base.py new file mode 100644 index 00000000..d1f87331 --- /dev/null +++ b/src/core/db/repository/base.py @@ -0,0 +1,120 @@ +import abc +from typing import Sequence, TypeVar + +from sqlalchemy import func, select, update +from sqlalchemy.exc import IntegrityError +from sqlalchemy.ext.asyncio import AsyncSession + +from src.api.constants import DATE_FORMAT_FOR_STATISTICS +from src.core.exceptions import AlreadyExistsException, NotFoundException +from src.core.utils import auto_commit + +DatabaseModel = TypeVar("DatabaseModel") +DATE_TIME_FORMAT_LAST_UPDATE = "YYYY-MM-DD HH24:MI:SS" + + +class AbstractRepository(abc.ABC): + """Абстрактный класс, для реализации паттерна Repository.""" + + def __init__(self, session: AsyncSession, model: DatabaseModel) -> None: + self._session = session + self._model = model + + async def get_or_none(self, _id: int) -> DatabaseModel | None: + """Получает из базы объект модели по ID. В случае отсутствия возвращает None.""" + return await self._session.scalar(select(self._model).where(self._model.id == _id)) + + async def get(self, _id: int) -> DatabaseModel: + """Получает объект модели по ID. 
В случае отсутствия объекта бросает ошибку.""" + db_obj = await self.get_or_none(_id) + if db_obj is None: + raise NotFoundException(object_name=self._model.__name__, object_id=_id) + return db_obj + + async def create(self, instance: DatabaseModel) -> DatabaseModel: + """Создает новый объект модели и сохраняет в базе.""" + self._session.add(instance) + try: + await self._session.commit() + except IntegrityError as exc: + raise AlreadyExistsException(instance) from exc + + await self._session.refresh(instance) + return instance + + async def remove(self, instance: DatabaseModel) -> None: + """Удаляет объект модели из базы данных.""" + await self._session.delete(instance) + await self._session.commit() + + @auto_commit + async def update(self, _id: int, instance: DatabaseModel) -> DatabaseModel: + """Обновляет существующий объект модели в базе.""" + instance.id = _id + instance = await self._session.merge(instance) + return instance # noqa: R504 + + @auto_commit + async def update_all(self, instances: list[dict]) -> Sequence[DatabaseModel]: + """Обновляет несколько измененных объектов модели в базе.""" + await self._session.execute(update(self._model), instances) + return instances + + async def get_all(self) -> Sequence[DatabaseModel]: + """Возвращает все объекты модели из базы данных.""" + objects = await self._session.scalars(select(self._model)) + return objects.all() + + @auto_commit + async def create_all(self, objects: Sequence[DatabaseModel]) -> None: + """Создает несколько объектов модели в базе данных.""" + self._session.add_all(objects) + + async def count_all(self) -> int: + """Возвращает количество объектов модели в базе данных.""" + return await self._session.scalar(select(func.count()).select_from(self._model)) + + async def count_active_all(self) -> int: + """Возвращает количество неархивных (активных) объектов модели в базе данных.""" + return await self._session.scalar( + select(func.count()).select_from(self._model).where(self._model.is_archived == False) # noqa + ) + + async def get_last_update(self) -> str | None: + """Получает из базы отсортированный по времени обновления объект модели. + В случае отсутствия возвращает None.""" + return await self._session.scalar( + select(func.to_char(self._model.updated_at, DATE_TIME_FORMAT_LAST_UPDATE)).order_by( + self._model.updated_at.desc() + ) + ) + + async def get_statistics_by_days(self, date_begin, date_limit, column_name) -> dict[str, int]: + """Получает из базы отсортированный и отфильтрованный сводный набор записей модели + по полю column_name. 
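+        Возвращает словарь вида {дата в формате DATE_FORMAT_FOR_STATISTICS: количество записей}.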
+ """ + column = self._model.__dict__[column_name] + db_data = await self._session.execute( + select(func.to_char(column, DATE_FORMAT_FOR_STATISTICS), func.count(column)) + .where(column >= date_begin, column <= date_limit) + .group_by(column) + .order_by(column) + ) + return dict(db_data.fetchall()) + + +class ContentRepository(AbstractRepository, abc.ABC): + @auto_commit + async def archive_by_ids(self, ids: Sequence[int]) -> None: + """Изменяет is_archived с False на True у не указанных ids.""" + await self._session.execute( + update(self._model) + .where(self._model.is_archived == False) # noqa + .where(self._model.id.not_in(ids)) + .values({"is_archived": True}) + ) + + async def get_by_ids(self, ids: list[int]) -> Sequence[int]: + """Возвращает id объектов модели из базы данных по указанным ids""" + filtered_ids = await self._session.scalars(select(self._model.id).where(self._model.id.in_(ids))) + return filtered_ids.all() diff --git a/src/core/db/repository/category.py b/src/core/db/repository/category.py new file mode 100644 index 00000000..814bbd82 --- /dev/null +++ b/src/core/db/repository/category.py @@ -0,0 +1,35 @@ +from typing import Sequence + +from sqlalchemy import false, func, null, select +from sqlalchemy.ext.asyncio import AsyncSession + +from src.core.db.models import Category +from src.core.db.repository.base import ContentRepository + + +class CategoryRepository(ContentRepository): + """Репозиторий для работы с моделью Category.""" + + def __init__(self, session: AsyncSession) -> None: + super().__init__(session, Category) + + async def get_unarchived_subcategories(self, parent_id: int) -> Sequence[Category]: + unarchived_subcategories = await self._session.scalars( + select(Category).where(Category.is_archived == false()).where(Category.parent_id == parent_id) + ) + return unarchived_subcategories.all() + + async def get_unarchived_parents_with_children_count(self): + parent_and_children_count_subquery = ( + select(Category.parent_id, func.count(Category.id).label("children_count")) + .where(Category.is_archived == false()) + .where(Category.parent_id != null()) + .group_by(Category.parent_id) + .subquery() + ) + parents_with_children_count = await self._session.execute( + select(Category.name, Category.id, parent_and_children_count_subquery.c.children_count) + .select_from(Category) + .join(parent_and_children_count_subquery, Category.id == parent_and_children_count_subquery.c.parent_id) + ) + return parents_with_children_count.all() diff --git a/src/core/db/repository/external_site_user.py b/src/core/db/repository/external_site_user.py new file mode 100644 index 00000000..4bedb3e4 --- /dev/null +++ b/src/core/db/repository/external_site_user.py @@ -0,0 +1,16 @@ +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +from src.core.db.models import ExternalSiteUser +from src.core.db.repository.base import AbstractRepository + + +class ExternalSiteUserRepository(AbstractRepository): + """Репозиторий для работы с моделью ExternalSiteUser.""" + + def __init__(self, session: AsyncSession) -> None: + super().__init__(session, ExternalSiteUser) + + async def get_by_id_hash(self, id_hash: str) -> ExternalSiteUser | None: + """Возвращает пользователя (или None) по id_hash.""" + return await self._session.scalar(select(ExternalSiteUser).where(ExternalSiteUser.id_hash == id_hash)) diff --git a/src/core/db/repository/task.py b/src/core/db/repository/task.py new file mode 100644 index 00000000..37c3357d --- /dev/null +++ 
b/src/core/db/repository/task.py @@ -0,0 +1,67 @@ +from collections.abc import Sequence + +from sqlalchemy import func, select +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import joinedload +from sqlalchemy.sql.expression import false + +from src.core.db.models import Category, Task, User +from src.core.db.repository.base import ContentRepository + + +class TaskRepository(ContentRepository): + """Репозиторий для работы с моделью Task.""" + + def __init__(self, session: AsyncSession) -> None: + super().__init__(session, Task) + + async def get_tasks_for_user(self, user_id: int, limit: int = 3, offset: int = 0) -> Sequence[Task]: + """Получить список задач из категорий на которые подписан пользователь.""" + tasks = await self._session.scalars( + select(Task) + .join(Category) + .where(Category.users.any(id=user_id)) + .where(Task.is_archived == false()) + .limit(limit) + .offset(offset) + ) + return tasks.all() + + async def get_all_user_tasks(self) -> Sequence[Task]: + """Получить список задач из категорий на которые подписан пользователь.""" + all_tasks = await self._session.scalars(select(Task).options(joinedload(Task.category))) + return all_tasks.all() + + async def get_tasks_limit_for_user(self, limit: int, offset: int, user: User) -> Sequence[Task]: + """Получить limit-выборку из списка всех задач пользователя.""" + task_limit_for_user = await self._session.scalars( + ( + select(Task) + .join(Category) + .options(joinedload(Task.category)) + .where(Category.users.any(id=user.id)) + .where(Task.is_archived == false()) + .limit(limit) + .offset(offset) + ) + ) + + return task_limit_for_user.all() + + async def get_user_tasks_count(self, user: User) -> int: + """Получить общее количество задач для пользователя.""" + return await self._session.scalar( + select(func.count(Task.id)) + .join(Category) + .where(Category.users.any(id=user.id)) + .where(Task.is_archived == false()) + ) + + async def get_user_task_id(self, task_id) -> Sequence[Task]: + """Получить задачу по id из категорий на которые подписан пользователь.""" + return await self._session.scalar(select(Task).options(joinedload(Task.category)).where(Task.id == task_id)) + + async def get_user_tasks_ids(self, ids: list[int]) -> Sequence[Task]: + """Получить список задач по ids из категорий на которые подписан пользователь.""" + tasks = await self._session.scalars(select(Task).options(joinedload(Task.category)).where(Task.id.in_(ids))) + return tasks.all() diff --git a/src/core/db/repository/unsubscribe_reason.py b/src/core/db/repository/unsubscribe_reason.py new file mode 100644 index 00000000..c42e1d22 --- /dev/null +++ b/src/core/db/repository/unsubscribe_reason.py @@ -0,0 +1,24 @@ +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.sql import func + +from src.core.db.models import UnsubscribeReason +from src.core.db.repository.base import AbstractRepository + + +class UnsubscribeReasonRepository(AbstractRepository): + """Репозиторий для работы с моделью UnsubscribeReason.""" + + def __init__(self, session: AsyncSession) -> None: + super().__init__(session, UnsubscribeReason) + + async def get_by_user(self, user): + return await self._session.scalar(select(UnsubscribeReason).where(UnsubscribeReason.user == user)) + + async def get_reason_cancelling_statistics(self) -> list[tuple[str, int]]: + query = select( + UnsubscribeReason.unsubscribe_reason.label("reason"), + func.count(UnsubscribeReason.unsubscribe_reason).label("count"), + 
).group_by(UnsubscribeReason.unsubscribe_reason) + reasons = await self._session.execute(query) + return reasons.all() diff --git a/src/core/db/repository/user.py b/src/core/db/repository/user.py new file mode 100644 index 00000000..e6d240f0 --- /dev/null +++ b/src/core/db/repository/user.py @@ -0,0 +1,69 @@ +from collections.abc import Sequence + +from sqlalchemy import delete, select +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import selectinload + +from src.core.db.models import Category, User, UsersCategories +from src.core.db.repository.base import AbstractRepository +from src.core.utils import auto_commit + + +class UserRepository(AbstractRepository): + """Репозиторий для работы с моделью User.""" + + def __init__(self, session: AsyncSession) -> None: + super().__init__(session, User) + + async def get_by_telegram_id(self, telegram_id: int) -> User | None: + """Возвращает пользователя (или None) по telegram_id.""" + return await self._session.scalar(select(User).where(User.telegram_id == telegram_id)) + + async def restore_existing_user(self, user: User, username: str, first_name: str, last_name: str) -> User: + """Обновляет данные пользователя, который уже был в базе. + + Если ранее существовавший юзер делает /start в боте, то проверяются/обновляются его username, first_name, + last_name и сбрасывается флаг "banned" - признак, что бот у него был заблокирован. + """ + if user.username != username or user.first_name != first_name or user.last_name != last_name or user.banned: + user.username, user.first_name, user.last_name, user.banned = username, first_name, last_name, False + await self.update(user.id, user) + return user + + async def set_categories_to_user(self, telegram_id: int, categories_ids: list[int]) -> None: + """Присваивает пользователю список категорий.""" + user = await self._session.scalar( + select(User).options(selectinload(User.categories)).where(User.telegram_id == telegram_id) + ) + + categories = ( + (await self._session.scalars(select(Category).where(Category.id.in_(categories_ids)))).all() + if categories_ids + else [] + ) + + user.categories = categories + if user: + await self.update(user.id, user) + + @auto_commit + async def delete_category_from_user(self, user: User, category_id: int) -> None: + """Удаляет категорию у пользователя.""" + await self._session.execute( + delete(UsersCategories) + .where(UsersCategories.user_id == user.id) + .where(UsersCategories.category_id == category_id) + ) + + async def get_user_categories(self, user: User) -> Sequence[Category]: + """Возвращает список категорий пользователя.""" + user_categories = await self._session.scalars(select(Category).join(User.categories).where(User.id == user.id)) + return user_categories.all() + + async def set_mailing(self, user: User, has_mailing: bool) -> None: + """ + Присваивает пользователю статус получения + почтовой рассылки на задания. 
+ """ + user.has_mailing = has_mailing + await self.update(user.id, user) diff --git a/src/core/depends/__init__.py b/src/core/depends/__init__.py new file mode 100644 index 00000000..86c14603 --- /dev/null +++ b/src/core/depends/__init__.py @@ -0,0 +1,17 @@ +from .api_services import APIServicesContainer +from .applications import ApplicationsContainer +from .bot_services import BotServicesContainer +from .container import Container +from .data_base_connection import DataBaseConnectionContainer +from .jwt_services import JWTServicesContainer +from .repositories import RepositoriesContainer + +__all__ = ( + "APIServicesContainer", + "ApplicationsContainer", + "BotServicesContainer", + "Container", + "DataBaseConnectionContainer", + "JWTServicesContainer", + "RepositoriesContainer", +) diff --git a/src/core/depends/api_services.py b/src/core/depends/api_services.py new file mode 100644 index 00000000..c6eec9eb --- /dev/null +++ b/src/core/depends/api_services.py @@ -0,0 +1,53 @@ +from dependency_injector import containers, providers + +from src.api.services import ( + AdminService, + AnalyticsService, + CategoryService, + ExternalSiteUserService, + HealthCheckService, + TaskService, + TelegramNotificationService, +) + + +class APIServicesContainer(containers.DeclarativeContainer): + """Контейнер зависимостей API Service.""" + + repositories = providers.DependenciesContainer() + data_base_connection = providers.DependenciesContainer() + applications = providers.DependenciesContainer() + admin_service = providers.Factory( + AdminService, + admin_repository=repositories.admin_repository, + ) + site_user_service = providers.Factory( + ExternalSiteUserService, + site_user_repository=repositories.site_user_repository, + session=data_base_connection.session, + ) + category_service = providers.Factory( + CategoryService, + category_repository=repositories.category_repository, + session=data_base_connection.session, + ) + task_service = providers.Factory( + TaskService, + task_repository=repositories.task_repository, + session=data_base_connection.session, + ) + message_service = providers.Factory( + TelegramNotificationService, + telegram_bot=applications.telegram_bot, + session=data_base_connection.session, + ) + analytic_service = providers.Factory( + AnalyticsService, + user_repository=repositories.user_repository, + unsubscribe_reason_repository=repositories.unsubscribe_reason_repository, + ) + health_check_service = providers.Factory( + HealthCheckService, + task_repository=repositories.task_repository, + telegram_bot=applications.telegram_bot, + ) diff --git a/src/core/depends/applications.py b/src/core/depends/applications.py new file mode 100644 index 00000000..7a672403 --- /dev/null +++ b/src/core/depends/applications.py @@ -0,0 +1,25 @@ +from dependency_injector import containers, providers +from fastapi import FastAPI + +from src.api.main import init_fastapi +from src.bot import create_bot +from src.bot.main import init_bot +from src.settings import Settings + + +class ApplicationsContainer(containers.DeclarativeContainer): + """Контейнер зависимостей Applications.""" + + settings = providers.Dependency(instance_of=Settings) + telegram_bot = providers.Singleton( + init_bot, + telegram_bot=providers.Singleton( + create_bot, + bot_token=settings.provided.BOT_TOKEN, + ), + ) + fastapi_app = providers.Singleton( + init_fastapi, + fastapi_app=providers.Singleton(FastAPI, debug=settings.provided.DEBUG), + settings=settings, + ) diff --git a/src/core/depends/bot_services.py 
b/src/core/depends/bot_services.py new file mode 100644 index 00000000..fe2a4761 --- /dev/null +++ b/src/core/depends/bot_services.py @@ -0,0 +1,35 @@ +from dependency_injector import containers, providers + +from src.bot.services import UnsubscribeReasonService +from src.bot.services.category import CategoryService as BotCategoryService +from src.bot.services.external_site_user import ExternalSiteUserService as BotExternalSiteUserService +from src.bot.services.task import TaskService as BotTaskService +from src.bot.services.user import UserService as BotUserService + + +class BotServicesContainer(containers.DeclarativeContainer): + """Контейнер зависимостей Bot services.""" + + repositories = providers.DependenciesContainer() + bot_category_service = providers.Factory( + BotCategoryService, + category_repository=repositories.category_repository, + ) + bot_user_service = providers.Factory( + BotUserService, + user_repository=repositories.user_repository, + ) + bot_task_service = providers.Factory( + BotTaskService, + task_repository=repositories.task_repository, + user_repository=repositories.user_repository, + ) + bot_site_user_service = providers.Factory( + BotExternalSiteUserService, + site_user_repository=repositories.site_user_repository, + ) + unsubscribe_reason_service = providers.Factory( + UnsubscribeReasonService, + unsubscribe_reason_repository=repositories.unsubscribe_reason_repository, + user_repository=repositories.user_repository, + ) diff --git a/src/core/depends/container.py b/src/core/depends/container.py new file mode 100644 index 00000000..f497f285 --- /dev/null +++ b/src/core/depends/container.py @@ -0,0 +1,28 @@ +from dependency_injector import containers, providers + +from src.core.depends.api_services import APIServicesContainer +from src.core.depends.applications import ApplicationsContainer +from src.core.depends.bot_services import BotServicesContainer +from src.core.depends.data_base_connection import DataBaseConnectionContainer +from src.core.depends.jwt_services import JWTServicesContainer +from src.core.depends.repositories import RepositoriesContainer +from src.settings import get_settings + + +class Container(containers.DeclarativeContainer): + """Главный контейнер приложения.""" + + settings = providers.Singleton(get_settings) + database_connection_container = providers.Container(DataBaseConnectionContainer, settings=settings) + applications_container = providers.Container(ApplicationsContainer, settings=settings) + repositories_container = providers.Container( + RepositoriesContainer, data_base_connection=database_connection_container + ) + api_services_container = providers.Container( + APIServicesContainer, + repositories=repositories_container, + data_base_connection=database_connection_container, + applications=applications_container, + ) + bot_services_container = providers.Container(BotServicesContainer, repositories=repositories_container) + jwt_services_container = providers.Container(JWTServicesContainer, settings=settings) diff --git a/src/core/depends/data_base_connection.py b/src/core/depends/data_base_connection.py new file mode 100644 index 00000000..008a3a19 --- /dev/null +++ b/src/core/depends/data_base_connection.py @@ -0,0 +1,24 @@ +from dependency_injector import containers, providers +from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine + +from src.core.db import get_session +from src.settings import Settings + + +class DataBaseConnectionContainer(containers.DeclarativeContainer): + """Контейнер зависимостей 
DataBase connection.""" + + settings = providers.Dependency(instance_of=Settings) + engine = providers.Singleton( + create_async_engine, + url=settings.provided.database_url, + ) + sessionmaker = providers.Singleton( + async_sessionmaker, + bind=engine, + expire_on_commit=False, + ) + session = providers.Resource( + get_session, + sessionmaker=sessionmaker, + ) diff --git a/src/core/depends/jwt_services.py b/src/core/depends/jwt_services.py new file mode 100644 index 00000000..61fbe979 --- /dev/null +++ b/src/core/depends/jwt_services.py @@ -0,0 +1,23 @@ +from datetime import timedelta + +from dependency_injector import containers, providers +from fastapi_jwt import JwtAccessBearerCookie, JwtRefreshBearer + +from src.settings import Settings + + +class JWTServicesContainer(containers.DeclarativeContainer): + """Контейнер зависимостей JWT services.""" + + settings = providers.Dependency(instance_of=Settings) + access_security = providers.Factory( + JwtAccessBearerCookie, + secret_key=settings.provided.SECRET_KEY, + auto_error=False, + access_expires_delta=timedelta(hours=1), + ) + refresh_security = providers.Factory( + JwtRefreshBearer, + secret_key=settings.provided.SECRET_KEY, + auto_error=True, + ) diff --git a/src/core/depends/repositories.py b/src/core/depends/repositories.py new file mode 100644 index 00000000..1925d55c --- /dev/null +++ b/src/core/depends/repositories.py @@ -0,0 +1,40 @@ +from dependency_injector import containers, providers + +from src.core.db.repository import ( + AdminUserRepository, + CategoryRepository, + ExternalSiteUserRepository, + TaskRepository, + UnsubscribeReasonRepository, + UserRepository, +) + + +class RepositoriesContainer(containers.DeclarativeContainer): + """Контейнер зависимостей Repositories.""" + + data_base_connection = providers.DependenciesContainer() + user_repository = providers.Factory( + UserRepository, + session=data_base_connection.session, + ) + site_user_repository = providers.Factory( + ExternalSiteUserRepository, + session=data_base_connection.session, + ) + category_repository = providers.Factory( + CategoryRepository, + session=data_base_connection.session, + ) + task_repository = providers.Factory( + TaskRepository, + session=data_base_connection.session, + ) + unsubscribe_reason_repository = providers.Factory( + UnsubscribeReasonRepository, + session=data_base_connection.session, + ) + admin_repository = providers.Factory( + AdminUserRepository, + session=data_base_connection.session, + ) diff --git a/src/core/enums.py b/src/core/enums.py new file mode 100644 index 00000000..da567266 --- /dev/null +++ b/src/core/enums.py @@ -0,0 +1,10 @@ +from enum import StrEnum + + +class TelegramNotificationUsersGroups(StrEnum): + """Класс с доступными категориями пользователелей, + которым будет отправлено сообщение""" + + ALL = "all" + SUBSCRIBED = "subscribed" + UNSUBSCRIBED = "unsubscribed" diff --git a/src/core/exceptions/__init__.py b/src/core/exceptions/__init__.py new file mode 100644 index 00000000..ff7ccedc --- /dev/null +++ b/src/core/exceptions/__init__.py @@ -0,0 +1,3 @@ +from .exceptions import AlreadyExistsException, InvalidToken, NotFoundException, TokenNotProvided + +__all__ = ("AlreadyExistsException", "NotFoundException", "InvalidToken", "TokenNotProvided") diff --git a/src/core/exceptions/exceptions.py b/src/core/exceptions/exceptions.py new file mode 100644 index 00000000..b524e0aa --- /dev/null +++ b/src/core/exceptions/exceptions.py @@ -0,0 +1,63 @@ +from http import HTTPStatus +from typing import Any + +from pydantic 
import EmailStr +from starlette.exceptions import HTTPException + +from src.core.db.models import Base as DatabaseModel + + +class ApplicationException(HTTPException): + status_code: int = None + detail: str = None + headers: dict[str, Any] = None + + def __init__(self): + super().__init__(status_code=self.status_code, detail=self.detail, headers=self.headers) + + +class NotFoundException(ApplicationException): + def __init__(self, object_name: str, object_id: int): + self.status_code = HTTPStatus.NOT_FOUND + self.detail = f"Объект {object_name} с id: {object_id} не найден" + + +class AlreadyExistsException(ApplicationException): + def __init__(self, obj: DatabaseModel): + self.status_code = HTTPStatus.BAD_REQUEST + self.detail = f"Объект {obj} уже существует" + + +class EmailSendError(ApplicationException): + status_code: HTTPStatus = HTTPStatus.BAD_REQUEST + + def __init__(self, recipients: EmailStr | list[EmailStr], exc: Exception): + self.detail = f"Возникла ошибка {exc} при отправке email на адрес {recipients}." + + +class UnauthorizedError(ApplicationException): + status_code: HTTPStatus = HTTPStatus.UNAUTHORIZED + detail = "У Вас нет прав для просмотра запрошенной страницы." + + +class WebhookOnError(ApplicationException): + status_code: HTTPStatus = HTTPStatus.NO_CONTENT + detail = "Telegram Webhook выключен." + + +class CredentialsException(ApplicationException): + status_code: HTTPStatus = HTTPStatus.UNAUTHORIZED + headers = {"WWW-Authenticate": "Bearer"} + + def __init__(self, detail: str): + self.detail = detail + + +class TokenNotProvided(ApplicationException): + status_code: HTTPStatus = HTTPStatus.UNAUTHORIZED + detail = "В заголовке запроса не содержится токен." + + +class InvalidToken(ApplicationException): + status_code: HTTPStatus = HTTPStatus.FORBIDDEN + detail = "Токен в заголовке запроса неверный." 
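Taken together, the repository layer above follows a thin generic-repository pattern: every concrete repository takes an AsyncSession, inherits its CRUD helpers from the shared base class, and surfaces failures through the exception types just defined. A minimal usage sketch, assuming a standalone engine with a made-up DSN and telegram id purely for illustration (inside the application itself the session is supplied by the DataBaseConnectionContainer defined above):

    import asyncio

    from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine

    from src.core.db.repository.user import UserRepository


    async def demo() -> None:
        # Illustrative DSN; the real value comes from Settings.database_url.
        engine = create_async_engine("postgresql+asyncpg://user:password@localhost:5432/procharity")
        session_factory = async_sessionmaker(engine, expire_on_commit=False)

        async with session_factory() as session:
            repo = UserRepository(session)

            user = await repo.get_by_telegram_id(123456789)  # -> User | None
            if user is not None:
                # set_mailing() persists the flag through the auto-committing update()
                await repo.set_mailing(user, has_mailing=True)

            print(await repo.count_all())  # total number of User rows


    asyncio.run(demo())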
diff --git a/src/core/logging/__init__.py b/src/core/logging/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/core/logging/middleware.py b/src/core/logging/middleware.py new file mode 100644 index 00000000..79d86754 --- /dev/null +++ b/src/core/logging/middleware.py @@ -0,0 +1,47 @@ +import time + +import structlog +from asgi_correlation_id.context import correlation_id +from fastapi import Request, Response +from starlette.middleware.base import BaseHTTPMiddleware +from uvicorn.protocols.utils import get_path_with_query_string + +access_logger = structlog.stdlib.get_logger("api.access") + + +class LoggingMiddleware(BaseHTTPMiddleware): + async def dispatch(self, request: Request, call_next) -> Response: + """Настройка логирования для Uvicorn.""" + structlog.contextvars.clear_contextvars() + request_id = correlation_id.get() + structlog.contextvars.bind_contextvars(request_id=request_id) + + start_time = time.perf_counter_ns() + response = Response(status_code=500) + try: + response = await call_next(request) + except Exception as error: + structlog.stdlib.get_logger("api.error").exception("Непойманное исключение") + raise error + finally: + process_time = time.perf_counter_ns() - start_time + status_code = response.status_code + url = get_path_with_query_string(request.scope) + client_host = request.client.host + client_port = request.client.port + http_method = request.method + http_version = request.scope["http_version"] + access_logger.info( + f'{client_host}:{client_port} - "{http_method} {url} ' f'HTTP/{http_version}" {status_code}', + http={ + "url": str(request.url), + "status_code": status_code, + "method": http_method, + "request_id": request_id, + "version": http_version, + }, + network={"client": {"ip": client_host, "port": client_port}}, + duration=process_time, + ) + response.headers["X-Process-Time"] = str(process_time / 10**9) + return response diff --git a/src/core/logging/setup.py b/src/core/logging/setup.py new file mode 100644 index 00000000..437727b0 --- /dev/null +++ b/src/core/logging/setup.py @@ -0,0 +1,138 @@ +import logging +import logging.config +import os +import sys + +import structlog +from structlog.types import EventDict, Processor + +from src.settings import settings + +os.makedirs(settings.LOG_DIR, exist_ok=True) + + +def _drop_color_message_key(_, __, event_dict: EventDict) -> EventDict: + """ + Uvicorn логирует сообщение повторно в дополнительной секции + `color_message`, но нам это не нужно. Данная функция ("процессор") + убирает данный ключ из event dict. 
+ """ + event_dict.pop("color_message", None) + return event_dict + + +TIMESTAMPER = structlog.processors.TimeStamper(fmt="iso") + +PRE_CHAIN: list[Processor] = [ + structlog.contextvars.merge_contextvars, + structlog.stdlib.add_logger_name, + structlog.stdlib.add_log_level, + structlog.stdlib.PositionalArgumentsFormatter(), + structlog.stdlib.ExtraAdder(), + _drop_color_message_key, + TIMESTAMPER, + structlog.processors.StackInfoRenderer(), +] + + +LOGGING_DICTCONFIG = { + "version": 1, + "disable_existing_loggers": False, + "formatters": { + "plain": { + "()": structlog.stdlib.ProcessorFormatter, + "processors": [ + structlog.stdlib.ProcessorFormatter.remove_processors_meta, + structlog.dev.ConsoleRenderer( + colors=False, + exception_formatter=structlog.dev.plain_traceback, + ), + ], + "foreign_pre_chain": PRE_CHAIN, + }, + "colored": { + "()": structlog.stdlib.ProcessorFormatter, + "processors": [ + structlog.stdlib.ProcessorFormatter.remove_processors_meta, + structlog.dev.ConsoleRenderer(colors=True), + ], + "foreign_pre_chain": PRE_CHAIN, + }, + }, + "handlers": { + "default": { + "level": settings.LOG_LEVEL, + "class": "logging.StreamHandler", + "formatter": "colored", + }, + "file": { + "level": settings.LOG_LEVEL, + "class": "logging.handlers.RotatingFileHandler", + "filename": os.path.join(settings.LOG_DIR, settings.LOG_FILE), + "mode": "a", + "maxBytes": settings.LOG_FILE_SIZE, + "backupCount": settings.LOG_FILES_TO_KEEP, + "encoding": "UTF-8", + "formatter": "plain", + }, + }, + "loggers": { + "": { + "handlers": ["default", "file"], + "level": settings.LOG_LEVEL, + "propagate": True, + }, + }, +} + + +def _setup_structlog(): + """Настройки structlog.""" + + logging.config.dictConfig(LOGGING_DICTCONFIG) + + structlog.configure( + processors=PRE_CHAIN + + [ + structlog.stdlib.ProcessorFormatter.wrap_for_formatter, + ], + logger_factory=structlog.stdlib.LoggerFactory(), + wrapper_class=structlog.stdlib.BoundLogger, + cache_logger_on_first_use=True, + ) + + +def _setup_uvicorn_logging(): + """Настройки логирования uvicorn.""" + for _log in logging.root.manager.loggerDict.keys(): + logging.getLogger(_log).handlers.clear() + logging.getLogger(_log).propagate = True + + logging.getLogger("uvicorn.access").handlers.clear() + logging.getLogger("uvicorn.access").propagate = False + + +def setup_logging(): + """Основные настройки логирования.""" + _setup_structlog() + _setup_uvicorn_logging() + + root_logger = logging.getLogger() + + def handle_exception(exc_type, exc_value, exc_traceback): + """ + Логирует любое непойманное исключение вместо его вывода на печать + Python'ом (кроме KeyboardInterrupt, чтобы позволить Ctrl+C + для остановки). + См. 
https://stackoverflow.com/a/16993115/3641865 + """ + if issubclass(exc_type, KeyboardInterrupt): + sys.__excepthook__(exc_type, exc_value, exc_traceback) + return + + root_logger.error( + "Непойманное исключение", + exc_info=(exc_type, exc_value, exc_traceback), + ) + + sys.excepthook = handle_exception diff --git a/src/core/logging/utils.py b/src/core/logging/utils.py new file mode 100644 index 00000000..0fbde67e --- /dev/null +++ b/src/core/logging/utils.py @@ -0,0 +1,29 @@ +from collections.abc import Awaitable, Callable +from functools import wraps +from typing import ParamSpec, TypeVar + +import structlog + +ReturnType = TypeVar("ReturnType") +ParameterTypes = ParamSpec("ParameterTypes") + +log = structlog.get_logger() + + +async def logging_updates(*args, **kwargs): + await log.ainfo("Следующие Updates не были пойманы ни одним из обработчиков", args=args, kwargs=kwargs) + + +def logger_decor( + coroutine: Callable[ParameterTypes, Awaitable[ReturnType]] +) -> Callable[ParameterTypes, Awaitable[ReturnType]]: + @wraps(coroutine) + async def wrapper(*args: ParameterTypes.args, **kwargs: ParameterTypes.kwargs) -> ReturnType: + await log.ainfo( + f"Запущенна функция {coroutine.__name__}", + args=args, + kwargs=kwargs, + ) + return await coroutine(*args, **kwargs) + + return wrapper diff --git a/src/core/services/__init__.py b/src/core/services/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/core/services/email.py b/src/core/services/email.py new file mode 100644 index 00000000..998be648 --- /dev/null +++ b/src/core/services/email.py @@ -0,0 +1,85 @@ +import contextlib +from typing import Any, Generator + +from fastapi_mail import ConnectionConfig, FastMail, MessageSchema, MessageType +from pydantic import BaseModel, EmailStr +from sqlalchemy.ext.asyncio import AsyncSession + +from src.core.db.db import get_session +from src.core.db.repository.user import UserRepository +from src.core.exceptions import exceptions +from src.settings import settings + + +class EmailSchema(BaseModel): + recipients: EmailStr | list[EmailStr] + template_body: dict[str, Any] | None + + +class EmailProvider: + """Класс для отправки электронных писем.""" + + def __init__(self, sessionmaker: Generator[AsyncSession, None, None] = get_session): + conf = ConnectionConfig( + MAIL_USERNAME=settings.MAIL_LOGIN, + MAIL_PASSWORD=settings.MAIL_PASSWORD, + MAIL_FROM=settings.ORGANIZATIONS_EMAIL, + MAIL_PORT=settings.MAIL_PORT, + MAIL_SERVER=settings.MAIL_SERVER, + MAIL_FROM_NAME="ProCharity Bot", + MAIL_STARTTLS=settings.MAIL_STARTTLS, + MAIL_SSL_TLS=settings.MAIL_SSL_TLS, + USE_CREDENTIALS=settings.USE_CREDENTIALS, + VALIDATE_CERTS=settings.VALIDATE_CERTS, + ) + self.fastmail = FastMail(conf) + self._sessionmaker = contextlib.asynccontextmanager(sessionmaker) + + async def __send_mail( + self, + email_obj: EmailSchema, + subject: str, + template_name: str | None, + body: str | None, + ) -> None: + """Базовый метод отправки сообщения на электронную почту. 
+ Аргументы: + recipients (list[EmailStr]): список email получателей + subject (str): тема сообщения + template_body (dict[str, Any]): значения переменных для шаблона сообщения + template_name (str): название шаблона для сообщения + body (str): тело электронного письма + """ + message = MessageSchema( + subject=subject, + recipients=email_obj.recipients, + template_body=email_obj.template_body, + body=body, + subtype=MessageType.html, + ) + + try: + await self.fastmail.send_message(message, template_name) + except Exception as exc: + raise exceptions.EmailSendError(email_obj.recipients, exc) + + async def send_question_feedback(self, telegram_id: int, message: str, email: EmailStr | list[EmailStr]) -> None: + """Отправляет email на почтовый ящик администратора с отзывом/вопросом.""" + if isinstance(email, str): + recipients = [email] + elif isinstance(email, list): + recipients = email + else: + raise ValueError("Invalid email format") + email_obj = EmailSchema(recipients=recipients, template_body=None) + async with self._sessionmaker() as session: + user_repository = UserRepository(session) + user = await user_repository.get_by_telegram_id(telegram_id) + await self.__send_mail( + email_obj, + subject=( + f"Сообщение от пользователя {user.first_name} ({user.email or 'пользователь не указал свой email'})" + ), + body=message, + template_name=None, + ) diff --git a/src/core/services/notification.py b/src/core/services/notification.py new file mode 100644 index 00000000..c47a80d1 --- /dev/null +++ b/src/core/services/notification.py @@ -0,0 +1,53 @@ +import asyncio + +import structlog +from telegram.constants import ParseMode +from telegram.error import BadRequest, Forbidden, TelegramError +from telegram.ext import Application + +from src.core.db.models import User + +log = structlog.get_logger(module=__name__) + + +class TelegramNotification: + def __init__(self, telegram_bot: Application): + self.__bot_application = telegram_bot + self.__bot = telegram_bot.bot + + async def __send_message(self, user_id: int, message: str) -> tuple[bool, str]: + try: + await self.__bot.send_message( + chat_id=user_id, text=message, parse_mode=ParseMode.HTML, disable_web_page_preview=True + ) + msg = f"Отправлено оповещение пользователю {user_id}" + await log.adebug(msg) + return True, msg + except TelegramError as exc: + msg = f"Ошибка отправки сообщения пользователю {user_id}." + match exc: + case BadRequest(): + msg += " Некорректный id." + case Forbidden(): + msg += " Бот заблокирован." 
+ msg += " " + exc.message + await log.ainfo(msg) + return False, msg + + async def send_messages( + self, + message: str, + users: list[User], + ) -> list[tuple[bool, str]]: + """Делает массовую рассылку сообщения message пользователям users.""" + send_message_tasks = [self.__send_message(user.telegram_id, message) for user in users] + result = await asyncio.gather(*send_message_tasks) + return result + + async def send_message( + self, + message: str, + user_id: int, + ) -> tuple[bool, str]: + """Отправляет сообщение message конкретному пользователю user.""" + return await self.__send_message(user_id, message) diff --git a/src/core/utils.py b/src/core/utils.py new file mode 100644 index 00000000..a5dbba4c --- /dev/null +++ b/src/core/utils.py @@ -0,0 +1,51 @@ +import sys +from functools import wraps + +from src.core.db.models import Task +from src.settings import settings + +TASK_DEADLINE_FORMAT = "%d.%m.%y" + + +def display_tasks(task: Task, url: str) -> str: + deadline = task.deadline.strftime(TASK_DEADLINE_FORMAT) + bonus_link = f"{url}article/10053" + return ( + f"{task.title}\n\n" + f"От фонда: {task.name_organization}\n\n" + f"Бонусы: {task.bonus * '💎'}\n" + f"Категория: {task.category.name}\n" + f"Срок: {deadline}\n\n" + f"{'Посмотреть задание'}" + ) + + +def display_task_verbosely(task: Task, url: str) -> str: + deadline = task.deadline.strftime(TASK_DEADLINE_FORMAT) + bonus_link = f"{url}article/10053" + return ( + f"{task.title}\n\n" + f"От фонда: {task.name_organization}, {task.location}\n\n" + f"Бонусы: {task.bonus * '💎'}\n" + f"Категория: {task.category.name}\n" + f"Срок: {deadline}\n\n" + f"{task.description}" + ) + + +def auto_commit(func): + @wraps(func) + async def auto_commit_wraps(self, *args, commit=True): + result = await func(self, *args) + if commit: + await self._session.commit() + return result + + return auto_commit_wraps + + +def set_ngrok(): + from pyngrok import ngrok + + port = sys.argv[sys.argv.index("--port") + 1] if "--port" in sys.argv else 8000 + settings.APPLICATION_URL = ngrok.connect(port).public_url diff --git a/src/main.py b/src/main.py new file mode 100644 index 00000000..03f05698 --- /dev/null +++ b/src/main.py @@ -0,0 +1,9 @@ +from fastapi import FastAPI + +from src.core.depends import Container + + +def main(run_bot: bool = True) -> FastAPI: + container = Container() + container.wire(packages=(__package__,)) + return container.applications_container.fastapi_app(run_bot=run_bot) diff --git a/src/settings.py b/src/settings.py new file mode 100644 index 00000000..2784cbda --- /dev/null +++ b/src/settings.py @@ -0,0 +1,136 @@ +from functools import lru_cache +from pathlib import Path +from typing import Annotated +from urllib.parse import urljoin + +from pydantic import AnyHttpUrl, BeforeValidator, EmailStr, TypeAdapter, field_validator, validator +from pydantic_settings import BaseSettings + +BASE_DIR = Path(__file__).resolve().parent.parent + +Url = Annotated[str, BeforeValidator(lambda value: str(TypeAdapter(AnyHttpUrl).validate_python(value)))] + + +@lru_cache +def get_env_path() -> Path | None: + import importlib + + try: + importlib.import_module("dotenv") + except ImportError: + return + if Path.exists(BASE_DIR / ".env"): + return BASE_DIR / ".env" + + +class Settings(BaseSettings): + """Настройки проекта.""" + + APPLICATION_URL: str = "http://localhost:8000" + SECRET_KEY: str = "secret_key" + ROOT_PATH: str = "/api" + DEBUG: bool = False + USE_NGROK: bool = False + STATIC_DIR: str | Path = BASE_DIR / "templates/" + STATIC_URL: str = 
"static/" + + # Токен доступа к API + ACCESS_TOKEN_FOR_PROCHARITY: str = "" + + # Параметры подключения к БД + POSTGRES_DB: str + POSTGRES_USER: str + POSTGRES_PASSWORD: str + DB_HOST: str = "localhost" + DB_PORT: int = 5432 + + # Настройки бота + BOT_TOKEN: str + BOT_WEBHOOK_MODE: bool = False + + # Настройки jwt + ALGORITHM: str = "HS256" + ACCESS_TOKEN_EXPIRE_MINUTES: int = 30 + + # Настройки логирования + LOG_LEVEL: str = "INFO" + LOG_DIR: str | Path = BASE_DIR / "logs" + LOG_FILE: str = "app.log" + LOG_FILE_SIZE: int = 10 * 2**20 + LOG_FILES_TO_KEEP: int = 5 + + # Organization data + ORGANIZATIONS_EMAIL: EmailStr + + # Настройки отправки сообщений через электронную почту + MAIL_SERVER: str = "" + MAIL_PORT: int = 465 + MAIL_LOGIN: str = "" + MAIL_PASSWORD: str = "" + MAIL_STARTTLS: bool = False + MAIL_SSL_TLS: bool = True + USE_CREDENTIALS: bool = True + VALIDATE_CERTS: bool = True + + # Адреса электронной почты администраторов + EMAIL_ADMIN: EmailStr + + # Настройки получения коммитов + LAST_COMMIT: str = "" + COMMIT_DATE: str = "" + TAGS: list[str] = [] + + # URLs проекта Procharity + PROCHARITY_URL: Url = "https://procharity.ru" + YA_PRAKTIKUM_URL: Url = "https://praktikum.yandex.ru/" + HELP_PROCHARITY_URL: Url = "https://help.procharity.ru/" + + @field_validator("PROCHARITY_URL", "HELP_PROCHARITY_URL") + def check_last_slash_url(cls, v) -> str: + """Кастомный валидатор-добавлятор последнего слэша в константе URL.""" + + if v[-1] != "/": + return urljoin(v, "/") + return v + + @validator("APPLICATION_URL") + def check_domain_startswith_https_or_add_https(cls, v) -> str: + """Добавить 'https://' к домену.""" + if "https://" in v or "http://" in v: + return v + return urljoin("https://", f"//{v}") + + @property + def database_url(self) -> str: + """Получить ссылку для подключения к DB.""" + return ( + "postgresql+asyncpg://" + f"{self.POSTGRES_USER}:{self.POSTGRES_PASSWORD}" + f"@{self.DB_HOST}:{self.DB_PORT}/{self.POSTGRES_DB}" + ) + + @property + def api_url(self) -> str: + return urljoin(self.APPLICATION_URL, self.ROOT_PATH + "/") + + @property + def static_url(self) -> str: + return urljoin(self.APPLICATION_URL, settings.STATIC_URL) + + @property + def telegram_webhook_url(self) -> str: + """Получить url-ссылку на эндпоинт для работы telegram в режиме webhook.""" + return urljoin(self.api_url, "telegram/webhook") + + @property + def feedback_form_template_url(self) -> str: + """Получить url-ссылку на HTML шаблон формы обратной связи.""" + return urljoin(self.static_url, "feedback_form/feedback_form.html") + + +@lru_cache() +def get_settings(): + return Settings(_env_file=get_env_path()) # type: ignore + + +settings = get_settings() diff --git a/templates/email/base.html b/templates/email/base.html new file mode 100644 index 00000000..e69de29b diff --git a/templates/feedback_form/feedback_form.html b/templates/feedback_form/feedback_form.html new file mode 100644 index 00000000..825482b2 --- /dev/null +++ b/templates/feedback_form/feedback_form.html @@ -0,0 +1,74 @@ + + + + + + ProCharity Bot + + + + + +
+ + + + diff --git a/templates/feedback_form/pages/feedback_form.css b/templates/feedback_form/pages/feedback_form.css new file mode 100644 index 00000000..62ff78da --- /dev/null +++ b/templates/feedback_form/pages/feedback_form.css @@ -0,0 +1,113 @@ +body { + background: var(--tg-theme-bg-color) !important; + -moz-osx-font-smoothing: grayscale; + -webkit-font-smoothing: antialiased; + -webkit-text-size-adjust: 100%; + -ms-text-size-adjust: 100%; + -moz-text-size-adjust: 100%; + text-rendering: optimizeLegibility; +} + +.form { + max-width: 450px; + padding: 20px; + margin: 0 auto; +} + +.input-field input[type='text']:focus + label { + color: var(--tg-theme-button-color) !important; +} + +.input-field input[type='text']:focus { + border-bottom: 1px solid var(--tg-theme-button-color) !important; + box-shadow: 0 1px 0 0 var(--tg-theme-button-color) !important; +} + +.input-field input[type='text'].invalid + label { + color: #e53935 !important; +} + +.input-field input[type='text'].invalid { + border-bottom: 1px solid #e53935 !important; + box-shadow: 0 1px 0 0 #e53935 !important; +} + +.helper-text { + color: #e53935 !important; +} + +.input-field input[type='text'].valid { + border-bottom: 1px solid #9e9e9e; + box-shadow: none; +} + +.input-field input[type='text'] { + color: var(--tg-theme-text-color) !important; +} + +.datepicker-modal { + top: 5% !important; +} + +.datepicker-date-display { + background-color: var(--tg-theme-button-color) !important; +} + +.datepicker-calendar-container { + background: var(--tg-theme-bg-color) !important; +} + +.datepicker-table td { + color: var(--tg-theme-text-color) !important; +} + +.datepicker-table td.is-disabled { + color: #9e9e9e !important; +} + +.datepicker-table td.is-selected { + background-color: var(--tg-theme-button-color) !important; + color: var(--tg-theme-button-text-color) !important; +} + +.datepicker-table td.is-today, +.datepicker-cancel, +.datepicker-done { + color: var(--tg-theme-button-color) !important; +} + +.datepicker-table td.is-today.is-selected { + color: var(--tg-theme-button-text-color) !important; +} + +.dropdown-content li > a, +.dropdown-content li > span { + color: var(--tg-theme-text-color) !important; +} + +.datepicker-controls button > svg { + fill: var(--tg-theme-text-color) !important; +} + +.datepicker-controls button:focus { + background-color: transparent; +} + +.browser-default { + outline: none !important; + background: var(--tg-theme-bg-color) !important; + border: none !important; + max-height: 50px !important; + color: var(--tg-theme-text-color) !important; + cursor: pointer; + box-shadow: none !important; +} + +.form-title { + text-align: center; + color: var(--tg-theme-text-color) !important; +} + +.hidden { + display: none; +} diff --git a/templates/feedback_form/scripts/feedback_form.js b/templates/feedback_form/scripts/feedback_form.js new file mode 100644 index 00000000..4aa72cb4 --- /dev/null +++ b/templates/feedback_form/scripts/feedback_form.js @@ -0,0 +1,268 @@ +// validation settings and check actions +const validationConfig = { + name: { + isCapitalize: true, + }, + surname: { + isCapitalize: true, + }, + email: { + isCapitalize: false, + }, + feedback: { + isCapitalize: false, + }, +}; + +const errMsg = { + name: { + required: 'Пожалуйста, укажите имя', + min: 'Введите не менее 2 символов', + max: 'Допускается ввод не более 100 символов', + capsPattern: 'Убедитесь, что y Bac выключен CAPS LOCK', + dashPattern: 'Убедитесь, что дефис находится в нужном месте', + nameSurnamePattern: 'Доступно 
использование только кириллицы, латиницы и "-"', + }, + surname: { + required: 'Пожалуйста, укажите фамилию', + min: 'Введите не менее 2 символов', + max: 'Допускается ввод не более 100 символов', + capsPattern: 'Убедитесь, что y Bac выключен CAPS LOCK', + dashPattern: 'Убедитесь, что дефис находится в нужном месте', + nameSurnamePattern: 'Доступно использование только кириллицы, латиницы и "-"', + }, + email: { + required: 'Пожалуйста, укажите ваш email', + capsPattern: 'Убедитесь, что y Bac выключен CAPS LOCK', + emailPattern: + 'Неверный формат адреса email. Используйте только латиницу, "-", "@" и "_"', + }, + feedback: { + required: 'Пожалуйста, напишите ваш отзыв', + min: 'Введите не менее 10 символов', + max: 'Допускается ввод не более 500 символов', + capsPattern: 'Убедитесь, что y Bac выключен CAPS LOCK', + feedbackPattern: + 'Доступно использование только кириллицы, латиницы и символов: "-", "_", ".", "," и "!"', + }, +}; + +const disabledTgButtonColor = '#9e9e9e'; +const disabledTgButtonTextColor = '#eceff1'; + +const setValid = (element, errElement) => { + element.classList.remove('invalid'); + errElement.textContent = ''; +}; + +const setInvalid = (element, errElement, errName) => { + element.classList.add('invalid'); + errElement.textContent = errMsg[element.name][errName]; +}; + +const checkInputValidity = (element, errElement, pattern, isCapitalize) => { + let newValue = element.value; + const minlength = element.getAttribute('minlength'); + const maxlength = element.getAttribute('maxlength'); + const capsPattern = /[А-ЯЁ]{2,}/g; + const dashPattern = /( -)|(- )|(^-)|(-$)/g; + const phonePattern = /^\+7 [3489]\d{2} \d{3}-\d{2}-\d{2}$/; + const nameSurnamePattern = /^[А-ЯЁA-Z][а-яёa-z]*([-][А-ЯЁA-Z][а-яёa-z]+)*$/g; + const emailPattern = /^[\w-\.]+@([\w-]+\.)+[\w-]{2,4}$/g; + const feedbackPattern = /[А-ЯЁа-яёa-zA-Z0-9._!@/-]/g; + + if (pattern) { + newValue = newValue.trimStart().replace(pattern, ''); + } + + // convert phone number to international format: +7 DEF XXX-XX-XX + if (newValue && element.name === 'phone_number') { + newValue = newValue + .replace(/^(?:7|8)?(\d{0,3})?(\d{0,3})(\d{0,2})(\d*)$/, '+7 $1 $2-$3-$4') + .replace(/\D(?!\d)/g, ''); // remove trailing hyphens (non-digit not followed by a digit) + } + + if (isCapitalize && newValue) { + newValue = newValue + .split('-') + .map((word) => + word + ? word.replace( + /^(?!на)[а-яё]{2}/, + (letter) => letter[0].toUpperCase() + letter.slice(1) + ) + : '' + ) + .join('-') + .split(' ') + .map((word) => (word ? 
word[0].toUpperCase() + word.slice(1) : '')) + .join(' '); + } + + if (!newValue) { + setInvalid(element, errElement, 'required'); + } else if (minlength && newValue.length < minlength) { + setInvalid(element, errElement, 'min'); + } else if (maxlength && newValue.length > maxlength) { + setInvalid(element, errElement, 'max'); + } else if (newValue.match(capsPattern)) { + setInvalid(element, errElement, 'capsPattern'); + } else if (newValue.match(dashPattern)) { + setInvalid(element, errElement, 'dashPattern'); + } else if (element.name === 'phone_number' && !newValue.match(phonePattern)) { + setInvalid(element, errElement, 'phonePattern'); + } else if ( + (element.name === 'name' || element.name === 'surname') && + !newValue.match(nameSurnamePattern) + ) { + setInvalid(element, errElement, 'nameSurnamePattern'); + } else if (element.name === 'email' && !newValue.match(emailPattern)) { + setInvalid(element, errElement, 'emailPattern'); + } else if (element.name === 'feedback' && !newValue.match(feedbackPattern)) { + setInvalid(element, errElement, 'feedbackPattern'); + } else { + setValid(element, errElement); + } + + element.value = newValue; +}; + +const toggleSubmitState = ( + inputs, + tgMainButton, + defaultButtonColor, + defaultButtonTextColor +) => { + const isValidationError = Array.from(inputs).some( + (input) => !input.validity.valid || input.classList.contains('invalid') + ); + + if (isValidationError) { + tgMainButton.setParams({ + is_active: false, + color: disabledTgButtonColor, + text_color: disabledTgButtonTextColor, + }); + } else { + tgMainButton.setParams({ + is_active: true, + color: defaultButtonColor, + text_color: defaultButtonTextColor, + }); + } +}; + +const setValidation = ( + inputs, + tgMainButton, + defaultButtonColor, + defaultButtonTextColor +) => { + toggleSubmitState( + inputs, + tgMainButton, + defaultButtonColor, + defaultButtonTextColor + ); + + inputs.forEach((input) => { + const { pattern, isCapitalize } = validationConfig[input.name]; + + const errElement = document.querySelector(`.helper-text.${input.name}`); + + const check = () => { + checkInputValidity(input, errElement, pattern, isCapitalize); + toggleSubmitState( + inputs, + tgMainButton, + defaultButtonColor, + defaultButtonTextColor + ); + }; + + input.oninput = () => check(); + input.onblur = () => check(); + input.onchange = () => check(); + input.onpaste = () => check(); + }); +}; + +const params = new Proxy(new URLSearchParams(window.location.search), { + get: (searchParams, prop) => searchParams.get(prop), +}); + +if (params.surname) { + document.getElementById('name').value = params.name; + document.getElementById('surname').value = params.surname; + document.getElementById('email').value = params.email; + document.getElementById('feedback').value = params.feedback; +} + +if (params.update) { + var formText = `Рады снова видеть Вас в нашем проекте.
+ Пожалуйста, проверьте свои данные.`; + var buttonText = 'Подать заявку на участие в смене'; +} else { + var formText = '* необходимо заполнить поля'; + var buttonText = 'Зарегистрироваться в проекте'; +} + +document.getElementById('formtitle').innerHTML = formText; + +const showTgButton = (tgMainButton) => { + tgMainButton.setText(buttonText); + tgMainButton.show(); +}; + +// send data to server +const handleSubmit = (inputs, tg) => { + tg.MainButton.disable(); + + const data = Array.from(inputs).reduce((data, input) => { + data[input.name] = input.value.trim(); + return data; + }, {}); + + tg.sendData(JSON.stringify(data)); + tg.close(); +}; + +// content loaded, main actions +document.addEventListener('DOMContentLoaded', function () { + const tg = window.Telegram.WebApp; + tg.ready(); + tg.expand(); + + const tgMainButton = tg.MainButton; + const defaultButtonColor = tg.themeParams.button_color; + const defaultButtonTextColor = tg.themeParams.button_text_color; + + const inputElements = document.querySelectorAll('.validate'); + + setValidation( + inputElements, + tgMainButton, + defaultButtonColor, + defaultButtonTextColor + ); + + tgMainButton.onClick(() => handleSubmit(inputElements, tg)); + showTgButton(tgMainButton); +}); + +document.addEventListener('DOMContentLoaded', function () { + const checkbox = document.querySelector('#checkbox'); + const emailSection = document.querySelector('#email_section'); + const email = document.querySelector('#email'); + checkbox.addEventListener('change', function (element) { + if (checkbox.checked) { + emailSection.style.display = 'block'; + } else { + email.value = ''; + const emailErr = document.querySelector('.helper-text.email'); + setValid(email, emailErr); + email.classList.remove('valid'); + emailSection.style.display = 'none'; + } + }); +}); diff --git a/templates/registration/registration.html b/templates/registration/registration.html new file mode 100644 index 00000000..e69de29b diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 00000000..e69de29b
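To see how the new modules fit together at runtime: src/main.py builds the dependency-injector Container, wires it across the src package and hands the FastAPI instance produced by ApplicationsContainer to the ASGI server. A hedged sketch of the same calls outside main() — the only assumption is that the required settings (POSTGRES_*, BOT_TOKEN, ORGANIZATIONS_EMAIL, EMAIL_ADMIN) are available to get_settings() through the environment or an .env file:

    from src.core.depends import Container

    # Build and wire the container exactly as src/main.py does.
    container = Container()
    container.wire(packages=("src",))

    # Settings come from the cached get_settings() singleton (src/settings.py).
    settings = container.settings()
    print(settings.api_url, settings.telegram_webhook_url)

    # The FastAPI application itself; main() returns this object to the server.
    app = container.applications_container.fastapi_app(run_bot=False)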