diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 1822a7c..f60167b 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -1,4 +1,4 @@ -name: Tests +name: Python package on: pull_request: @@ -8,9 +8,10 @@ jobs: test: name: Unit tests runs-on: ubuntu-latest + continue-on-error: true steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@master - name: Set up docker uses: docker-practice/actions-setup-docker@master - name: Run postgres @@ -18,7 +19,7 @@ jobs: docker run -d -p 5432:5432 -e POSTGRES_HOST_AUTH_METHOD=trust --name db-test postgres:15-alpine - uses: actions/setup-python@v4 with: - python-version: '3.10' + python-version: '3.11' - name: Install dependencies run: | python -m ensurepip @@ -29,10 +30,39 @@ jobs: DB_DSN=postgresql://postgres@localhost:5432/postgres alembic upgrade head - name: Build coverage file run: | - DB_DSN=postgresql://postgres@localhost:5432/postgres pytest --cache-clear --cov=aciniformes_backend tests > pytest-coverage.txt + DB_DSN=postgresql://postgres@localhost:5432/postgres pytest --junitxml=pytest.xml --cov-report=term-missing:skip-covered tests/ | tee pytest-coverage.txt - name: Print report if: always() run: | cat pytest-coverage.txt - - name: Comment coverage - uses: coroo/pytest-coverage-commentator@v1.0.2 + - name: Pytest coverage comment + uses: MishaKav/pytest-coverage-comment@main + with: + pytest-coverage-path: ./pytest-coverage.txt + title: Coverage Report + badge-title: Code Coverage + hide-badge: false + hide-report: false + create-new-comment: false + hide-comment: false + report-only-changed-files: false + remove-link-from-badge: false + junitxml-path: ./pytest.xml + junitxml-title: Summary + linting: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: 3.11 + - uses: isort/isort-action@master + with: + requirementsFiles: "requirements.txt requirements.dev.txt" + - uses: 
psf/black@stable + - name: Comment if linting failed + if: ${{ failure() }} + uses: thollander/actions-comment-pull-request@v2 + with: + message: | + :poop: Code linting failed, use `black` and `isort` to fix it. \ No newline at end of file diff --git a/.gitignore b/.gitignore index 2acdcbe..b6e4761 100644 --- a/.gitignore +++ b/.gitignore @@ -127,5 +127,3 @@ dmypy.json # Pyre type checker .pyre/ -client_secret.json -static/** diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..fe6bb89 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,16 @@ +FROM tiangolo/uvicorn-gunicorn-fastapi:python3.11 +ARG APP_VERSION=dev +ENV APP_VERSION=${APP_VERSION} +ENV APP_NAME=aciniformes_backend +ENV APP_MODULE=${APP_NAME}.routes.base:app + +COPY ./requirements.txt /app/ +COPY ./logging_prod.conf /app/ +COPY ./logging_test.conf /app/ +COPY ./.env /app/ +RUN pip install -U -r /app/requirements.txt + +COPY ./alembic.ini /alembic.ini +COPY ./migrations /migrations/ + +COPY . /app diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..5494ac6 --- /dev/null +++ b/LICENSE @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2022, Профком студентов физфака МГУ +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. \ No newline at end of file diff --git a/Makefile b/Makefile index e69de29..46fa713 100644 --- a/Makefile +++ b/Makefile @@ -0,0 +1,24 @@ +run: + source ./venv/bin/activate && uvicorn --reload --log-config logging_dev.conf aciniformes_backend.routes.base:app + +configure: venv + source ./venv/bin/activate && pip install -r requirements.dev.txt -r requirements.txt + +venv: + python3.11 -m venv venv + +atomic-format: + autoflake -r --in-place --remove-all-unused-imports ./$(module) + isort ./$(module) + black ./$(module) + +format: + make atomic-format module=pinger_backend + make atomic-format module=aciniformes_backend + make atomic-format module=settings.py + +db: + docker run -d -p 5432:5432 -e POSTGRES_HOST_AUTH_METHOD=trust --name db-pinger_backend postgres:15 + +migrate: + alembic upgrade head diff --git a/README.md b/README.md new file mode 100644 index 0000000..c45eb52 --- /dev/null +++ b/README.md @@ -0,0 +1,49 @@ +# Aciniformes-project + +Проект пингера сервисов профкома ФФ МГУ. Позволяет пользователю просто и быстро проверять работоспособность любого сайта и сервиса и получать отчет через telegram бота. + + +# Функционал + +1. Опрос любого сервиса или сайта на работоспособность +2. 
Создание расписания проверок указанных сайтов/сервисов +3. Получение удобного отчета о проверке через telegram бота + +# Разработка +Backend разработка – https://github.com/profcomff/.github/wiki/%5Bdev%5D-Backend-разработка + + +# Quick Start +1. Перейдите в папку проекта + +2. Создайте виртуальное окружение командой: +`foo@bar:~$ python3 -m venv ./venv/` +3. Установите библиотеки командой: +`foo@bar:~$ pip install -r requirements.txt` +4. Установите все переменные окружения (см. CONTRIBUTING.md) +5. Запускайте приложение! +`foo@bar:~$ python -m services-backend` + + +# Использование +1. Создание получателя сообщений + 1. Получить или узнать токен telegram бота, через которого будет посылаться сообщение + 2. Узнать id чата-получателя в telegram + 3. Создать получателя сообщений, выполнив запрос POST /receiver с телом: `{"url": "https://api.telegram.org/bot{токен_бота}/sendMessage", "method": "post", "receiver_body": {"chat_id": id_получателя, "text": текст_сообщения}` + +2. Создание опрашиваемого сервиса + 1. 
Выполнить запрос POST /fetcher с телом: `"{ + "type_": "get/post/ping", + "address": "ссылка на опрашиваемый сайт", + "fetch_data": "{}" (Имеет смысла заполнять только если в type_ указан post запрос), + "delay_ok": частота опроса при успешном запросе, + "delay_fail": частота опроса при неудавшемся запросе +}"` + +# Параметризация и плагины +BOT_TOKEN - токен бота-отправителя отчетов + +# Ссылки +Документация проекта - https://api.test.profcomff.com/?urls.primaryName=pinger# + +Backend разработка – https://github.com/profcomff/.github/wiki/%5Bdev%5D-Backend-разработка \ No newline at end of file diff --git a/aciniformes_backend/__init__.pyc b/aciniformes_backend/__init__.pyc deleted file mode 100644 index 65e5b72..0000000 Binary files a/aciniformes_backend/__init__.pyc and /dev/null differ diff --git a/aciniformes_backend/__main__.py b/aciniformes_backend/__main__.py index d75eb22..a0a3f7d 100644 --- a/aciniformes_backend/__main__.py +++ b/aciniformes_backend/__main__.py @@ -2,5 +2,6 @@ from .routes import app + if __name__ == "__main__": uvicorn.run(app) diff --git a/aciniformes_backend/__pycache__/__init__.cpython-310.pyc b/aciniformes_backend/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index ca02b2f..0000000 Binary files a/aciniformes_backend/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/aciniformes_backend/models/__init__.py b/aciniformes_backend/models/__init__.py index 0708500..160d034 100644 --- a/aciniformes_backend/models/__init__.py +++ b/aciniformes_backend/models/__init__.py @@ -1,7 +1,7 @@ -from .metric import Metric -from .fetcher import Fetcher from .alerts import Alert, Receiver from .base import BaseModel -from .auth import Auth +from .fetcher import Fetcher, FetcherType +from .metric import Metric + -__all__ = ["Metric", "Fetcher", "Alert", "Receiver", "BaseModel", "Auth"] +__all__ = ["Metric", "Fetcher", "Alert", "Receiver", "BaseModel", "FetcherType"] diff --git a/aciniformes_backend/models/alerts.py 
b/aciniformes_backend/models/alerts.py index 12b29ee..017f7ed 100644 --- a/aciniformes_backend/models/alerts.py +++ b/aciniformes_backend/models/alerts.py @@ -1,29 +1,31 @@ """Классы хранения настроек нотификаций """ from datetime import datetime -from .base import BaseModel -from sqlalchemy import JSON, DateTime, ForeignKey, Integer, String +from enum import Enum + +from sqlalchemy import JSON, DateTime +from sqlalchemy import Enum as DbEnum +from sqlalchemy import Integer, String from sqlalchemy.orm import Mapped, mapped_column +from .base import BaseModel + + +class Method(str, Enum): + POST: str = "post" + GET: str = "get" + class Receiver(BaseModel): id_: Mapped[int] = mapped_column("id", Integer, primary_key=True) - name: Mapped[str] = mapped_column(String, nullable=False) - chat_id: Mapped[int] = mapped_column(Integer, nullable=False) + url: Mapped[str] = mapped_column(String, nullable=False) + method: Mapped[Method] = mapped_column(DbEnum(Method, native_enum=False), nullable=False) + receiver_body: Mapped[dict] = mapped_column(JSON, nullable=False) create_ts: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow) - modify_ts: Mapped[datetime] = mapped_column( - DateTime, default=datetime.utcnow, onupdate=datetime.utcnow - ) class Alert(BaseModel): id_: Mapped[int] = mapped_column("id", Integer, primary_key=True) data = mapped_column(JSON, nullable=False) - receiver: Mapped[int] = mapped_column( - Integer, ForeignKey("receiver.id", ondelete="CASCADE"), nullable=False - ) filter = mapped_column(String, nullable=False) create_ts = mapped_column(DateTime, default=datetime.utcnow) - modify_ts = mapped_column( - DateTime, default=datetime.utcnow, onupdate=datetime.utcnow - ) diff --git a/aciniformes_backend/models/auth.py b/aciniformes_backend/models/auth.py deleted file mode 100644 index 7dadf00..0000000 --- a/aciniformes_backend/models/auth.py +++ /dev/null @@ -1,15 +0,0 @@ -from .base import BaseModel -from sqlalchemy import Integer, String -from 
sqlalchemy.orm import Mapped, mapped_column - - -class Auth(BaseModel): - id_: Mapped[int] = mapped_column( - Integer, - primary_key=True, - ) - username: Mapped[str] = mapped_column( - String, - nullable=False, - ) - password: Mapped[str] = mapped_column(String, nullable=False, doc="Hashed password") diff --git a/aciniformes_backend/models/base.py b/aciniformes_backend/models/base.py index 96dcde6..99400d2 100644 --- a/aciniformes_backend/models/base.py +++ b/aciniformes_backend/models/base.py @@ -1,5 +1,6 @@ import re -from sqlalchemy.orm import declared_attr, as_declarative + +from sqlalchemy.orm import as_declarative, declared_attr @as_declarative() diff --git a/aciniformes_backend/models/fetcher.py b/aciniformes_backend/models/fetcher.py index 6f84ee6..af0573c 100644 --- a/aciniformes_backend/models/fetcher.py +++ b/aciniformes_backend/models/fetcher.py @@ -2,34 +2,25 @@ """ from datetime import datetime from enum import Enum -from .base import BaseModel + import sqlalchemy -from sqlalchemy import JSON, DateTime, Integer, String +from sqlalchemy import DateTime, Integer, String from sqlalchemy.orm import Mapped, mapped_column +from .base import BaseModel + class FetcherType(str, Enum): - GET = "get_ok" # Пишет True, если GET запрос вернул статус 200..299 - POST = "post_ok" # Пишет True, если POST запрос вернул статус 200..299 - PING = "ping_ok" # Пишет True, если PING успешный + GET = "get" # Пишет положительную метрику, если GET запрос вернул статус 200..299 + POST = "post" # Пишет положительную метрику, если POST запрос вернул статус 200..299 + PING = "ping" # Пишет положительную метрику, если PING успешный class Fetcher(BaseModel): id_: Mapped[int] = mapped_column("id", Integer, primary_key=True) - name: Mapped[str] = mapped_column(String, nullable=False) - type_: Mapped[FetcherType] = mapped_column( - "type", sqlalchemy.Enum(FetcherType, native_enum=False), nullable=False - ) + type_: Mapped[FetcherType] = mapped_column("type", sqlalchemy.Enum(FetcherType, 
native_enum=False), nullable=False) address: Mapped[str] = mapped_column(String, nullable=False) - fetch_data: Mapped[str] = mapped_column( - String - ) # Данные, которые передаются в теле POST запроса - metrics: Mapped[dict] = mapped_column( - JSON, default={}, nullable=False - ) # Статическая часть метрик - metric_name: Mapped[str] = mapped_column( - String, nullable=False - ) # Название динамической части метрик + fetch_data: Mapped[str] = mapped_column(String, nullable=True) # Данные, которые передаются в теле POST запроса delay_ok: Mapped[int] = mapped_column( Integer, default=300, nullable=False ) # Через сколько секунд повторить запрос, если предыдущий успешный @@ -37,6 +28,3 @@ class Fetcher(BaseModel): Integer, default=30, nullable=False ) # Через сколько секунд повторить запрос, если предыдущий неуспешный create_ts: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow) - modify_ts: Mapped[datetime] = mapped_column( - DateTime, default=datetime.utcnow, onupdate=datetime.utcnow - ) diff --git a/aciniformes_backend/models/metric.py b/aciniformes_backend/models/metric.py index 3cc6931..96b0ffa 100644 --- a/aciniformes_backend/models/metric.py +++ b/aciniformes_backend/models/metric.py @@ -2,12 +2,15 @@ """ from datetime import datetime -from .base import BaseModel -from sqlalchemy import Integer, JSON, DateTime + +from sqlalchemy import Boolean, Float, Integer, String from sqlalchemy.orm import Mapped, mapped_column +from .base import BaseModel + class Metric(BaseModel): id_: Mapped[int] = mapped_column("id", Integer, primary_key=True) - metrics: Mapped[dict] = mapped_column(JSON, nullable=False) - create_ts: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow) + name: Mapped[str] = mapped_column("name", String, nullable=False) + ok: Mapped[bool] = mapped_column("ok", Boolean, nullable=False, default=True) + time_delta: Mapped[float] = mapped_column(Float, default=datetime.utcnow) diff --git 
a/aciniformes_backend/routes/__init__.py b/aciniformes_backend/routes/__init__.py index 9b3a8a0..5086daf 100644 --- a/aciniformes_backend/routes/__init__.py +++ b/aciniformes_backend/routes/__init__.py @@ -1 +1,4 @@ from .base import app + + +__all__ = ["app"] diff --git a/aciniformes_backend/routes/__pycache__/__init__.cpython-310.pyc b/aciniformes_backend/routes/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 619ad74..0000000 Binary files a/aciniformes_backend/routes/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/aciniformes_backend/routes/__pycache__/base.cpython-310.pyc b/aciniformes_backend/routes/__pycache__/base.cpython-310.pyc deleted file mode 100644 index 7944233..0000000 Binary files a/aciniformes_backend/routes/__pycache__/base.cpython-310.pyc and /dev/null differ diff --git a/aciniformes_backend/routes/__pycache__/fetcher.cpython-310.pyc b/aciniformes_backend/routes/__pycache__/fetcher.cpython-310.pyc deleted file mode 100644 index c0c86b3..0000000 Binary files a/aciniformes_backend/routes/__pycache__/fetcher.cpython-310.pyc and /dev/null differ diff --git a/aciniformes_backend/routes/__pycache__/mectric.cpython-310.pyc b/aciniformes_backend/routes/__pycache__/mectric.cpython-310.pyc deleted file mode 100644 index 4775410..0000000 Binary files a/aciniformes_backend/routes/__pycache__/mectric.cpython-310.pyc and /dev/null differ diff --git a/aciniformes_backend/routes/alert/__pycache__/__init__.cpython-310.pyc b/aciniformes_backend/routes/alert/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 250dc04..0000000 Binary files a/aciniformes_backend/routes/alert/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/aciniformes_backend/routes/alert/__pycache__/alert.cpython-310.pyc b/aciniformes_backend/routes/alert/__pycache__/alert.cpython-310.pyc deleted file mode 100644 index 21a7322..0000000 Binary files a/aciniformes_backend/routes/alert/__pycache__/alert.cpython-310.pyc and 
/dev/null differ diff --git a/aciniformes_backend/routes/alert/__pycache__/reciever.cpython-310.pyc b/aciniformes_backend/routes/alert/__pycache__/reciever.cpython-310.pyc deleted file mode 100644 index a75858c..0000000 Binary files a/aciniformes_backend/routes/alert/__pycache__/reciever.cpython-310.pyc and /dev/null differ diff --git a/aciniformes_backend/routes/alert/alert.py b/aciniformes_backend/routes/alert/alert.py index b8c1474..4b7f366 100644 --- a/aciniformes_backend/routes/alert/alert.py +++ b/aciniformes_backend/routes/alert/alert.py @@ -1,29 +1,31 @@ -from fastapi import APIRouter +from __future__ import annotations + +import logging + +from fastapi import APIRouter, Depends from fastapi.exceptions import HTTPException from pydantic import BaseModel -from fastapi import Depends from starlette import status -from aciniformes_backend.serivce import ( - alert_service, - AlertServiceInterface, - exceptions as exc, -) + +from aciniformes_backend.serivce import AlertServiceInterface, alert_service +from aciniformes_backend.serivce import exceptions as exc + + +logger = logging.getLogger(__name__) class CreateSchema(BaseModel): - data: dict[str, int | str | list] - receiver: int + data: dict[str, str | list | dict | bool | int | float] filter: str class PostResponseSchema(CreateSchema): - id: int | None + id: int | None = None class UpdateSchema(BaseModel): - data: dict[str, int | str | list] | None - receiver: int | None - filter: str | None + data: dict[str, str | list | dict] | None = None + filter: str | None = None class GetSchema(BaseModel): @@ -41,8 +43,8 @@ async def create( create_schema: CreateSchema, alert: AlertServiceInterface = Depends(alert_service), ): - id_ = await alert.create(create_schema.dict(exclude_unset=True)) - return PostResponseSchema(**create_schema.dict(), id=id_) + id_ = await alert.create(create_schema.model_dump(exclude_unset=True)) + return PostResponseSchema(**create_schema.model_dump(), id=id_) @router.get("") @@ -67,12 
+69,15 @@ async def update( alert: AlertServiceInterface = Depends(alert_service), ): try: - res = await alert.update(id, update_schema.dict(exclude_unset=True)) + res = await alert.update(id, update_schema.model_dump(exclude_unset=True)) except exc.ObjectNotFound: raise HTTPException(status_code=status.HTTP_404_NOT_FOUND) return res @router.delete("/{id}") -async def delete(id: int, alert: AlertServiceInterface = Depends(alert_service)): +async def delete( + id: int, + alert: AlertServiceInterface = Depends(alert_service), +): await alert.delete(id) diff --git a/aciniformes_backend/routes/alert/reciever.py b/aciniformes_backend/routes/alert/reciever.py index 65f3952..2788590 100644 --- a/aciniformes_backend/routes/alert/reciever.py +++ b/aciniformes_backend/routes/alert/reciever.py @@ -1,46 +1,55 @@ +import logging +from enum import Enum + from fastapi import APIRouter, Depends -from pydantic import BaseModel from fastapi.exceptions import HTTPException +from pydantic import BaseModel from starlette import status -from aciniformes_backend.routes.auth import get_current_user -from aciniformes_backend.serivce import ( - receiver_service, - ReceiverServiceInterface, - exceptions as exc, -) + +from aciniformes_backend.serivce import ReceiverServiceInterface +from aciniformes_backend.serivce import exceptions as exc +from aciniformes_backend.serivce import receiver_service + + +logger = logging.getLogger(__name__) + + +class Method(str, Enum): + POST: str = "post" + GET: str = "get" class CreateSchema(BaseModel): - name: str - chat_id: int + url: str + method: Method + receiver_body: dict[str, str | int | list] class PostResponseSchema(CreateSchema): - id: int | None + url: str | None = None + method: Method + receiver_body: dict[str, str | int | list] | None = None class UpdateSchema(BaseModel): - name: str | None - chat_id: int | None + url: str | None + method: Method | None + receiver_body: dict[str, str | int | list] | None = None class GetSchema(BaseModel): - id: 
int + url: str + method: Method + receiver_body: dict[str, str | int | list] | None = None router = APIRouter() @router.post("", response_model=PostResponseSchema) -async def create( - create_schema: CreateSchema, - receiver: ReceiverServiceInterface = Depends(receiver_service), - token=Depends(get_current_user), -): - if not token: - raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) - id_ = await receiver.create(create_schema.dict()) - return PostResponseSchema(**create_schema.dict(), id=id_) +async def create(create_schema: CreateSchema, receiver: ReceiverServiceInterface = Depends(receiver_service)): + id_ = await receiver.create(create_schema.model_dump()) + return PostResponseSchema(**create_schema.model_dump(), id=id_) @router.get("") @@ -65,12 +74,9 @@ async def update( id: int, update_schema: UpdateSchema, receiver: ReceiverServiceInterface = Depends(receiver_service), - token=Depends(get_current_user), ): - if not token: - raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) try: - res = await receiver.update(id, update_schema.dict(exclude_unset=True)) + res = await receiver.update(id, update_schema.model_dump(exclude_unset=True)) except exc.ObjectNotFound: raise HTTPException(status_code=status.HTTP_404_NOT_FOUND) return res @@ -80,8 +86,5 @@ async def update( async def delete( id: int, receiver: ReceiverServiceInterface = Depends(receiver_service), - token=Depends(get_current_user), ): - if not token: - raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) await receiver.delete(id) diff --git a/aciniformes_backend/routes/auth.py b/aciniformes_backend/routes/auth.py deleted file mode 100644 index d0ae1b6..0000000 --- a/aciniformes_backend/routes/auth.py +++ /dev/null @@ -1,129 +0,0 @@ -from fastapi import APIRouter, Depends -from fastapi.security import OAuth2PasswordRequestForm, OAuth2PasswordBearer -from datetime import datetime -from jose import JWTError, jwt -from passlib.context import CryptContext -from starlette import 
status -from fastapi.requests import Request -from pydantic import BaseModel -from fastapi.exceptions import HTTPException -from aciniformes_backend.serivce import AuthServiceInterface, auth_service -import aciniformes_backend.serivce.exceptions as exc -from aciniformes_backend.settings import get_settings -from pydantic import validator - - -class Token(BaseModel): - access_token: str - token_type: str - - -class User(BaseModel): - id: str - username: str - email: str | None = None - - -class RegistrationForm(BaseModel): - username: str - password: str - - @validator("password") - def validate_password(cls, password): - settings = get_settings() - password = settings.PWD_CONTEXT.hash(password) - return password - - -settings = get_settings() -auth_router = APIRouter(tags=["Authentication"]) -oauth2bearer = OAuth2PasswordBearer(tokenUrl="token") -pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto") -oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token") - - -async def create_token(**kwargs): - payload = kwargs.copy() - expire_date = datetime.utcnow() + settings.EXPIRY_TIMEDELTA - payload.update({"exp": expire_date}) - token = jwt.encode(payload, key=settings.JWT_KEY) - return token - - -async def get_current_user( - token: str = Depends(oauth2_scheme), - auth: AuthServiceInterface = Depends(auth_service), -): - credentials_exception = HTTPException( - status_code=status.HTTP_401_UNAUTHORIZED, - detail="Could not validate credentials", - headers={"WWW-Authenticate": "Bearer"}, - ) - try: - payload = jwt.decode(token, settings.JWT_KEY, algorithms=[settings.ALGORITHM]) - username: str = payload.get("username") - if username is None: - raise credentials_exception - except JWTError: - raise credentials_exception - user = await auth.get_user(username) - if user is None: - raise credentials_exception - return user - - -@auth_router.post( - "/token", - status_code=status.HTTP_200_OK, - response_model=Token, - responses={status.HTTP_401_UNAUTHORIZED: 
{"description": "Incorrect params"}}, -) -async def get_token( - _: Request, - form: OAuth2PasswordRequestForm = Depends(), - auth: AuthServiceInterface = Depends(auth_service), -): - try: - user = await auth.authenticate_user(form.username, form.password) - except exc.WrongPassword: - raise HTTPException( - status_code=status.HTTP_401_UNAUTHORIZED, - detail="Incorrect username or password", - headers={"WWW-Authenticate": "Bearer"}, - ) - except exc.NotRegistered: - raise HTTPException( - status_code=status.HTTP_401_UNAUTHORIZED, - detail="Username not found", - headers={"WWW-Authenticate": "Bearer"}, - ) - access_token = await create_token(username=user.username) - return {"access_token": access_token, "token_type": "bearer"} - - -@auth_router.post( - "/register", - status_code=status.HTTP_201_CREATED, - response_model=None, - responses={status.HTTP_400_BAD_REQUEST: {"description": "Incorrect params"}}, -) -async def register( - _: Request, - data: RegistrationForm, - auth: AuthServiceInterface = Depends(auth_service), -) -> None: - username, password = data.username, data.password - try: - await auth.registrate_user(username, password) - except exc.AlreadyRegistered as e: - raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=repr(e)) - - -@auth_router.get( - "/whoami", - status_code=status.HTTP_200_OK, - response_model=User, - responses={status.HTTP_401_UNAUTHORIZED: {"detail": "Unauthorized"}}, -) -async def get_current_user_info(_: Request, current_user=Depends(get_current_user)): - return User(id=current_user.id, username=current_user.username) diff --git a/aciniformes_backend/routes/base.py b/aciniformes_backend/routes/base.py index b264a00..0314ce4 100644 --- a/aciniformes_backend/routes/base.py +++ b/aciniformes_backend/routes/base.py @@ -1,22 +1,22 @@ from fastapi import FastAPI +from fastapi_sqlalchemy import DBSessionMiddleware + +from settings import get_settings + from .alert.alert import router as alert_router from .alert.reciever 
import router as receiver_router from .fetcher import router as fetcher_router from .mectric import router as metric_router -from .auth import auth_router -from fastapi_sqlalchemy import DBSessionMiddleware -from aciniformes_backend.settings import get_settings app = FastAPI() -app.include_router(alert_router, prefix="/alert") -app.include_router(auth_router, prefix="/auth") -app.include_router(receiver_router, prefix="/receiver") -app.include_router(fetcher_router, prefix="/fetcher") -app.include_router(metric_router, prefix="/metric") +app.include_router(alert_router, prefix="/alert", tags=["Alert"]) +app.include_router(receiver_router, prefix="/receiver", tags=["Receiver"]) +app.include_router(fetcher_router, prefix="/fetcher", tags=["Fetcher"]) +app.include_router(metric_router, prefix="/metric", tags=["Metric"]) app.add_middleware( DBSessionMiddleware, - db_url=get_settings().DB_DSN, + db_url=str(get_settings().DB_DSN), engine_args={"pool_pre_ping": True, "isolation_level": "AUTOCOMMIT"}, ) diff --git a/aciniformes_backend/routes/fetcher.py b/aciniformes_backend/routes/fetcher.py index 6e26166..a7c02d1 100644 --- a/aciniformes_backend/routes/fetcher.py +++ b/aciniformes_backend/routes/fetcher.py @@ -1,58 +1,53 @@ +import logging + from fastapi import APIRouter, Depends from fastapi.exceptions import HTTPException -from starlette import status from pydantic import BaseModel, HttpUrl -from .auth import get_current_user -from aciniformes_backend.serivce import ( - FetcherServiceInterface, - fetcher_service, - exceptions as exc, -) +from pydantic.functional_serializers import PlainSerializer +from starlette import status +from typing_extensions import Annotated + +from aciniformes_backend.models.fetcher import FetcherType +from aciniformes_backend.serivce import FetcherServiceInterface +from aciniformes_backend.serivce import exceptions as exc +from aciniformes_backend.serivce import fetcher_service + + +logger = logging.getLogger(__name__) +router = APIRouter() 
class CreateSchema(BaseModel): - name: str - type_: str + type_: FetcherType address: str fetch_data: str - metrics: dict[str, int | str | list] - metric_name: str delay_ok: int delay_fail: int class ResponsePostSchema(CreateSchema): - id: int | None + id: int | None = None class UpdateSchema(BaseModel): - name: str | None - type_: str | None - address: HttpUrl | None - fetch_data: str | None - metrics: dict[str, int | str | list] | None - metric_name: str | None - delay_ok: int | None - delay_fail: int | None + type_: FetcherType | None = None + address: Annotated[HttpUrl, PlainSerializer(lambda x: str(x), return_type=str)] | None = None + fetch_data: str | None = None + delay_ok: int | None = None + delay_fail: int | None = None class GetSchema(BaseModel): id: int -router = APIRouter() - - @router.post("", response_model=ResponsePostSchema) async def create( create_schema: CreateSchema, fetcher: FetcherServiceInterface = Depends(fetcher_service), - token=Depends(get_current_user), ): - if not token: - raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) - id_ = await fetcher.create(create_schema.dict()) - return ResponsePostSchema(**create_schema.dict(), id=id_) + id_ = await fetcher.create(create_schema.model_dump()) + return ResponsePostSchema(**create_schema.model_dump(), id=id_) @router.get("") @@ -80,12 +75,9 @@ async def update( id: int, update_schema: UpdateSchema, fetcher: FetcherServiceInterface = Depends(fetcher_service), - token=Depends(get_current_user), ): - if not token: - raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) try: - res = await fetcher.update(id, update_schema.dict(exclude_unset=True)) + res = await fetcher.update(id, update_schema.model_dump(exclude_unset=True)) except exc.ObjectNotFound: raise HTTPException(status_code=status.HTTP_404_NOT_FOUND) return res @@ -95,8 +87,5 @@ async def update( async def delete( id: int, fetcher: FetcherServiceInterface = Depends(fetcher_service), - token=Depends(get_current_user), 
): - if not token: - raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) await fetcher.delete(id) diff --git a/aciniformes_backend/routes/mectric.py b/aciniformes_backend/routes/mectric.py index b62f51f..f9edc85 100644 --- a/aciniformes_backend/routes/mectric.py +++ b/aciniformes_backend/routes/mectric.py @@ -1,25 +1,28 @@ from fastapi import APIRouter, Depends from fastapi.exceptions import HTTPException +from pydantic import BaseModel from starlette import status -from pydantic import BaseModel, Json -from typing import Any -from aciniformes_backend.serivce import ( - MetricServiceInterface, - metric_service, - exceptions as exc, -) + +from aciniformes_backend.serivce import MetricServiceInterface +from aciniformes_backend.serivce import exceptions as exc +from aciniformes_backend.serivce import metric_service class CreateSchema(BaseModel): - metrics: dict[str, int | str | list] + name: str + ok: bool + time_delta: float class ResponsePostSchema(CreateSchema): - id: int | None + id: int | None = None class GetSchema(BaseModel): id: int + name: str + ok: bool + time_delta: float router = APIRouter() @@ -30,8 +33,8 @@ async def create( metric_schema: CreateSchema, metric: MetricServiceInterface = Depends(metric_service), ): - id_ = await metric.create(metric_schema.metrics) - return ResponsePostSchema(**metric_schema.dict(), id=id_) + id_ = await metric.create(metric_schema.model_dump()) + return ResponsePostSchema(**metric_schema.model_dump(), id=id_) @router.get("") diff --git a/aciniformes_backend/serivce/__init__.py b/aciniformes_backend/serivce/__init__.py index 823260b..547f6a6 100644 --- a/aciniformes_backend/serivce/__init__.py +++ b/aciniformes_backend/serivce/__init__.py @@ -1,20 +1,5 @@ -from .base import ( - BaseService, - AlertServiceInterface, - FetcherServiceInterface, - MetricServiceInterface, - ReceiverServiceInterface, - AuthServiceInterface, -) - -from .bootstrap import ( - receiver_service, - alert_service, - metric_service, - 
fetcher_service, - auth_service, - Config, -) +from .base import AlertServiceInterface, FetcherServiceInterface, MetricServiceInterface, ReceiverServiceInterface +from .bootstrap import alert_service, fetcher_service, metric_service, receiver_service __all__ = [ @@ -22,12 +7,9 @@ "FetcherServiceInterface", "MetricServiceInterface", "ReceiverServiceInterface", - "AuthServiceInterface", - "auth_service", "metric_service", "receiver_service", "alert_service", "fetcher_service", "exceptions", - "Config", ] diff --git a/aciniformes_backend/serivce/alert.py b/aciniformes_backend/serivce/alert.py index 255b736..e300b5b 100644 --- a/aciniformes_backend/serivce/alert.py +++ b/aciniformes_backend/serivce/alert.py @@ -1,8 +1,9 @@ import sqlalchemy as sa -from .base import AlertServiceInterface -import aciniformes_backend.serivce.exceptions as exc import aciniformes_backend.models as db_models +import aciniformes_backend.serivce.exceptions as exc + +from .base import AlertServiceInterface class PgAlertService(AlertServiceInterface): @@ -25,13 +26,8 @@ async def delete(self, id_: int) -> None: self.session.flush() async def update(self, id_: int, item: dict) -> db_models.Alert: - q = ( - sa.update(db_models.Alert) - .where(db_models.Alert.id_ == id_) - .values(**item) - .returning(db_models.Alert) - ) - if not self.get_by_id(id_): + q = sa.update(db_models.Alert).where(db_models.Alert.id_ == id_).values(**item).returning(db_models.Alert) + if not await self.get_by_id(id_): raise exc.ObjectNotFound(id_) res = self.session.execute(q).scalar() return res diff --git a/aciniformes_backend/serivce/auth.py b/aciniformes_backend/serivce/auth.py deleted file mode 100644 index 1732b62..0000000 --- a/aciniformes_backend/serivce/auth.py +++ /dev/null @@ -1,37 +0,0 @@ -from .base import AuthServiceInterface -import sqlalchemy as sa -import aciniformes_backend.serivce.exceptions as exc -from aciniformes_backend.settings import get_settings -import aciniformes_backend.models as db_models - 
-settings = get_settings() - - -class PgAuthService(AuthServiceInterface): - async def registrate_user(self, username, password) -> db_models.Auth | None: - q = ( - sa.insert(db_models.Auth) - .values(username=username, password=password) - .returning(db_models.Auth) - ) - if await self.get_user(username): - raise exc.AlreadyRegistered(username) - else: - return self.session.scalar(q) - - async def authenticate_user(self, username, password) -> db_models.Auth | None: - db_user = await self.get_user(username) - if not db_user: - raise exc.NotRegistered(username) - if not await self._validate_password(db_user.password, password): - raise exc.WrongPassword() - return db_user - - async def get_user(self, username) -> db_models.Auth | None: - return self.session.scalar( - sa.select(db_models.Auth).where(db_models.Auth.username == username) - ) - - @staticmethod - async def _validate_password(db_password, inp_password): - return settings.PWD_CONTEXT.verify(inp_password, db_password) diff --git a/aciniformes_backend/serivce/base.py b/aciniformes_backend/serivce/base.py index 5face23..d48596a 100644 --- a/aciniformes_backend/serivce/base.py +++ b/aciniformes_backend/serivce/base.py @@ -1,9 +1,8 @@ -from abc import abstractmethod, ABC +from abc import ABC, abstractmethod -import pydantic +import sqlalchemy.orm import aciniformes_backend.models as db_models -import sqlalchemy.orm class BaseService(ABC): @@ -65,24 +64,3 @@ class MetricServiceInterface(BaseService): @abstractmethod async def get_by_id(self, id_: int) -> db_models.Metric: raise NotImplementedError - - -class AuthServiceInterface(ABC): - def __init__(self, session: sqlalchemy.orm.Session | None): - self.session = session - - @abstractmethod - async def registrate_user(self, username, password) -> db_models.Auth | None: - raise NotImplementedError - - @abstractmethod - async def authenticate_user(self, username, password) -> db_models.Auth | None: - raise NotImplementedError - - @abstractmethod - async def 
get_user(self, username) -> db_models.Auth | None: - raise NotImplementedError - - @staticmethod - async def _validate_password(db_password, inp_password): - raise NotImplementedError diff --git a/aciniformes_backend/serivce/bootstrap.py b/aciniformes_backend/serivce/bootstrap.py index 77a20cf..7d1febe 100644 --- a/aciniformes_backend/serivce/bootstrap.py +++ b/aciniformes_backend/serivce/bootstrap.py @@ -1,52 +1,22 @@ from fastapi_sqlalchemy import db -from .metric import PgMetricService + from .alert import PgAlertService -from .receiver import PgReceiverService from .fetcher import PgFetcherService -from .auth import PgAuthService -from .fake import ( - FakeAlertService, - FakeMetricService, - FakeReceiverService, - FakeFetcherService, - FakeAuthService, -) - - -class Config: - fake: bool = False +from .metric import PgMetricService +from .receiver import PgReceiverService def metric_service(): - if Config.fake: - return FakeMetricService(None) - with db(): - return PgMetricService(db.session) + return PgMetricService(db.session) def alert_service(): - if Config.fake: - return FakeAlertService(None) - with db(): - return PgAlertService(db.session) + return PgAlertService(db.session) def receiver_service(): - if Config.fake: - return FakeReceiverService(None) - with db(): - return PgReceiverService(db.session) + return PgReceiverService(db.session) def fetcher_service(): - if Config.fake: - return FakeFetcherService(None) - with db(): - return PgFetcherService(db.session) - - -def auth_service(): - if Config.fake: - return FakeAuthService(None) - with db(): - return PgAuthService(db.session) + return PgFetcherService(db.session) diff --git a/aciniformes_backend/serivce/exceptions.py b/aciniformes_backend/serivce/exceptions.py index dca80c7..e4148ac 100644 --- a/aciniformes_backend/serivce/exceptions.py +++ b/aciniformes_backend/serivce/exceptions.py @@ -1,23 +1,3 @@ -class SessionNotInitializedError(Exception): - def __init__(self): - super().__init__(f"DB 
Session not initialized") - - class ObjectNotFound(Exception): def __init__(self, key): super().__init__(f"Object not found: {key}") - - -class AlreadyRegistered(Exception): - def __init__(self, username): - super().__init__(f"User with {username} already registered") - - -class NotRegistered(Exception): - def __init__(self, username): - super().__init__(f"Username {username} not registered yet") - - -class WrongPassword(Exception): - def __init__(self): - super().__init__(f"Incorrect password") diff --git a/aciniformes_backend/serivce/fake.py b/aciniformes_backend/serivce/fake.py deleted file mode 100644 index 602322b..0000000 --- a/aciniformes_backend/serivce/fake.py +++ /dev/null @@ -1,151 +0,0 @@ -import pydantic - -from .base import ( - AlertServiceInterface, - ReceiverServiceInterface, - FetcherServiceInterface, - MetricServiceInterface, - AuthServiceInterface, -) -import aciniformes_backend.serivce.exceptions as exc -import aciniformes_backend.models as db_models -from aciniformes_backend.settings import get_settings - - -class FakeAlertService(AlertServiceInterface): - id_incr = 0 - repository = dict() - - def __init__(self, session): - super().__init__(session) - - async def create(self, item: dict) -> int: - self.repository[self.id_incr] = db_models.Alert(**item) - self.id_incr += 1 - return self.id_incr - - async def get_by_id(self, id_: int) -> db_models.Alert: - if id_ in self.repository: - return self.repository[id_] - raise exc.ObjectNotFound(id_) - - async def delete(self, id_: int) -> None: - self.repository[id_] = None - - async def update(self, id_: int, item: dict) -> db_models.Alert: - if id_ in self.repository: - self.repository[id_] = db_models.Alert(**item) - return self.repository[id_] - raise exc.ObjectNotFound(id_) - - async def get_all(self) -> list[db_models.BaseModel]: - return list(self.repository.values()) - - -class FakeReceiverService(ReceiverServiceInterface): - id_incr = 0 - repository = dict() - - def __init__(self, session): - 
super().__init__(session) - - async def create(self, item: dict) -> int: - self.repository[self.id_incr] = db_models.Receiver(**item) - self.id_incr += 1 - return self.id_incr - - async def get_by_id(self, id_: int) -> db_models.Receiver: - if id_ in self.repository: - return self.repository[id_] - raise exc.ObjectNotFound(id_) - - async def delete(self, id_: int) -> None: - self.repository[id_] = None - - async def update(self, id_: int, item: dict) -> db_models.Receiver: - if id_ in self.repository: - self.repository[id_] = db_models.Receiver(**item) - return self.repository[id_] - raise exc.ObjectNotFound(id_) - - async def get_all(self) -> list[db_models.BaseModel]: - return list(self.repository.values()) - - -class FakeFetcherService(FetcherServiceInterface): - id_incr = 0 - repository = dict() - - def __init__(self, session): - super().__init__(session) - - async def create(self, item: dict) -> int: - self.repository[self.id_incr] = db_models.Fetcher(**item) - self.id_incr += 1 - return self.id_incr - - async def get_by_id(self, id_: int) -> db_models.Fetcher: - if id_ in self.repository: - return self.repository[id_] - raise exc.ObjectNotFound(id_) - - async def delete(self, id_: int) -> None: - self.repository[id_] = None - - async def update(self, id_: int, item: dict) -> db_models.Fetcher: - if id_ in self.repository: - self.repository[id_] = db_models.Fetcher(**item) - return self.repository[id_] - raise exc.ObjectNotFound(id_) - - async def get_all(self) -> list[db_models.BaseModel]: - return list(self.repository.values()) - - -class FakeMetricService(MetricServiceInterface): - id_incr = 0 - repository = dict() - - def __init__(self, session): - super().__init__(session) - - async def create(self, item: dict) -> int: - self.repository[self.id_incr] = db_models.Fetcher(**item) - self.id_incr += 1 - return self.id_incr - - async def get_by_id(self, id_: int) -> db_models.Metric: - if id_ in self.repository: - return self.repository[id_] - raise 
exc.ObjectNotFound(id_) - - async def get_all(self) -> list[db_models.BaseModel]: - return list(self.repository.values()) - - -class FakeAuthService(AuthServiceInterface): - repository = [] - - async def registrate_user(self, username, password) -> db_models.Auth | None: - db_user = db_models.Auth(username=username, password=password) - self.repository.append(db_user) - return db_user - - async def authenticate_user(self, username, password) -> db_models.Auth | None: - for auth in self.repository: - if ( - self._validate_password(auth.password, password) - and auth.username == username - ): - return auth - raise exc.NotRegistered(username) - - async def get_user(self, username) -> db_models.Auth | None: - for auth in self.repository: - if auth.username == username: - return auth - raise exc.NotRegistered(username) - - @staticmethod - async def _validate_password(db_password, inp_password): - return get_settings().PWD_CONTEXT.verify(inp_password, db_password) diff --git a/aciniformes_backend/serivce/fetcher.py b/aciniformes_backend/serivce/fetcher.py index 6487008..fbb1cf6 100644 --- a/aciniformes_backend/serivce/fetcher.py +++ b/aciniformes_backend/serivce/fetcher.py @@ -1,8 +1,9 @@ import sqlalchemy as sa -from .base import FetcherServiceInterface -import aciniformes_backend.serivce.exceptions as exc import aciniformes_backend.models as db_models +import aciniformes_backend.serivce.exceptions as exc + +from .base import FetcherServiceInterface class PgFetcherService(FetcherServiceInterface): @@ -25,12 +26,7 @@ async def delete(self, id_: int) -> None: self.session.flush() async def update(self, id_: int, item: dict) -> db_models.Fetcher: - q = ( - sa.update(db_models.Fetcher) - .where(db_models.Fetcher.id_ == id_) - .values(**item) - .returning(db_models.Fetcher) - ) + q = sa.update(db_models.Fetcher).where(db_models.Fetcher.id_ == id_).values(**item).returning(db_models.Fetcher) if not await self.get_by_id(id_): raise exc.ObjectNotFound(id_) res = 
self.session.execute(q).scalar() diff --git a/aciniformes_backend/serivce/metric.py b/aciniformes_backend/serivce/metric.py index c5d5c84..95527b8 100644 --- a/aciniformes_backend/serivce/metric.py +++ b/aciniformes_backend/serivce/metric.py @@ -1,9 +1,10 @@ import sqlalchemy as sa -from .base import MetricServiceInterface import aciniformes_backend.models as db_models import aciniformes_backend.serivce.exceptions as exc +from .base import MetricServiceInterface + class PgMetricService(MetricServiceInterface): async def create(self, item: dict) -> int: diff --git a/aciniformes_backend/serivce/receiver.py b/aciniformes_backend/serivce/receiver.py index aea6005..ac52723 100644 --- a/aciniformes_backend/serivce/receiver.py +++ b/aciniformes_backend/serivce/receiver.py @@ -2,9 +2,10 @@ import sqlalchemy as sa -from .base import ReceiverServiceInterface -import aciniformes_backend.serivce.exceptions as exc import aciniformes_backend.models as db_models +import aciniformes_backend.serivce.exceptions as exc + +from .base import ReceiverServiceInterface class PgReceiverService(ReceiverServiceInterface): @@ -33,7 +34,7 @@ async def update(self, id_: int, item: dict) -> Type[db_models.Receiver]: .values(**item) .returning(db_models.Receiver) ) - if not self.get_by_id(id_): + if not await self.get_by_id(id_): raise exc.ObjectNotFound(id_) res = self.session.execute(q).scalar() return res diff --git a/aciniformes_backend/settings.py b/aciniformes_backend/settings.py deleted file mode 100644 index b6b34b5..0000000 --- a/aciniformes_backend/settings.py +++ /dev/null @@ -1,23 +0,0 @@ -from pydantic import BaseSettings, PostgresDsn -from functools import lru_cache -from passlib.context import CryptContext -import datetime - - -class Settings(BaseSettings): - DB_DSN: PostgresDsn - PWD_CONTEXT = CryptContext(schemes=["bcrypt"], deprecated="auto") - EXPIRY_TIMEDELTA: datetime.timedelta = datetime.timedelta(days=7) - JWT_KEY = "42" - ALGORITHM: str = "HS256" - - class Config: - 
"""Pydantic BaseSettings config""" - - case_sensitive = True - env_file = ".env" - - -@lru_cache() -def get_settings(): - return Settings() diff --git a/alembic.ini b/alembic.ini index c56e92b..f6c8899 100644 --- a/alembic.ini +++ b/alembic.ini @@ -2,13 +2,10 @@ [alembic] # path to migration scripts -script_location = migrator +script_location = migrations -# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s -# Uncomment the line below if you want the files to be prepended with date and time -# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file -# for all available tokens -# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s +# template used to generate migration files +# file_template = %%(rev)s_%%(slug)s # sys.path path, will be prepended to sys.path if present. # defaults to the current working directory. @@ -36,10 +33,10 @@ prepend_sys_path = . # sourceless = false # version location specification; This defaults -# to migrator/versions. When using multiple version +# to migrations/versions. When using multiple version # directories, initial revisions must be specified with --version-path. # The path separator used here should be the separator specified by "version_path_separator" below. -# version_locations = %(here)s/bar:%(here)s/bat:migrator/versions +# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions # version path separator; As mentioned above, this is the character used to split # version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. 
diff --git a/alert_bot/__main__.py b/alert_bot/__main__.py deleted file mode 100644 index e69de29..0000000 diff --git a/alert_bot/asgi.py b/alert_bot/asgi.py deleted file mode 100644 index d301dde..0000000 --- a/alert_bot/asgi.py +++ /dev/null @@ -1,9 +0,0 @@ -from fastapi import FastAPI - - -app = FastAPI - - -@app.post("/alert") -def post_alert(): - pass diff --git a/flake8.conf b/flake8.conf new file mode 100644 index 0000000..547208a --- /dev/null +++ b/flake8.conf @@ -0,0 +1,35 @@ +[flake8] +select = + E, W, # pep8 errors and warnings + F, # pyflakes + C9, # McCabe + N8, # Naming Conventions + #B, S, # bandit + #C, # commas + #D, # docstrings + #P, # string-format + #Q, # quotes + +ignore = + E122, # continuation line missing indentation or outdented + E123, # closing bracket does not match indentation of opening bracket's line + E127, # continuation line over-indented for visual indent + E131, # continuation line unaligned for hanging + E203, # whitespace before ':' + E225, # missing whitespace around operator + E226, # missing whitespace around arithmetic operator + E24, # multiple spaces after ',' or tab after ',' + E275, # missing whitespace after keyword + E305, # expected 2 blank lines after end of function or class + E306, # expected 1 blank line before a nested definition + E402, # module level import not at top of file + E722, # do not use bare except, specify exception instead + E731, # do not assign a lambda expression, use a def + E741, # do not use variables named 'l', 'O', or 'I' + + F722, # syntax error in forward annotation + + W503, # line break before binary operator + W504, # line break after binary operator + +max-line-length = 120 \ No newline at end of file diff --git a/logging_dev.conf b/logging_dev.conf new file mode 100644 index 0000000..7837272 --- /dev/null +++ b/logging_dev.conf @@ -0,0 +1,21 @@ +[loggers] +keys=root + +[handlers] +keys=all + +[formatters] +keys=main + +[logger_root] +level=DEBUG +handlers=all + +[handler_all] 
+class=StreamHandler +formatter=main +level=DEBUG +args=(sys.stdout,) + +[formatter_main] +format=%(asctime)s %(levelname)-8s %(name)-15s %(message)s diff --git a/logging_pinger.conf b/logging_pinger.conf new file mode 100644 index 0000000..8432334 --- /dev/null +++ b/logging_pinger.conf @@ -0,0 +1,21 @@ +[loggers] +keys=root + +[handlers] +keys=all + +[formatters] +keys=json + +[logger_root] +level=INFO +handlers=all + +[handler_all] +class=StreamHandler +formatter=json +level=INFO +args=(sys.stdout,) + +[formatter_json] +class=logger.formatter.JSONLogFormatter diff --git a/logging_prod.conf b/logging_prod.conf new file mode 100644 index 0000000..971f309 --- /dev/null +++ b/logging_prod.conf @@ -0,0 +1,35 @@ +[loggers] +keys=root,gunicorn.error,gunicorn.access + +[handlers] +keys=all + +[formatters] +keys=json + +[logger_root] +level=INFO +handlers=all + +[logger_gunicorn.error] +level=INFO +handlers=all +propagate=0 +qualname=gunicorn.error +formatter=json + +[logger_gunicorn.access] +level=INFO +handlers=all +propagate=0 +qualname=gunicorn.access +formatter=json + +[handler_all] +class=StreamHandler +formatter=json +level=INFO +args=(sys.stdout,) + +[formatter_json] +class=logger.formatter.JSONLogFormatter diff --git a/logging_test.conf b/logging_test.conf new file mode 100644 index 0000000..6bbe691 --- /dev/null +++ b/logging_test.conf @@ -0,0 +1,36 @@ +[loggers] +keys=root,gunicorn.error,gunicorn.access + +[handlers] +keys=all + +[formatters] +keys=json + +[logger_root] +level=DEBUG +handlers=all +formatter=json + +[logger_gunicorn.error] +level=DEBUG +handlers=all +propagate=0 +qualname=gunicorn.error +formatter=json + +[logger_gunicorn.access] +level=DEBUG +handlers=all +propagate=0 +qualname=gunicorn.access +formatter=json + +[handler_all] +class=StreamHandler +formatter=json +level=DEBUG +args=(sys.stdout,) + +[formatter_json] +class=logger.formatter.JSONLogFormatter diff --git a/migrator/README b/migrations/README similarity index 100% rename from 
migrator/README rename to migrations/README diff --git a/migrator/env.py b/migrations/env.py similarity index 90% rename from migrator/env.py rename to migrations/env.py index 9d1ba3e..e97c43b 100644 --- a/migrator/env.py +++ b/migrations/env.py @@ -1,15 +1,15 @@ from logging.config import fileConfig -from aciniformes_backend.settings import get_settings -from aciniformes_backend.models import BaseModel -from sqlalchemy import engine_from_config -from sqlalchemy import pool + from alembic import context +from sqlalchemy import engine_from_config, pool + +from aciniformes_backend.models import BaseModel +from settings import get_settings config = context.config settings = get_settings() - if config.config_file_name is not None: fileConfig(config.config_file_name) @@ -48,7 +48,7 @@ def run_migrations_online() -> None: """ configuration = config.get_section(config.config_ini_section) - configuration["sqlalchemy.url"] = settings.DB_DSN + configuration["sqlalchemy.url"] = str(settings.DB_DSN) connectable = engine_from_config( configuration, prefix="sqlalchemy.", diff --git a/migrator/script.py.mako b/migrations/script.py.mako similarity index 100% rename from migrator/script.py.mako rename to migrations/script.py.mako diff --git a/migrations/versions/febba504289a_init.py b/migrations/versions/febba504289a_init.py new file mode 100644 index 0000000..36a7166 --- /dev/null +++ b/migrations/versions/febba504289a_init.py @@ -0,0 +1,61 @@ +"""Init + +Revision ID: febba504289a +Revises: +Create Date: 2023-04-24 13:29:41.968973 + +""" +import sqlalchemy as sa +from alembic import op + + +revision = 'febba504289a' +down_revision = None +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.create_table( + 'alert', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('data', sa.JSON(), nullable=False), + sa.Column('filter', sa.String(), nullable=False), + sa.Column('create_ts', sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint('id'), + ) + 
op.create_table( + 'fetcher', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('type', sa.Enum('GET', 'POST', 'PING', name='fetchertype', native_enum=False), nullable=False), + sa.Column('address', sa.String(), nullable=False), + sa.Column('fetch_data', sa.String(), nullable=True), + sa.Column('delay_ok', sa.Integer(), nullable=False), + sa.Column('delay_fail', sa.Integer(), nullable=False), + sa.Column('create_ts', sa.DateTime(), nullable=False), + sa.PrimaryKeyConstraint('id'), + ) + op.create_table( + 'metric', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(), nullable=False), + sa.Column('ok', sa.Boolean(), nullable=False), + sa.Column('time_delta', sa.Float(), nullable=False), + sa.PrimaryKeyConstraint('id'), + ) + op.create_table( + 'receiver', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('url', sa.String(), nullable=False), + sa.Column('method', sa.Enum('POST', 'GET', name='method', native_enum=False), nullable=False), + sa.Column('receiver_body', sa.JSON(), nullable=False), + sa.Column('create_ts', sa.DateTime(), nullable=False), + sa.PrimaryKeyConstraint('id'), + ) + + +def downgrade() -> None: + op.drop_table('receiver') + op.drop_table('metric') + op.drop_table('fetcher') + op.drop_table('alert') diff --git a/migrator/versions/85489ec3d0d0_auth.py b/migrator/versions/85489ec3d0d0_auth.py deleted file mode 100644 index 568f929..0000000 --- a/migrator/versions/85489ec3d0d0_auth.py +++ /dev/null @@ -1,84 +0,0 @@ -"""auth - -Revision ID: 85489ec3d0d0 -Revises: -Create Date: 2023-02-01 14:35:19.439045 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = "85489ec3d0d0" -down_revision = None -branch_labels = None -depends_on = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! 
### - op.create_table( - "auth", - sa.Column("id", sa.Integer(), nullable=False), - sa.Column("username", sa.String(), nullable=False), - sa.Column("password", sa.String(), nullable=False), - sa.PrimaryKeyConstraint("id"), - ) - op.create_table( - "fetcher", - sa.Column("id", sa.Integer(), nullable=False), - sa.Column("name", sa.String(), nullable=False), - sa.Column( - "type", - sa.Enum("GET", "POST", "PING", name="fetchertype", native_enum=False), - nullable=False, - ), - sa.Column("address", sa.String(), nullable=False), - sa.Column("fetch_data", sa.String(), nullable=False), - sa.Column("metrics", sa.JSON(), nullable=False), - sa.Column("metric_name", sa.String(), nullable=False), - sa.Column("delay_ok", sa.Integer(), nullable=False), - sa.Column("delay_fail", sa.Integer(), nullable=False), - sa.Column("create_ts", sa.DateTime(), nullable=False), - sa.Column("modify_ts", sa.DateTime(), nullable=False), - sa.PrimaryKeyConstraint("id"), - ) - op.create_table( - "metric", - sa.Column("id", sa.Integer(), nullable=False), - sa.Column("metrics", sa.JSON(), nullable=False), - sa.Column("create_ts", sa.DateTime(), nullable=False), - sa.PrimaryKeyConstraint("id"), - ) - op.create_table( - "receiver", - sa.Column("id", sa.Integer(), nullable=False), - sa.Column("name", sa.String(), nullable=False), - sa.Column("chat_id", sa.Integer(), nullable=False), - sa.Column("create_ts", sa.DateTime(), nullable=False), - sa.Column("modify_ts", sa.DateTime(), nullable=False), - sa.PrimaryKeyConstraint("id"), - ) - op.create_table( - "alert", - sa.Column("id", sa.Integer(), nullable=False), - sa.Column("data", sa.JSON(), nullable=False), - sa.Column("receiver", sa.Integer(), nullable=False), - sa.Column("filter", sa.String(), nullable=False), - sa.Column("create_ts", sa.DateTime(), nullable=True), - sa.Column("modify_ts", sa.DateTime(), nullable=True), - sa.ForeignKeyConstraint(["receiver"], ["receiver.id"], ondelete="CASCADE"), - sa.PrimaryKeyConstraint("id"), - ) - # ### end Alembic 
commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - op.drop_table("alert") - op.drop_table("receiver") - op.drop_table("metric") - op.drop_table("fetcher") - op.drop_table("auth") - # ### end Alembic commands ### diff --git a/ping/__main__.py b/ping/__main__.py deleted file mode 100644 index e69de29..0000000 diff --git a/ping/asgi.py b/ping/asgi.py deleted file mode 100644 index 4200270..0000000 --- a/ping/asgi.py +++ /dev/null @@ -1,4 +0,0 @@ -from fastapi import FastAPI - - -ping_app = FastAPI() diff --git a/ping/service/bootstrap.py b/ping/service/bootstrap.py deleted file mode 100644 index e69de29..0000000 diff --git a/ping/service/crud.py b/ping/service/crud.py deleted file mode 100644 index ebc689d..0000000 --- a/ping/service/crud.py +++ /dev/null @@ -1,27 +0,0 @@ -from abc import ABC, abstractmethod - - -class CrudServiceInterface(ABC): - @abstractmethod - async def get_fetcher(self): - raise NotImplementedError - - @abstractmethod - async def get_metric(self): - raise NotImplementedError - - @abstractmethod - async def add_metric(self): - raise NotImplementedError - - @abstractmethod - async def add_alert(self): - raise NotImplementedError - - -class CrudService(CrudServiceInterface): - pass - - -class FakeCrudService(CrudServiceInterface): - pass diff --git a/ping/service/ping.py b/ping/service/ping.py deleted file mode 100644 index 2f38252..0000000 --- a/ping/service/ping.py +++ /dev/null @@ -1,17 +0,0 @@ -from abc import ABC, abstractmethod -import asyncio -import aioschedule as schedule - - -class PingServiceInterface(ABC): - event_loop: asyncio.BaseEventLoop - - @abstractmethod - async def add_event(self, event): - raise NotImplementedError - - -class PingService(PingServiceInterface): - async def add_event(self, event): - schedule.every(1).second.run(event) - self.event_loop.run_until_complete(schedule.run_pending()) diff --git a/ping/settings.py b/ping/settings.py deleted file mode 100644 index 
e69de29..0000000 diff --git a/alert_bot/__init__.py b/pinger_backend/__init__.py similarity index 100% rename from alert_bot/__init__.py rename to pinger_backend/__init__.py diff --git a/pinger_backend/__main__.py b/pinger_backend/__main__.py new file mode 100644 index 0000000..18ac5b7 --- /dev/null +++ b/pinger_backend/__main__.py @@ -0,0 +1,29 @@ +import asyncio +import signal +from logging.config import fileConfig +from pathlib import Path + +from settings import get_settings + +from .service.scheduler import ApSchedulerService + + +path = Path(__file__).resolve().parents[1] + + +fileConfig(f"{path}/logging_pinger.conf") + + +def sigint_callback(scheduler: ApSchedulerService) -> None: + scheduler.stop() + exit(0) + + +if __name__ == "__main__": + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + scheduler = ApSchedulerService() + scheduler.backend_url = get_settings().BACKEND_URL + loop.add_signal_handler(signal.SIGINT, callback=lambda: sigint_callback(scheduler)) + loop.create_task(scheduler.start()) + loop.run_forever() diff --git a/pinger_backend/exceptions.py b/pinger_backend/exceptions.py new file mode 100644 index 0000000..7f37e19 --- /dev/null +++ b/pinger_backend/exceptions.py @@ -0,0 +1,8 @@ +class AlreadyRunning(Exception): + def __init__(self): + super().__init__("Scheduler is already running") + + +class AlreadyStopped(Exception): + def __init__(self): + super().__init__("Scheduler is already stopped") diff --git a/ping/__init__.py b/pinger_backend/service/__init__.py similarity index 100% rename from ping/__init__.py rename to pinger_backend/service/__init__.py diff --git a/pinger_backend/service/ping.py b/pinger_backend/service/ping.py new file mode 100644 index 0000000..6a6b161 --- /dev/null +++ b/pinger_backend/service/ping.py @@ -0,0 +1,14 @@ +import asyncio +from concurrent.futures import ThreadPoolExecutor +from functools import partial +from typing import Literal + +from ping3 import ping as sync_ping + + +thread_pool = 
ThreadPoolExecutor() + + +async def ping(host: str) -> float | Literal[True, None]: + loop = asyncio.get_event_loop() + return await loop.run_in_executor(thread_pool, partial(sync_ping, host)) diff --git a/pinger_backend/service/scheduler.py b/pinger_backend/service/scheduler.py new file mode 100644 index 0000000..4cdec2d --- /dev/null +++ b/pinger_backend/service/scheduler.py @@ -0,0 +1,174 @@ +import time +from abc import ABC +from contextlib import asynccontextmanager +from typing import AsyncIterator + +import aiohttp +from apscheduler.schedulers.asyncio import AsyncIOScheduler + +from aciniformes_backend.models import Alert, Fetcher, FetcherType, Metric, Receiver +from aciniformes_backend.routes.alert.alert import CreateSchema as AlertCreateSchema +from aciniformes_backend.routes.mectric import CreateSchema as MetricCreateSchema +from pinger_backend.exceptions import AlreadyRunning, AlreadyStopped +from settings import get_settings + +from .ping import ping +from .session import dbsession + + +class ApSchedulerService(ABC): + scheduler = AsyncIOScheduler() + settings = get_settings() + backend_url = str + fetchers: list + + def add_fetcher(self, fetcher: Fetcher): + self.scheduler.add_job( + self._fetch_it, + args=[fetcher], + id=f"{fetcher.address} {fetcher.create_ts}", + seconds=fetcher.delay_ok, + trigger="interval", + ) + + def delete_fetcher(self, fetcher: Fetcher): + self.scheduler.remove_job(f"{fetcher.address} {fetcher.create_ts}") + + def get_jobs(self): + return [j.id for j in self.scheduler.get_jobs()] + + async def start(self): + if self.scheduler.running: + raise AlreadyRunning + self.scheduler.add_job( + self._fetcher_update_job, + id="check_fetchers", + seconds=self.settings.FETCHERS_UPDATE_DELAY_IN_SECONDS, + trigger="interval", + ) + self.fetchers = dbsession().query(Fetcher).all() + self.scheduler.start() + for fetcher in self.fetchers: + self.add_fetcher(fetcher) + await self._fetch_it(fetcher) + + def stop(self): + if not 
self.scheduler.running: + raise AlreadyStopped + for job in self.scheduler.get_jobs(): + job.remove() + self.scheduler.shutdown() + + async def write_alert(self, alert: AlertCreateSchema): + receivers = dbsession().query(Receiver).all() + session = dbsession() + alert = Alert(**alert.model_dump(exclude_none=True)) + session.add(alert) + session.flush() + for receiver in receivers: + async with aiohttp.ClientSession() as s: + async with s.request(method=receiver.method, url=receiver.url, data=receiver.receiver_body): + pass + + @staticmethod + def _parse_timedelta(fetcher: Fetcher) -> tuple[int, int]: + return fetcher.delay_ok, fetcher.delay_fail + + @asynccontextmanager + async def __update_fetchers(self) -> AsyncIterator[None]: + jobs = [job.id for job in self.scheduler.get_jobs()] + old_fetchers = self.fetchers + new_fetchers = dbsession().query(Fetcher).all() + + # Проверка на удаление фетчера + for fetcher in old_fetchers: + if (fetcher.address not in [ftch.address for ftch in new_fetchers]) and ( + f"{fetcher.address} {fetcher.create_ts}" in jobs + ): + self.scheduler.remove_job(job_id=f"{fetcher.address} {fetcher.create_ts}") + + jobs = [job.id for job in self.scheduler.get_jobs()] + # Проверка на добавление нового фетчера + for fetcher in new_fetchers: + if (f"{fetcher.address} {fetcher.create_ts}" not in jobs) and ( + fetcher.address not in [ftch.address for ftch in old_fetchers] + ): + self.add_fetcher(fetcher) + self.fetchers.append(fetcher) + yield + self.scheduler.reschedule_job( + "check_fetchers", seconds=self.settings.FETCHERS_UPDATE_DELAY_IN_SECONDS, trigger="interval" + ) + + async def _fetcher_update_job(self) -> None: + async with self.__update_fetchers(): + pass + + @staticmethod + def create_metric(prev: float, fetcher: Fetcher, res: aiohttp.ClientResponse) -> MetricCreateSchema: + cur = time.time() + timing = cur - prev + if fetcher.type_ != FetcherType.PING: + return MetricCreateSchema( + name=fetcher.address, + ok=True if res and (200 <= 
res.status <= 300) else False, + time_delta=timing, + ) + return MetricCreateSchema( + name=fetcher.address, + ok=res is not False and res is not None, + time_delta=timing, + ) + + def _reschedule_job(self, fetcher: Fetcher, ok: bool): + self.scheduler.reschedule_job( + f"{fetcher.address} {fetcher.create_ts}", + seconds=fetcher.delay_ok if ok else fetcher.delay_fail, + trigger="interval", + ) + + async def _process_fail( + self, fetcher: Fetcher, metric: MetricCreateSchema, res: aiohttp.ClientResponse | None | float + ) -> None: + if fetcher.type_ != FetcherType.PING: + alert = AlertCreateSchema(data=metric.model_dump(), filter="500" if res is None else str(res.status)) + else: + _filter = "Service Unavailable" if res is False else "Timeout Error" if res is None else "Unknown Error" + alert = AlertCreateSchema(data=metric.model_dump(), filter=_filter) + await self.write_alert(alert) + self._reschedule_job(fetcher, False) + + def add_metric(self, metric: MetricCreateSchema): + session = dbsession() + metric = Metric(**metric.model_dump(exclude_none=True)) + session.add(metric) + session.commit() + session.flush() + return metric + + async def _fetch_it(self, fetcher: Fetcher): + prev = time.time() + res = None + try: + match fetcher.type_: + case FetcherType.GET: + async with aiohttp.ClientSession() as session: + async with session.get(url=fetcher.address) as res: + pass + case FetcherType.POST: + async with aiohttp.ClientSession() as session: + async with session.post(url=fetcher.address, data=fetcher.fetch_data) as res: + pass + case FetcherType.PING: + res = await ping(fetcher.address) + except Exception: + metric = ApSchedulerService.create_metric(prev, fetcher, res) + self.add_metric(metric) + await self._process_fail(fetcher, metric, None) + else: + metric = ApSchedulerService.create_metric(prev, fetcher, res) + self.add_metric(metric) + if not metric.ok: + await self._process_fail(fetcher, metric, res) + else: + self._reschedule_job(fetcher, True) diff --git 
a/pinger_backend/service/session.py b/pinger_backend/service/session.py new file mode 100644 index 0000000..29b32b3 --- /dev/null +++ b/pinger_backend/service/session.py @@ -0,0 +1,15 @@ +from sqlalchemy import create_engine +from sqlalchemy.orm import Session, declarative_base, sessionmaker + +from settings import get_settings as db_settings + + +def dbsession() -> Session: + settings = db_settings() + engine = create_engine(str(settings.DB_DSN), execution_options={"isolation_level": "AUTOCOMMIT"}) + session = sessionmaker(bind=engine) + Base = declarative_base() + localsession = session() + Base.metadata.create_all(engine) + + return localsession diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..bf2a06d --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,23 @@ +[tool.black] +line-length = 120 +target-version = ['py311'] +skip-string-normalization = true + +[tool.isort] +line_length = 120 +multi_line_output = 3 +profile = "black" +lines_after_imports = 2 +include_trailing_comma = true + +[tool.pytest.ini_options] +minversion = "7.0" +python_files = "*.py" +testpaths = [ + "tests" +] +pythonpath = [ + "." 
+] +log_cli=true +log_level=0 diff --git a/requirements.dev.txt b/requirements.dev.txt index e8134f9..a26c4e6 100644 --- a/requirements.dev.txt +++ b/requirements.dev.txt @@ -1,4 +1,5 @@ pytest pytest-cov pytest-asyncio +pytest_mock httpx \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index 92d9a3f..f147dce 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,16 +1,16 @@ fastapi sqlalchemy pydantic +pydantic-settings uvicorn alembic python-dotenv fastapi-sqlalchemy psycopg2-binary gunicorn -starlette -aioschedule pytest -python-jose python-multipart -passlib -bcrypt \ No newline at end of file +APScheduler +ping3 +auth-lib-profcomff +requests \ No newline at end of file diff --git a/settings.py b/settings.py new file mode 100644 index 0000000..dd49b95 --- /dev/null +++ b/settings.py @@ -0,0 +1,16 @@ +from functools import lru_cache + +from pydantic import ConfigDict, HttpUrl, PostgresDsn +from pydantic_settings import BaseSettings + + +class Settings(BaseSettings): + DB_DSN: PostgresDsn + BACKEND_URL: HttpUrl = "http://127.0.0.1:8000" + FETCHERS_UPDATE_DELAY_IN_SECONDS: int = 10 + model_config = ConfigDict(case_sensitive=True, env_file=".env", extra="ignore") + + +@lru_cache() +def get_settings(): + return Settings() diff --git a/tests/backend/api/conftest.py b/tests/backend/api/conftest.py deleted file mode 100644 index 36842e0..0000000 --- a/tests/backend/api/conftest.py +++ /dev/null @@ -1,27 +0,0 @@ -import pytest -import json - - -@pytest.fixture -def auth_user(client): - body = {"username": "test", "password": "test"} - res = client.post("/auth/register", data=json.dumps(body)) - assert res.status_code == 201 - return body - - -@pytest.fixture -def auth_header(client, auth_user): - beaver = client.post( - f"/auth/token", - data={ - "username": auth_user["username"], - "password": auth_user["password"], - "grant_type": "password", - }, - headers={"content-type": "application/x-www-form-urlencoded"}, - ) - assert 
beaver.status_code == 200 - auth_data = json.loads(beaver.content) - auth_headers = {"Authorization": f"Bearer {auth_data.get('access_token')}"} - return auth_headers diff --git a/tests/backend/api/test_alert.py b/tests/backend/api/test_alert.py index ebb5c18..2474fec 100644 --- a/tests/backend/api/test_alert.py +++ b/tests/backend/api/test_alert.py @@ -1,150 +1,147 @@ import json -import pytest + +import pytest_asyncio from starlette import status -from aciniformes_backend.settings import get_settings -from aciniformes_backend.serivce import alert_service, receiver_service, Config + +from aciniformes_backend.serivce.alert import PgAlertService +from aciniformes_backend.serivce.receiver import PgReceiverService +from settings import get_settings -def test_fake_service(fake_config): - s1 = alert_service() - s2 = receiver_service() - assert s1.session is None - assert s2.session is None - assert type(s1.repository) is dict - assert type(s2.repository) is dict +alert = { + "data": {"type": "string", "name": "string"}, + "filter": "string", +} -@pytest.fixture -def this_alert(): - body = { - "id": 666, - "data": {"type": "string", "name": "string"}, - "receiver": 0, - "filter": "string", - } - alert_service().repository[666] = body - return body +@pytest_asyncio.fixture +async def this_alert(dbsession): + global alert + _alert = await PgAlertService(dbsession).create(item=alert) + yield _alert class TestAlert: _url = "/alert" settings = get_settings() - Config.fake = True - s = alert_service() - def test_post_success(self, client): + def test_post_success(self, crud_client): body = { "data": {"type": "string", "name": "string"}, - "receiver": 0, "filter": "string", } - res = client.post(self._url, data=json.dumps(body)) + res = crud_client.post(self._url, json=body) res_body = res.json() assert res.status_code == status.HTTP_200_OK assert res_body["data"] == body["data"] - assert res_body["id"] is not None assert res_body["filter"] == body["filter"] - assert 
res_body["receiver"] == body["receiver"] - def test_get_by_id_success(self, client, this_alert): - body = this_alert - res = client.get(f"{self._url}/{body['id']}") + def test_get_by_id_success(self, crud_client, this_alert): + res = crud_client.get(f"{self._url}/{this_alert}") assert res.status_code == status.HTTP_200_OK res_body = res.json() - assert res_body["data"] == body["data"] - assert res_body["receiver"] == body["receiver"] - assert res_body["filter"] == body["filter"] + assert res_body["data"] == alert["data"] + assert res_body["filter"] == alert["filter"] - def test_delete_by_id_success(self, client, this_alert): - res = client.delete(f"{self._url}/{this_alert['id']}") + def test_delete_by_id_success(self, crud_client, this_alert): + res = crud_client.delete(f"{self._url}/{this_alert}") assert res.status_code == status.HTTP_200_OK - assert self.s.repository[666] is None + get = crud_client.get(f"{self._url}/{this_alert}") + assert get.status_code == status.HTTP_404_NOT_FOUND - def test_get_success(self, client, this_alert): - res = client.get(self._url) + def test_get_success(self, crud_client, this_alert): + res = crud_client.get(self._url) assert res.status_code == status.HTTP_200_OK res_body = res.json() assert len(res_body) + get = crud_client.get(f"{self._url}/{this_alert}") + assert get.json() in res_body - def test_patch_by_id_success(self, client, this_alert): + def test_patch_by_id_success(self, crud_client, this_alert): body = { - "data": {"type": "g", "name": "s"}, + "data": {"type": "string", "name": "string"}, + "filter": "string", } - res = client.patch(f"{self._url}/{this_alert['id']}", data=json.dumps(body)) + res = crud_client.patch(f"{self._url}/{this_alert}", data=json.dumps(body)) assert res.status_code == status.HTTP_200_OK res_body = res.json() assert res_body["data"] == body["data"] - assert self.s.repository[this_alert["id"]].data == body["data"] + get = crud_client.get(f"{self._url}/{this_alert}") + assert get.status_code == 
status.HTTP_200_OK + assert get.json() == res_body - def test_get_by_id_not_found(self, client, this_alert): - res = client.get(f"{self._url}/{888}") + def test_get_by_id_not_found(self, crud_client, this_alert): + res = crud_client.get(f"{self._url}/{this_alert+2}") assert res.status_code == status.HTTP_404_NOT_FOUND - def test_patch_by_id_not_found(self, client, this_alert): + def test_patch_by_id_not_found(self, crud_client, this_alert): body = { - "data": {}, + "data": {"type": "string", "name": "string"}, + "filter": "string", } - res = client.patch(f"{self._url}/{888}", data=json.dumps(body)) + res = crud_client.patch(f"{self._url}/{888}", data=json.dumps(body)) assert res.status_code == status.HTTP_404_NOT_FOUND -@pytest.fixture -def this_receiver(): - body = {"id": 66, "name": "string", "chat_id": 0} - receiver_service().repository[body["id"]] = body - return body +reciever = {"url": "https://google.com", "method": "post", "receiver_body": {}} + + +@pytest_asyncio.fixture +async def this_receiver(dbsession): + global reciever + _reciever = await PgReceiverService(dbsession).create(item=reciever) + yield _reciever class TestReceiver: _url = "/receiver" - Config.fake = True - s = receiver_service() - def test_post_success(self, client, auth_header): - body = {"name": "string", "chat_id": 0} - res = client.post(self._url, data=json.dumps(body), headers=auth_header) + def test_post_success(self, crud_client): + body = {"url": "https://google.com", "method": "post", "receiver_body": {}} + res = crud_client.post(self._url, json=body) assert res.status_code == status.HTTP_200_OK res_body = res.json() - assert res_body["name"] == body["name"] - assert res_body["id"] is not None - assert res_body["chat_id"] == body["chat_id"] + assert res_body["url"] == body["url"] + assert res_body["receiver_body"] == body["receiver_body"] - def test_get_by_id_success(self, client, this_receiver): - res = client.get(f"{self._url}/{this_receiver['id']}") + def 
test_get_by_id_success(self, crud_client, this_receiver): + res = crud_client.get(f"{self._url}/{this_receiver}") assert res.status_code == status.HTTP_200_OK res_body = res.json() - assert res_body["name"] == this_receiver["name"] - assert res_body["chat_id"] == this_receiver["chat_id"] + assert res_body["url"] == reciever["url"] + assert res_body["receiver_body"] == reciever["receiver_body"] - def test_delete_by_id_success(self, client, this_receiver, auth_header): - res = client.delete(f"{self._url}/{this_receiver['id']}", headers=auth_header) + def test_delete_by_id_success(self, crud_client, this_receiver): + res = crud_client.delete(f"{self._url}/{this_receiver}") assert res.status_code == status.HTTP_200_OK - assert self.s.repository[this_receiver["id"]] is None + res = crud_client.get(f"{self._url}/{this_receiver}") + assert res.status_code == status.HTTP_404_NOT_FOUND - def test_get_success(self, client, this_receiver): - res = client.get(self._url) + def test_get_success(self, crud_client, this_receiver): + res = crud_client.get(self._url) assert res.status_code == status.HTTP_200_OK assert len(res.json()) + get = crud_client.get(f"{self._url}/{this_receiver}") + assert get.json() in res.json() - def test_patch_by_id_success(self, client, this_receiver, auth_header): - body = {"name": "s", "chat_id": 11} - res = client.patch( - f"{self._url}/{this_receiver['id']}", + def test_patch_by_id_success(self, crud_client, this_receiver): + body = {"url": "https://google.ru", "method": "post", "receiver_body": {}} + res = crud_client.patch( + f"{self._url}/{this_receiver}", data=json.dumps(body), - headers=auth_header, ) assert res.status_code == status.HTTP_200_OK res_body = res.json() - assert res_body["name"] == body["name"] - assert res_body["chat_id"] == body["chat_id"] + assert res_body["url"] == body["url"] + assert res_body["receiver_body"] == body["receiver_body"] + get = crud_client.get(f"{self._url}/{this_receiver}") + assert get.json() == res.json() - 
def test_get_by_id_not_found(self, client): - res = client.get(f"{self._url}/{888}") + def test_get_by_id_not_found(self, crud_client): + res = crud_client.get(f"{self._url}/{888}") assert res.status_code == status.HTTP_404_NOT_FOUND - def test_patch_by_id_not_found(self, client, auth_header): - body = {"name": "st", "chat_id": 0} - res = client.patch( - f"{self._url}/{888}", data=json.dumps(body), headers=auth_header - ) + def test_patch_by_id_not_found(self, crud_client): + body = {"url": "https://nf.nf", "method": "post", "receiver_body": {}} + res = crud_client.patch(f"{self._url}/{888}", data=json.dumps(body)) assert res.status_code == status.HTTP_404_NOT_FOUND diff --git a/tests/backend/api/test_auth.py b/tests/backend/api/test_auth.py deleted file mode 100644 index 606c4a6..0000000 --- a/tests/backend/api/test_auth.py +++ /dev/null @@ -1,31 +0,0 @@ -import json - -import pytest -import json - -import sqlalchemy -from aciniformes_backend.models import Auth -from aciniformes_backend.serivce import auth_service -from starlette import status - - -def test_auth_service(fake_config): - s = auth_service() - assert s.session is None - assert type(s.repository) is list - - -@pytest.fixture -def registered_user(client): - body = {"username": "test", "password": "test"} - res = client.post("/auth/register", data=json.dumps(body)) - assert res.status_code == status.HTTP_201_CREATED - - -class TestAuth: - _url = "/auth" - - def test_create_user(self, dbsession, client): - body = {"username": "test", "password": "test"} - res = client.post("/auth/register", data=json.dumps(body)) - assert res.status_code == status.HTTP_201_CREATED diff --git a/tests/backend/api/test_fetcher.py b/tests/backend/api/test_fetcher.py index 24cad30..0dd6947 100644 --- a/tests/backend/api/test_fetcher.py +++ b/tests/backend/api/test_fetcher.py @@ -1,87 +1,95 @@ import json -import pytest +from copy import deepcopy + +import pytest_asyncio from starlette import status -from 
aciniformes_backend.serivce import fetcher_service, Config + +from aciniformes_backend.serivce.fetcher import PgFetcherService -def test_fake_service(fake_config): - s = fetcher_service() - assert s.session is None - assert type(s.repository) is dict +fetcher = { + "type_": "ping", + "address": "https://www.python.org", + "fetch_data": "string", + "delay_ok": 30, + "delay_fail": 40, +} -@pytest.fixture -def this_fetcher(): - body = { - "id": 6, - "name": "string", - "type_": "get_ok", - "address": "string", - "fetch_data": "string", - "metrics": {}, - "metric_name": "string", - "delay_ok": 0, - "delay_fail": 0, - } - fetcher_service().repository[body["id"]] = body - return body +@pytest_asyncio.fixture +async def this_fetcher(dbsession): + yield await PgFetcherService(dbsession).create(item=fetcher) class TestFetcher: _url = "/fetcher" - Config.fake = True - s = fetcher_service() - def test_post_success(self, client, auth_header): + def test_post_success(self, crud_client): body = { - "name": "string", - "type_": "get_ok", - "address": "string", + "type_": "get", + "address": "https://google.com", "fetch_data": "string", - "metrics": {}, - "metric_name": "string", - "delay_ok": 0, - "delay_fail": 0, + "delay_ok": 300, + "delay_fail": 30, } - res = client.post(self._url, data=json.dumps(body), headers=auth_header) + res = crud_client.post(self._url, json=body) assert res.status_code == status.HTTP_200_OK res_body = res.json() assert res_body["id"] is not None - def test_get_by_id_success(self, client, this_fetcher): - res = client.get(f"{self._url}/{this_fetcher['id']}") + def test_get_by_id_success(self, crud_client, this_fetcher): + res = crud_client.get(f"{self._url}/{this_fetcher}") assert res.status_code == status.HTTP_200_OK - assert res.json()["address"] == this_fetcher["address"] + _new_fetcher = deepcopy(fetcher) + for k, v in _new_fetcher.items(): + assert v == res.json()[k] - def test_delete_by_id_success(self, client, this_fetcher, auth_header): - res = 
client.delete(f"{self._url}/{this_fetcher['id']}", headers=auth_header) + def test_delete_by_id_success(self, crud_client, this_fetcher): + res = crud_client.delete(f"{self._url}/{this_fetcher}") assert res.status_code == status.HTTP_200_OK - assert self.s.repository[this_fetcher["id"]] is None + res = crud_client.get(f"{self._url}/{this_fetcher}") + assert res.status_code == status.HTTP_404_NOT_FOUND - def test_get_success(self, client, this_fetcher): - res = client.get(self._url) + def test_get_success(self, crud_client, this_fetcher): + res = crud_client.get(self._url) assert res.status_code == status.HTTP_200_OK assert len(res.json()) + get = crud_client.get(f"{self._url}/{this_fetcher}") + assert get.status_code == status.HTTP_200_OK + assert get.json() in res.json() - def test_patch_by_id_success(self, client, this_fetcher, auth_header): - body = {"name": "string", "type_": "post_ok", "delay_fail": 0} - res = client.patch( - f"{self._url}/{this_fetcher['id']}", - data=json.dumps(body), - headers=auth_header, + def test_patch_by_id_success(self, crud_client, this_fetcher): + body = { + "type_": "post", + "address": "https://api.test.profcomff.com/services/category", + "fetch_data": "string", + "delay_ok": 300, + "delay_fail": 30, + } + res = crud_client.patch( + f"{self._url}/{this_fetcher}", + json=body, ) assert res.status_code == status.HTTP_200_OK res_body = res.json() - assert res_body["name"] == body["name"] + assert res_body["address"] == body["address"] assert res_body["type_"] == body["type_"] + res = crud_client.get(f"{self._url}/{this_fetcher}") + assert res.status_code == status.HTTP_200_OK + for k, v in body.items(): + assert v == res.json()[k] - def test_get_by_id_not_found(self, client): - res = client.get(f"{self._url}/{888}") + def test_get_by_id_not_found(self, crud_client): + res = crud_client.get(f"{self._url}/{888}") assert res.status_code == status.HTTP_404_NOT_FOUND - def test_patch_by_id_not_found(self, client, auth_header): - body = 
{"name": "s"} - res = client.patch( - f"{self._url}/{888}", data=json.dumps(body), headers=auth_header - ) + def test_patch_by_id_not_found(self, crud_client): + body = { + "type_": "post", + "address": "https://api.test.profcomff.com/services/category", + "fetch_data": "string", + "delay_ok": 300, + "delay_fail": 30, + } + res = crud_client.patch(f"{self._url}/{888}", data=json.dumps(body)) assert res.status_code == status.HTTP_404_NOT_FOUND diff --git a/tests/backend/api/test_metric.py b/tests/backend/api/test_metric.py index b74e0eb..b2312c8 100644 --- a/tests/backend/api/test_metric.py +++ b/tests/backend/api/test_metric.py @@ -1,39 +1,44 @@ -import json -import pytest +import pytest_asyncio from starlette import status -from aciniformes_backend.serivce import metric_service, Config +from aciniformes_backend.serivce.metric import PgMetricService -@pytest.fixture -def this_metric(): - body = {"id": 44, "metrics": {}} - metric_service().repository[body["id"]] = body - return body + +metric = {"name": "string", "ok": True, "time_delta": 0} + + +@pytest_asyncio.fixture +async def this_metric(dbsession): + yield await PgMetricService(dbsession).create(item=metric) class TestMetric: _url = "/metric" - Config.fake = True - s = metric_service() - def test_post_success(self, client): - body = {"metrics": {}} - res = client.post(self._url, data=json.dumps(body)) + def test_post_success(self, crud_client): + body = {"name": "string", "ok": True, "time_delta": 0} + res = crud_client.post(self._url, json=body) assert res.status_code == status.HTTP_200_OK res_body = res.json() assert res_body["id"] is not None - assert res_body["metrics"] == body["metrics"] + assert res_body["name"] == body["name"] + assert res_body["ok"] == body["ok"] + assert res_body["time_delta"] == body["time_delta"] - def test_get_by_id_success(self, client, this_metric): - res = client.get(f"{self._url}/{this_metric['id']}") + def test_get_by_id_success(self, crud_client, this_metric): + res = 
crud_client.get(f"{self._url}/{this_metric}") assert res.status_code == status.HTTP_200_OK - assert res.json()["metrics"] == this_metric["metrics"] + for k, v in metric.items(): + assert v == res.json()[k] - def test_get_success(self, client, this_metric): - res = client.get(self._url) + def test_get_success(self, crud_client, this_metric): + res = crud_client.get(self._url) assert res.status_code == status.HTTP_200_OK assert len(res.json()) + get = crud_client.get(f"{self._url}/{this_metric}") + assert res.status_code == status.HTTP_200_OK + assert get.json() in res.json() - def test_get_by_id_not_found(self, client): - res = client.get(f"{self._url}/{333}") + def test_get_by_id_not_found(self, crud_client, this_metric): + res = crud_client.get(f"{self._url}/{this_metric+2}") assert res.status_code == status.HTTP_404_NOT_FOUND diff --git a/tests/backend/service/conftest.py b/tests/backend/service/conftest.py deleted file mode 100644 index 2f0af92..0000000 --- a/tests/backend/service/conftest.py +++ /dev/null @@ -1,42 +0,0 @@ -import pytest -from aciniformes_backend.serivce import ( - alert_service, - fetcher_service, - receiver_service, - metric_service, - Config, -) - - -@pytest.fixture -def pg_config(): - Config.fake = False - yield Config() - - -@pytest.fixture -def pg_alert_service(pg_config): - s = alert_service() - assert s.session is not None - yield s - - -@pytest.fixture -def pg_fetcher_service(pg_config): - s = fetcher_service() - assert s.session is not None - yield s - - -@pytest.fixture -def pg_receiver_service(pg_config): - s = receiver_service() - assert s.session is not None - yield s - - -@pytest.fixture -def pg_metric_service(pg_config): - s = metric_service() - assert s.session is not None - yield s diff --git a/tests/backend/service/test_alert_serivce.py b/tests/backend/service/test_alert_serivce.py index 63bddd6..3a0e326 100644 --- a/tests/backend/service/test_alert_serivce.py +++ b/tests/backend/service/test_alert_serivce.py @@ -1,29 +1,24 @@ 
-import json import pytest import sqlalchemy -from aciniformes_backend.routes.alert.alert import CreateSchema as AlertCreateSchema -from aciniformes_backend.routes.alert.reciever import ( - CreateSchema as ReceiverCreateSchema, -) -from aciniformes_backend.models import Alert, Receiver import aciniformes_backend.serivce.exceptions as exc +from aciniformes_backend.models import Alert, Receiver +from aciniformes_backend.routes.alert.alert import CreateSchema as AlertCreateSchema +from aciniformes_backend.routes.alert.reciever import CreateSchema as ReceiverCreateSchema +from aciniformes_backend.serivce.alert import PgAlertService +from aciniformes_backend.serivce.receiver import PgReceiverService @pytest.fixture def receiver_schema(): - body = {"name": "string", "chat_id": 0} + body = {"url": "https://google.com", "method": "post", "receiver_body": {}} schema = ReceiverCreateSchema(**body) return schema @pytest.fixture def db_receiver(dbsession, receiver_schema): - q = ( - sqlalchemy.insert(Receiver) - .values(**receiver_schema.dict(exclude_unset=True)) - .returning(Receiver) - ) + q = sqlalchemy.insert(Receiver).values(**receiver_schema.model_dump(exclude_unset=True)).returning(Receiver) receiver = dbsession.execute(q).scalar() dbsession.flush() yield receiver @@ -33,10 +28,9 @@ def db_receiver(dbsession, receiver_schema): @pytest.fixture -def alert_schema(receiver_schema, db_receiver): +def alert_schema(receiver_schema): body = { "data": {"type": "string", "name": "string"}, - "receiver": db_receiver.id_, "filter": "string", } schema = AlertCreateSchema(**body) @@ -45,11 +39,7 @@ def alert_schema(receiver_schema, db_receiver): @pytest.fixture def db_alert(db_receiver, dbsession, alert_schema): - q = ( - sqlalchemy.insert(Alert) - .values(**alert_schema.dict(exclude_unset=True)) - .returning(Alert) - ) + q = sqlalchemy.insert(Alert).values(**alert_schema.model_dump(exclude_unset=True)).returning(Alert) alert = dbsession.execute(q).scalar() dbsession.flush() yield 
alert @@ -60,71 +50,70 @@ def db_alert(db_receiver, dbsession, alert_schema): class TestReceiverService: @pytest.mark.asyncio - async def test_create(self, pg_receiver_service, receiver_schema, dbsession): - res = await pg_receiver_service.create(receiver_schema.dict()) + async def test_create(self, receiver_schema, dbsession): + res = await PgReceiverService(dbsession).create(item=receiver_schema.model_dump()) assert res is not None assert type(res) == int q = dbsession.query(Receiver).filter(Receiver.id_ == res).one_or_none() assert q is not None @pytest.mark.asyncio - async def test_get_all(self, pg_receiver_service, db_receiver, db_alert): - res = await pg_receiver_service.get_all() + async def test_get_all(self, db_receiver, dbsession): + res = await PgReceiverService(dbsession).get_all() assert len(res) assert type(res) is list assert type(res[0]) is Receiver @pytest.mark.asyncio - async def test_get_by_id(self, pg_receiver_service, db_receiver): - res = await pg_receiver_service.get_by_id(db_receiver.id_) + async def test_get_by_id(self, db_receiver, dbsession): + res = await PgReceiverService(dbsession).get_by_id(db_receiver.id_) assert res is not None - assert res.name == db_receiver.name + assert res.url == db_receiver.url with pytest.raises(exc.ObjectNotFound): - await pg_receiver_service.get_by_id(db_receiver.id_ + 1000) + await PgReceiverService(dbsession).get_by_id(db_receiver.id_ + 1000) @pytest.mark.asyncio - async def test_delete(self, pg_receiver_service, db_receiver): - await pg_receiver_service.delete(db_receiver.id_) + async def test_delete(self, db_receiver, dbsession): + await PgReceiverService(dbsession).delete(db_receiver.id_) @pytest.mark.asyncio - async def test_update(self, pg_receiver_service, db_receiver, dbsession): - res = await pg_receiver_service.update( - db_receiver.id_, {"name": "Alex", "chat_id": 11} + async def test_update(self, db_receiver, dbsession): + res = await PgReceiverService(dbsession).update( + db_receiver.id_, 
{"url": "Alex", "method": "post", "receiver_body": {}} ) - assert res.name == "Alex" - assert res.chat_id == 11 + assert res.url == "Alex" + assert res.receiver_body == {} class TestAlertService: @pytest.mark.asyncio - async def test_create(self, pg_alert_service, alert_schema, db_receiver): - res = await pg_alert_service.create( - alert_schema.dict(exclude_unset=True), + async def test_create(self, alert_schema, db_receiver, dbsession): + res = await PgAlertService(dbsession).create( + alert_schema.model_dump(exclude_unset=True), ) assert type(res) == int @pytest.mark.asyncio - async def test_get_all(self, pg_alert_service, db_alert): - res = await pg_alert_service.get_all() + async def test_get_all(self, db_alert, dbsession): + res = await PgAlertService(dbsession).get_all() assert len(res) assert type(res) is list assert type(res[0]) is Alert @pytest.mark.asyncio - async def test_get_by_id(self, pg_alert_service, db_alert): - res = await pg_alert_service.get_by_id(db_alert.id_) + async def test_get_by_id(self, dbsession, db_alert): + res = await PgAlertService(dbsession).get_by_id(db_alert.id_) assert res is not None - assert res.receiver == db_alert.receiver + assert res.data == db_alert.data + assert res.filter == db_alert.filter with pytest.raises(exc.ObjectNotFound): - await pg_alert_service.get_by_id(db_alert.id_ + 1000) + await PgAlertService(dbsession).get_by_id(db_alert.id_ + 1000) @pytest.mark.asyncio - async def test_delete(self, pg_alert_service, db_alert): - await pg_alert_service.delete(db_alert.id_) + async def test_delete(self, dbsession, db_alert): + await PgAlertService(dbsession).delete(db_alert.id_) @pytest.mark.asyncio - async def test_update(self, pg_alert_service, db_alert): - res = await pg_alert_service.update( - db_alert.id_, {"data": {"type": "stig", "name": "stig"}} - ) + async def test_update(self, dbsession, db_alert): + res = await PgAlertService(dbsession).update(db_alert.id_, {"data": {"type": "stig", "name": "stig"}}) assert 
res.data == {"type": "stig", "name": "stig"} diff --git a/tests/backend/service/test_fetcher_service.py b/tests/backend/service/test_fetcher_service.py index 86e7806..e54a210 100644 --- a/tests/backend/service/test_fetcher_service.py +++ b/tests/backend/service/test_fetcher_service.py @@ -1,23 +1,20 @@ import pytest import sqlalchemy -from aciniformes_backend.routes.fetcher import CreateSchema as FetcherCreateSchema from aciniformes_backend.models import Fetcher -import aciniformes_backend.serivce.exceptions as exc +from aciniformes_backend.routes.fetcher import CreateSchema as FetcherCreateSchema +from aciniformes_backend.serivce.fetcher import PgFetcherService @pytest.fixture def fetcher_schema(): body = { "id": 6, - "name": "string", - "type_": "get_ok", - "address": "string", + "type_": "ping", + "address": "https://www.python.org", "fetch_data": "string", - "metrics": {}, - "metric_name": "string", - "delay_ok": 0, - "delay_fail": 0, + "delay_ok": 30, + "delay_fail": 40, } schema = FetcherCreateSchema(**body) return schema @@ -25,11 +22,7 @@ def fetcher_schema(): @pytest.fixture() def db_fetcher(dbsession, fetcher_schema): - q = ( - sqlalchemy.insert(Fetcher) - .values(**fetcher_schema.dict(exclude_unset=True)) - .returning(Fetcher) - ) + q = sqlalchemy.insert(Fetcher).values(**fetcher_schema.dict(exclude_unset=True)).returning(Fetcher) fetcher = dbsession.scalar(q) dbsession.flush() yield fetcher @@ -40,30 +33,30 @@ def db_fetcher(dbsession, fetcher_schema): class TestFetcherService: @pytest.mark.asyncio - async def test_create(self, pg_fetcher_service, fetcher_schema, dbsession): - res = await pg_fetcher_service.create(fetcher_schema.dict(exclude_unset=True)) + async def test_create(self, dbsession, fetcher_schema): + res = await PgFetcherService(dbsession).create(fetcher_schema.model_dump(exclude_unset=True)) assert res is not None assert type(res) is int q = dbsession.scalar(sqlalchemy.select(Fetcher).where(Fetcher.id_ == res)) assert q is not None 
@pytest.mark.asyncio - async def test_get_all(self, pg_fetcher_service, db_fetcher): - res = await pg_fetcher_service.get_all() + async def test_get_all(self, dbsession, db_fetcher): + res = await PgFetcherService(dbsession).get_all() assert type(res) is list assert type(res[0]) is Fetcher @pytest.mark.asyncio - async def test_get_by_id(self, pg_fetcher_service, db_fetcher): - res = await pg_fetcher_service.get_by_id(db_fetcher.id_) - assert res.name == db_fetcher.name + async def test_get_by_id(self, dbsession, db_fetcher): + res = await PgFetcherService(dbsession).get_by_id(db_fetcher.id_) + assert res.address == db_fetcher.address assert res.type_ == db_fetcher.type_ @pytest.mark.asyncio - async def test_delete(self, pg_fetcher_service, db_fetcher): - await pg_fetcher_service.delete(db_fetcher.id_) + async def test_delete(self, dbsession, db_fetcher): + await PgFetcherService(dbsession).delete(db_fetcher.id_) @pytest.mark.asyncio - async def test_update(self, pg_fetcher_service, db_fetcher): - res = await pg_fetcher_service.update(db_fetcher.id_, {"type_": "post_ok"}) - assert res.type_ == "post_ok" + async def test_update(self, dbsession, db_fetcher): + res = await PgFetcherService(dbsession).update(db_fetcher.id_, {"type_": "post"}) + assert res.type_ == "post" diff --git a/tests/backend/service/test_metric_service.py b/tests/backend/service/test_metric_service.py index cea243d..3969dbe 100644 --- a/tests/backend/service/test_metric_service.py +++ b/tests/backend/service/test_metric_service.py @@ -1,25 +1,21 @@ import pytest import sqlalchemy -from aciniformes_backend.routes.mectric import CreateSchema as MetricCreateSchema from aciniformes_backend.models import Metric -import aciniformes_backend.serivce.exceptions as exc +from aciniformes_backend.routes.mectric import CreateSchema as MetricCreateSchema +from aciniformes_backend.serivce.metric import PgMetricService @pytest.fixture def metric_schema(): - body = {"id": 44, "metrics": {}} + body = {"id": 44, 
"name": "string", "ok": True, "time_delta": 0} schema = MetricCreateSchema(**body) return schema @pytest.fixture() def db_metric(dbsession, metric_schema): - q = ( - sqlalchemy.insert(Metric) - .values(**metric_schema.dict(exclude_unset=True)) - .returning(Metric) - ) + q = sqlalchemy.insert(Metric).values(**metric_schema.dict(exclude_unset=True)).returning(Metric) metric = dbsession.scalar(q) dbsession.flush() yield metric @@ -30,20 +26,21 @@ def db_metric(dbsession, metric_schema): class TestMetricService: @pytest.mark.asyncio - async def test_create(self, pg_metric_service, metric_schema, dbsession): - res = await pg_metric_service.create(metric_schema.dict(exclude_unset=True)) + async def test_create(self, metric_schema, dbsession): + res = await PgMetricService(dbsession).create(metric_schema.model_dump(exclude_unset=True)) assert res is not None assert type(res) is int q = dbsession.scalar(sqlalchemy.select(Metric).where(Metric.id_ == res)) assert q is not None @pytest.mark.asyncio - async def test_get_all(self, pg_metric_service): - res = await pg_metric_service.get_all() + async def test_get_all(self, dbsession): + res = await PgMetricService(dbsession).get_all() assert type(res) is list assert type(res[0]) is Metric @pytest.mark.asyncio - async def test_get_by_id(self, pg_metric_service, db_metric): - res = await pg_metric_service.get_by_id(db_metric.id_) - assert res.metrics == db_metric.metrics + async def test_get_by_id(self, dbsession, db_metric): + res = await PgMetricService(dbsession).get_by_id(db_metric.id_) + assert res.name == db_metric.name + assert res.ok == db_metric.ok diff --git a/tests/bot/__init__.py b/tests/bot/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/tests/backend/conftest.py b/tests/conftest.py similarity index 66% rename from tests/backend/conftest.py rename to tests/conftest.py index 36acf56..99a6d25 100644 --- a/tests/backend/conftest.py +++ b/tests/conftest.py @@ -1,25 +1,25 @@ import pytest -from 
sqlalchemy.orm import Session +from fastapi.testclient import TestClient from sqlalchemy import create_engine -from aciniformes_backend.settings import get_settings +from sqlalchemy.orm import Session + from aciniformes_backend.models.base import BaseModel from aciniformes_backend.routes.base import app -from aciniformes_backend.serivce import Config -from fastapi.testclient import TestClient +from settings import get_settings @pytest.fixture(scope="session") def engine(): - return create_engine( - get_settings().DB_DSN, execution_options={"isolation_level": "AUTOCOMMIT"} - ) + return create_engine(str(get_settings().DB_DSN), execution_options={"isolation_level": "AUTOCOMMIT"}) @pytest.fixture(scope="session") def tables(engine): BaseModel.metadata.create_all(engine) yield + # truncate all tables BaseModel.metadata.drop_all(engine) + BaseModel.metadata.create_all(engine) @pytest.fixture(scope="session") @@ -31,14 +31,9 @@ def dbsession(engine, tables): connection.close() -@pytest.fixture(scope="session") -def fake_config(): - Config.fake = True - conf = Config() - yield conf - - @pytest.fixture -def client(fake_config): +def crud_client(): client = TestClient(app) + settings = get_settings() + settings.BACKEND_URL = "http://testserver" return client diff --git a/ping/service/__init__.py b/tests/ping_service/service/__init__.py similarity index 100% rename from ping/service/__init__.py rename to tests/ping_service/service/__init__.py diff --git a/tests/ping_service/service/conftest.py b/tests/ping_service/service/conftest.py new file mode 100644 index 0000000..27364f8 --- /dev/null +++ b/tests/ping_service/service/conftest.py @@ -0,0 +1,11 @@ +import pytest + +from pinger_backend.service.scheduler import ApSchedulerService + + +@pytest.fixture +def pg_scheduler_service(): + s = ApSchedulerService() + s.backend_url = "http://testserver" + assert s.scheduler is not dict + yield s diff --git a/tests/ping_service/service/test_scheduler.py 
b/tests/ping_service/service/test_scheduler.py new file mode 100644 index 0000000..5f9b7ce --- /dev/null +++ b/tests/ping_service/service/test_scheduler.py @@ -0,0 +1,65 @@ +import pytest + +from aciniformes_backend.models import Fetcher, Metric +from pinger_backend.exceptions import AlreadyRunning + + +@pytest.fixture() +def fetcher_obj(): + yield Fetcher( + **{ + "type_": "ping", + "address": "https://www.google.com", + "fetch_data": "string", + "delay_ok": 30, + "delay_fail": 40, + } + ) + + +class TestSchedulerService: + @pytest.mark.asyncio + async def test_add_fetcher_success(self, pg_scheduler_service, fetcher_obj): + pg_scheduler_service.add_fetcher(fetcher_obj) + fetchers = pg_scheduler_service.get_jobs() + assert f'{fetcher_obj.address} None' in fetchers + + @pytest.mark.asyncio + async def test_delete_fetcher(self, pg_scheduler_service, fetcher_obj): + pg_scheduler_service.add_fetcher(fetcher_obj) + fetchers = pg_scheduler_service.get_jobs() + assert f"{fetcher_obj.address} {fetcher_obj.create_ts}" in fetchers + + pg_scheduler_service.delete_fetcher(fetcher_obj) + fetchers = pg_scheduler_service.get_jobs() + assert fetcher_obj not in fetchers + + @pytest.mark.asyncio + async def test_get_jobs(self, pg_scheduler_service): + res = pg_scheduler_service.get_jobs() + assert type(res) is list + + @pytest.mark.asyncio + async def test_start_already_started(self, pg_scheduler_service, crud_client): + await pg_scheduler_service.start() + with pytest.raises(AlreadyRunning): + await pg_scheduler_service.start() + pg_scheduler_service.stop() + + @pytest.mark.asyncio + async def test_ping_fail(self, pg_scheduler_service, fetcher_obj, dbsession): + fetcher = Fetcher( + **{ + "type_": "ping", + "address": "fasdlj", + "fetch_data": "string", + "delay_ok": 30, + "delay_fail": 40, + } + ) + pg_scheduler_service.add_fetcher(fetcher) + pg_scheduler_service._fetch_it(fetcher) + metrics = dbsession.query(Metric).all() + for metric in metrics: + if metric.name == 
fetcher.address: + assert not metric['ok']