@@ -1,16 +1,19 @@
+import threading
+from contextlib import asynccontextmanager
 from typing import List, Optional
 
+import psycopg2
 from fastapi import FastAPI
 from fastapi.middleware.cors import CORSMiddleware
 from sqlalchemy import select
 from sqlalchemy.orm import Session, subqueryload
 
 from .config import settings
 from .database import SessionLocal
-from .enum import FilterEnum
 from .dynamic_routes import create_dynamic_router
-from .models import Schema, AttributeDefinition, AttrType
+from .enums import FilterEnum
 from .general_routes import router
+from .models import Schema, AttributeDefinition, AttrType
 
 
 def load_schemas(db: Session) -> List[models.Schema]:
@@ -40,14 +43,101 @@ def load_dynamic_routes(db: Session, app: FastAPI):
         create_dynamic_router(schema=schema, app=app)
 
 
""" | ||
POSTGRES LISTEN/NOTIFY | ||
SQL script to create triggers | ||
============================== | ||
-- Create a trigger function to handle insert and update events | ||
CREATE OR REPLACE FUNCTION notify_changes() | ||
RETURNS TRIGGER AS $$ | ||
BEGIN | ||
-- Notify listening clients about the change | ||
NOTIFY schema_changes; | ||
RETURN NEW; | ||
END; | ||
$$ LANGUAGE plpgsql; | ||
-- Create a trigger for insert events | ||
CREATE TRIGGER schema_insert_trigger | ||
AFTER INSERT ON schemas | ||
FOR EACH ROW | ||
EXECUTE FUNCTION notify_changes(); | ||
-- Create a trigger for update events | ||
CREATE TRIGGER schema_update_trigger | ||
AFTER UPDATE ON schemas | ||
FOR EACH ROW | ||
EXECUTE FUNCTION notify_changes(); | ||
============================== | ||
""" | ||
|
||
|
||
+class Watcher(threading.Thread):
+
+    def __init__(self, app_reference):
+        super().__init__()
+        self.app_reference = app_reference
+
+    def run(self):
+        conn = psycopg2.connect(
+            dbname=settings.pg_db,
+            user=settings.pg_user,
+            password=settings.pg_password,
+            host=settings.pg_host,
+            port=settings.pg_port,
+        )
+
+        cur = conn.cursor()
+        channel = 'schema_changes'
+
+        # Execute the LISTEN query
+        listen_query = f"LISTEN {channel};"
+        cur.execute(listen_query)
+        conn.commit()
+
+        print(f"Listening for notifications on channel '{channel}'...")
+        try:
+            while True:
+                # Wait for notifications
+                conn.poll()
+                while conn.notifies:
+                    notify = conn.notifies.pop(0)
+                    #
+                    # On schema update/create, refresh the dynamic routes
+                    # and clear the openapi cache -> new one will be regenerated
+                    # during the next [GET /docs]
+                    #
+                    print(f"Received notification: {notify.payload}")
+                    with SessionLocal() as db:
+                        load_dynamic_routes(db=db, app=self.app_reference)
+                    self.app_reference.openapi_schema = None
+        finally:
+            # Close cursor and connection
+            cur.close()
+            conn.close()
+
+
+@asynccontextmanager
+async def lifespan(app: FastAPI):
+    # start the thread before the app startup -> can still be run as a single ASGI app.
+    watcher_thread = Watcher(app)
+    watcher_thread.start()
+    yield
+
+
 def create_app(session: Optional[Session] = None) -> FastAPI:
-    app = FastAPI(description=generate_api_description())
+    app = FastAPI(description=generate_api_description(), lifespan=lifespan)
     origins = ['*']
-    app.add_middleware(CORSMiddleware,
+    app.add_middleware(
+        CORSMiddleware,
         allow_origins=origins,
         allow_credentials=True,
         allow_methods=['*'],
-        allow_headers=['*'])
+        allow_headers=['*']
+    )
 
     if session:
         load_dynamic_routes(db=session, app=app)
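A side note on the watcher loop above: as committed, run() calls conn.poll() in a tight while True loop, which keeps a CPU core busy even when no notification arrives. psycopg2's documented LISTEN pattern waits on the connection's socket with select() before polling. The sketch below shows that variant using the names from this commit (load_dynamic_routes is the function defined earlier in this file); the 5-second timeout and the daemon=True flag are assumptions, not part of the change.

import select
import threading

import psycopg2

from .config import settings
from .database import SessionLocal


class Watcher(threading.Thread):

    def __init__(self, app_reference):
        # daemon=True is an assumption: it lets the interpreter exit without
        # joining the watcher thread on shutdown.
        super().__init__(daemon=True)
        self.app_reference = app_reference

    def run(self):
        conn = psycopg2.connect(
            dbname=settings.pg_db,
            user=settings.pg_user,
            password=settings.pg_password,
            host=settings.pg_host,
            port=settings.pg_port,
        )
        cur = conn.cursor()
        cur.execute("LISTEN schema_changes;")
        conn.commit()

        try:
            while True:
                # Sleep on the connection's socket until it becomes readable
                # (or 5 seconds pass) instead of calling poll() in a busy loop.
                if select.select([conn], [], [], 5) == ([], [], []):
                    continue
                conn.poll()
                while conn.notifies:
                    conn.notifies.pop(0)
                    # Same reaction as in the commit: rebuild the dynamic
                    # routes and drop the cached OpenAPI document.
                    with SessionLocal() as db:
                        load_dynamic_routes(db=db, app=self.app_reference)
                    self.app_reference.openapi_schema = None
        finally:
            cur.close()
            conn.close()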
@@ -1,8 +1,10 @@
import logging
from typing import Optional, Union
from dataclasses import make_dataclass

from fastapi import APIRouter, Depends, HTTPException, status, Query, Response
from fastapi.applications import FastAPI
from fastapi.openapi.utils import get_openapi
from fastapi_pagination import Page, Params
from sqlalchemy.exc import DataError
from sqlalchemy.orm.session import Session
@@ -11,7 +13,7 @@
 from .auth.enum import PermissionType
 from .auth.models import User
 from .database import get_db
-from .enum import FilterEnum, ModelVariant
+from .enums import FilterEnum, ModelVariant
 from .models import AttrType, Schema, Entity
 from .schemas.auth import RequirePermission
 from .schemas.entity import EntityModelFactory, EntityBaseSchema
@@ -357,7 +359,7 @@ def create_dynamic_router(schema: Schema, app: FastAPI, old_slug: str = None):
     route_update_entity(router=router, schema=schema)
     route_delete_entity(router=router, schema=schema)
 
-    router_routes = [(r.path, r.methods) for r in router.routes]
+    router_routes = [(f"/entity{r.path}", r.methods) for r in router.routes]
     routes_to_remove = []
     for route in app.routes:
         if (route.path, route.methods) in router_routes:
@@ -368,4 +370,3 @@ def create_dynamic_router(schema: Schema, app: FastAPI, old_slug: str = None):
         app.routes.remove(route)
 
     app.include_router(router, prefix='/entity')
-    app.openapi_schema = None
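For context on the openapi_schema handling above and in the watcher: FastAPI caches the generated OpenAPI document on the application instance and only rebuilds it when that attribute is empty, so setting it to None is enough for the next request to /docs or /openapi.json to pick up the new routes. Roughly, as a simplified sketch of FastAPI's behaviour rather than code from this commit:

from fastapi.openapi.utils import get_openapi

def openapi(self):
    # Simplified: FastAPI.openapi() builds the schema once and caches it on the
    # instance, so app.openapi_schema = None forces a rebuild on the next call.
    if not self.openapi_schema:
        self.openapi_schema = get_openapi(
            title=self.title,
            version=self.version,
            routes=self.routes,
        )
    return self.openapi_schema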
So sending the actual notification would be triggered by a custom function and some triggers on the Postgres side, rather than some Python code?
I guess that works, but if we go this route, then I think I'd like to have some code under version control that ensures that that function actually exists...
The way you wrote that SQL script, it should be safe to run on every startup, right?
Also, I think we're missing a third trigger for delete events...
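One way to keep that guarantee under version control is to ship the DDL with the application and execute it during startup, for example from the lifespan hook before the watcher thread starts. The sketch below is not part of this commit: ensure_schema_change_triggers is a hypothetical helper, CREATE OR REPLACE together with DROP TRIGGER IF EXISTS makes every statement safe to rerun on each startup, and a third trigger covers delete events. The return value of an AFTER trigger is ignored, so the existing RETURN NEW is harmless even though NEW is NULL on DELETE.

from sqlalchemy import text
from sqlalchemy.orm import Session

# Idempotent DDL: CREATE OR REPLACE and DROP TRIGGER IF EXISTS make it safe
# to execute on every application startup.
SCHEMA_CHANGE_TRIGGER_DDL = """
CREATE OR REPLACE FUNCTION notify_changes()
RETURNS TRIGGER AS $$
BEGIN
    NOTIFY schema_changes;
    RETURN NEW;  -- ignored for AFTER triggers, so this also works on DELETE
END;
$$ LANGUAGE plpgsql;

DROP TRIGGER IF EXISTS schema_insert_trigger ON schemas;
CREATE TRIGGER schema_insert_trigger
AFTER INSERT ON schemas
FOR EACH ROW
EXECUTE FUNCTION notify_changes();

DROP TRIGGER IF EXISTS schema_update_trigger ON schemas;
CREATE TRIGGER schema_update_trigger
AFTER UPDATE ON schemas
FOR EACH ROW
EXECUTE FUNCTION notify_changes();

-- The delete trigger raised in the review.
DROP TRIGGER IF EXISTS schema_delete_trigger ON schemas;
CREATE TRIGGER schema_delete_trigger
AFTER DELETE ON schemas
FOR EACH ROW
EXECUTE FUNCTION notify_changes();
"""


def ensure_schema_change_triggers(db: Session) -> None:
    # Hypothetical helper: run the versioned DDL so the notify function and
    # triggers are guaranteed to exist before the watcher starts listening.
    db.execute(text(SCHEMA_CHANGE_TRIGGER_DDL))
    db.commit()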