Skip to content

Commit

Permalink
Add an API endpoint to reload the last-used model
Browse files Browse the repository at this point in the history
  • Loading branch information
anon-contributor-0 committed Jul 15, 2024
1 parent 0315122 commit 40410cf
Show file tree
Hide file tree
Showing 2 changed files with 21 additions and 2 deletions.
17 changes: 16 additions & 1 deletion extensions/openai/script.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@
from extensions.openai.utils import _start_cloudflared
from modules import shared
from modules.logging_colors import logger
from modules.models import unload_model
from modules.models import unload_model, load_last_model
from modules.text_generation import stop_everything_event

from .typing import (
Expand Down Expand Up @@ -325,6 +325,21 @@ async def handle_load_model(request_data: LoadModelRequest):
return HTTPException(status_code=400, detail="Failed to load the model.")


@app.post("/v1/internal/model/loadlast", dependencies=check_admin_key)
async def handle_load_last_model():
    '''
    This endpoint is experimental and may change in the future.
    Loads the last model used before it was unloaded.

    Returns a JSON "OK" on success; on any failure the traceback is
    printed server-side and an HTTP 400 is produced.
    '''
    try:
        load_last_model()
        return JSONResponse(content="OK")
    except Exception:
        # Narrow to Exception so KeyboardInterrupt/SystemExit still propagate;
        # a bare `except:` would swallow them and mask shutdown signals.
        traceback.print_exc()
        # NOTE(review): returning (not raising) HTTPException mirrors the
        # sibling handle_load_model handler above — kept for consistency,
        # though FastAPI convention is to raise it.
        return HTTPException(status_code=400, detail="Failed to load the last-used model.")


@app.post("/v1/internal/model/unload", dependencies=check_admin_key)
async def handle_unload_model():
unload_model()
Expand Down
6 changes: 5 additions & 1 deletion modules/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -396,9 +396,13 @@ def unload_model():
clear_torch_cache()


def load_last_model():
    '''
    Restore the model that was active before the most recent unload.

    Reads the name recorded in shared.previous_model_name (presumably set
    by unload_model() — confirm against that function) and loads it,
    publishing the result into shared.model / shared.tokenizer.
    '''
    model, tokenizer = load_model(shared.previous_model_name)
    shared.model, shared.tokenizer = model, tokenizer


def reload_model():
    '''
    Tear down the current model and load it again from scratch.

    unload_model() is assumed to stash the active model's name in
    shared.previous_model_name before clearing state — TODO confirm
    against unload_model()'s definition (not fully visible here).
    '''
    unload_model()
    # The rendered diff showed two consecutive load_model assignments
    # (pre- and post-commit lines fused); only the post-commit one —
    # loading shared.previous_model_name — is kept, matching load_last_model.
    shared.model, shared.tokenizer = load_model(shared.previous_model_name)


def unload_model_if_idle():
Expand Down

0 comments on commit 40410cf

Please sign in to comment.