Merge pull request #76 from Kiln-AI/main
Docs update
scosman authored Dec 15, 2024
2 parents 5029250 + bd6c4cf commit 0a47a0a
Showing 48 changed files with 2,526 additions and 412 deletions.
1 change: 1 addition & 0 deletions .coveragerc
@@ -2,3 +2,4 @@
omit =
**/test_*.py
libs/core/kiln_ai/adapters/ml_model_list.py
conftest.py
1 change: 0 additions & 1 deletion .github/workflows/build_and_test.yml
@@ -12,7 +12,6 @@ jobs:
- "3.10"
- "3.11"
- "3.12"
- "3.13"

steps:
- uses: actions/checkout@v4
19 changes: 16 additions & 3 deletions .github/workflows/build_desktop.yml
@@ -36,9 +36,22 @@ jobs:
- name: Install the project
run: uv sync

# Compress MacOS app param ignored on Windows
- run: uv run bash ./app/desktop/build_desktop_app.sh --compress-mac-app
- uses: actions/upload-artifact@v4
# Compress MacOS app param ignored on other platforms
- name: Build Desktop App
run: uv run bash ./app/desktop/build_desktop_app.sh --compress-mac-app

- name: Build Windows Installer
if: matrix.os == 'windows-latest'
uses: Minionguyjpro/[email protected]
with:
path: ./app/desktop/WinInnoSetup.iss

- name: Copy Windows Installer
if: matrix.os == 'windows-latest'
run: cp ./app/desktop/Output/kilnsetup.exe ./app/desktop/build/dist/Kiln.Windows.Installer.exe

- name: Upload Build
uses: actions/upload-artifact@v4
with:
name: kiln-desktop-${{ runner.os }}-${{ runner.arch }}
path: ./app/desktop/build/dist/*
2 changes: 1 addition & 1 deletion .github/workflows/test_count.yml
@@ -18,7 +18,7 @@ jobs:
enable-cache: true

- name: Set up Python
run: uv python install
run: uv python install 3.12

- name: Install the project
run: uv sync --all-extras --dev
68 changes: 44 additions & 24 deletions README.md

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion app/desktop/WinInnoSetup.iss
@@ -1,7 +1,7 @@
; Script generated by the Inno Setup Script Wizard.
; SEE THE DOCUMENTATION FOR DETAILS ON CREATING INNO SETUP SCRIPT FILES!

#define MyAppPath "APP_PATH_TEMPLATE"
#define MyAppPath "build\dist\Kiln"
#define MyAppName "Kiln"
#define MyAppVersion "0.5.6"
#define MyAppPublisher "Chesterfield Laboratories Inc"
16 changes: 15 additions & 1 deletion app/desktop/desktop_server.py
@@ -1,9 +1,13 @@
import contextlib
import threading
import time
from contextlib import asynccontextmanager

import kiln_server.server as kiln_server
import uvicorn
from fastapi import FastAPI
from kiln_ai.datamodel import set_strict_mode as set_strict_mode_datamodel
from kiln_ai.datamodel import strict_mode as strict_mode_datamodel

from app.desktop.studio_server.data_gen_api import connect_data_gen_api
from app.desktop.studio_server.finetune_api import connect_fine_tune_api
@@ -14,8 +18,18 @@
from app.desktop.studio_server.webhost import connect_webhost


@asynccontextmanager
async def lifespan(app: FastAPI):
# Set strict mode on startup
original_strict_mode = strict_mode_datamodel()
set_strict_mode_datamodel(True)
yield
# Reset strict mode on shutdown
set_strict_mode_datamodel(original_strict_mode)


def make_app():
app = kiln_server.make_app()
app = kiln_server.make_app(lifespan=lifespan)
connect_provider_api(app)
connect_prompt_api(app)
connect_repair_api(app)
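The new lifespan hook wraps the whole server run: strict datamodel mode is switched on before the app starts serving requests and restored to its previous value on shutdown. Below is a minimal, self-contained sketch of the same FastAPI lifespan pattern; the STRICT_MODE flag and its get/set helpers are stand-ins for illustration, not the actual kiln_ai.datamodel functions.

from contextlib import asynccontextmanager

from fastapi import FastAPI

# Stand-in for a library-level flag (Kiln uses kiln_ai.datamodel's strict mode helpers)
STRICT_MODE = False


def strict_mode() -> bool:
    return STRICT_MODE


def set_strict_mode(value: bool) -> None:
    global STRICT_MODE
    STRICT_MODE = value


@asynccontextmanager
async def lifespan(app: FastAPI):
    # Runs once, before the server accepts requests
    original = strict_mode()
    set_strict_mode(True)
    yield  # the app serves requests while suspended here
    # Runs on shutdown: restore whatever was set before
    set_strict_mode(original)


app = FastAPI(lifespan=lifespan)

Restoring the original value on shutdown keeps the flag from leaking into whatever else runs in the same process after the server stops (tests, for example).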
19 changes: 3 additions & 16 deletions app/desktop/studio_server/finetune_api.py
@@ -138,10 +138,7 @@ async def finetune(
project_id: str, task_id: str, finetune_id: str
) -> FinetuneWithStatus:
task = task_from_id(project_id, task_id)
finetune = next(
(finetune for finetune in task.finetunes() if finetune.id == finetune_id),
None,
)
finetune = Finetune.from_id_and_parent_path(finetune_id, task.path)
if finetune is None:
raise HTTPException(
status_code=404,
@@ -227,14 +224,7 @@ async def create_finetune(
)
finetune_adapter_class = finetune_registry[request.provider]

dataset = next(
(
split
for split in task.dataset_splits()
if split.id == request.dataset_id
),
None,
)
dataset = DatasetSplit.from_id_and_parent_path(request.dataset_id, task.path)
if dataset is None:
raise HTTPException(
status_code=404,
@@ -281,10 +271,7 @@ async def download_dataset_jsonl(
detail=f"Dataset format '{format_type}' not found",
)
task = task_from_id(project_id, task_id)
dataset = next(
(split for split in task.dataset_splits() if split.id == dataset_id),
None,
)
dataset = DatasetSplit.from_id_and_parent_path(dataset_id, task.path)
if dataset is None:
raise HTTPException(
status_code=404,
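All three endpoints in this file now fetch a single child record by ID through from_id_and_parent_path instead of listing every finetune or dataset split and scanning for a match with next(...). The helper itself lives in kiln_ai.datamodel and is not shown in this diff; the sketch below is a hypothetical illustration of the lookup shape, assuming child records are stored as JSON files under the parent task's path.

import json
from pathlib import Path


class DatasetSplit:
    # Hypothetical stand-in for the kiln_ai datamodel class
    def __init__(self, id: str, path: Path, data: dict):
        self.id = id
        self.path = path
        self.data = data

    @classmethod
    def from_id_and_parent_path(cls, id: str, parent_path: Path | None) -> "DatasetSplit | None":
        # Return the one matching child, or None if the parent or child is missing
        if parent_path is None:
            return None
        for child_file in Path(parent_path).glob("dataset_splits/*/*.json"):
            data = json.loads(child_file.read_text())
            if data.get("id") == id:
                return cls(id=id, path=child_file, data=data)
        return None

Whatever the real storage layout, the effect of the change is that the lookup logic moves into the datamodel rather than being repeated at each call site, while the existing 404 handling in the API layer stays the same.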
60 changes: 56 additions & 4 deletions app/desktop/studio_server/provider_api.py
@@ -26,10 +26,21 @@
from pydantic import BaseModel, Field


async def connect_ollama() -> OllamaConnection:
async def connect_ollama(custom_ollama_url: str | None = None) -> OllamaConnection:
# Tags is a list of Ollama models. Proves Ollama is running, and models are available.
if (
custom_ollama_url
and not custom_ollama_url.startswith("http://")
and not custom_ollama_url.startswith("https://")
):
raise HTTPException(
status_code=400,
detail="Invalid Ollama URL. It must start with http:// or https://",
)

try:
tags = requests.get(ollama_base_url() + "/api/tags", timeout=5).json()
base_url = custom_ollama_url or ollama_base_url()
tags = requests.get(base_url + "/api/tags", timeout=5).json()
except requests.exceptions.ConnectionError:
raise HTTPException(
status_code=417,
@@ -48,6 +59,10 @@ async def connect_ollama() -> OllamaConnection:
detail="Failed to parse Ollama data - unsure which models are installed.",
)

# Save the custom Ollama URL if used to connect
if custom_ollama_url and custom_ollama_url != Config.shared().ollama_base_url:
Config.shared().save_setting("ollama_base_url", custom_ollama_url)

return ollama_connection


@@ -133,11 +148,18 @@ async def get_available_models() -> List[AvailableModels]:
if fine_tuned_models:
models.append(fine_tuned_models)

# Add any custom models
custom = custom_models()
if custom:
models.append(custom)

return models

@app.get("/api/provider/ollama/connect")
async def connect_ollama_api() -> OllamaConnection:
return await connect_ollama()
async def connect_ollama_api(
custom_ollama_url: str | None = None,
) -> OllamaConnection:
return await connect_ollama(custom_ollama_url)

@app.post("/api/provider/connect_api_key")
async def connect_api_key(payload: dict):
@@ -436,6 +458,36 @@ def model_from_ollama_tag(
return None, None


def custom_models() -> AvailableModels | None:
custom_model_ids = Config.shared().custom_models
if not custom_model_ids or len(custom_model_ids) == 0:
return None

models: List[ModelDetails] = []
for model_id in custom_model_ids:
try:
provider_id = model_id.split("::", 1)[0]
model_name = model_id.split("::", 1)[1]
models.append(
ModelDetails(
id=model_id,
name=f"{provider_name_from_id(provider_id)}: {model_name}",
supports_structured_output=False,
supports_data_gen=False,
untested_model=True,
)
)
except Exception as e:
# Continue on to the rest
print(f"Error processing custom model {model_id}: {e}")

return AvailableModels(
provider_name="Custom Models",
provider_id=ModelProviderName.kiln_custom_registry,
models=models,
)


def all_fine_tuned_models() -> AvailableModels | None:
# Add any fine tuned models
models: List[ModelDetails] = []
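Two additions show up in this file: connect_ollama accepts an optional custom URL (validated to require an http:// or https:// scheme, then persisted with save_setting once a connection succeeds), and custom_models() surfaces user-registered models whose IDs use a provider_id::model_name format. The client-side sketch below exercises both; the localhost:8757 address is an assumption about where the local Kiln server listens, and the model ID is an example value, neither taken from this diff.

import requests

BASE = "http://localhost:8757"  # assumed local Kiln server address

# Connect to an Ollama instance on a non-default host/port.
# FastAPI exposes the custom_ollama_url argument as a query parameter of the same name.
resp = requests.get(
    f"{BASE}/api/provider/ollama/connect",
    params={"custom_ollama_url": "http://192.168.1.10:11434"},
    timeout=10,
)
print(resp.status_code, resp.json())

# Custom model IDs split into provider and model name, as custom_models() does
model_id = "openai::my-private-model"  # example value
provider_id, model_name = model_id.split("::", 1)
print(provider_id, model_name)

Per the diff, these custom entries are listed with untested_model=True and without structured-output or data-gen support flags.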
9 changes: 7 additions & 2 deletions app/desktop/studio_server/repair_api.py
@@ -41,8 +41,13 @@ async def run_repair(
evaluator_feedback=input.evaluator_feedback,
)

model_name = input.model_name or run.output.source.properties.get("model_name")
provider = input.provider or run.output.source.properties.get("model_provider")
source_properties = (
run.output.source.properties
if run.output.source and run.output.source.properties
else {}
)
model_name = input.model_name or source_properties.get("model_name")
provider = input.provider or source_properties.get("model_provider")
if (
not model_name
or not provider
8 changes: 6 additions & 2 deletions app/desktop/studio_server/settings_api.py
@@ -1,15 +1,19 @@
from typing import Any

from fastapi import FastAPI
from kiln_ai.utils.config import Config


def connect_settings(app: FastAPI):
@app.post("/api/settings")
def update_settings(new_settings: dict[str, int | float | str | bool | None]):
def update_settings(
new_settings: dict[str, int | float | str | bool | list | None],
):
Config.shared().update_settings(new_settings)
return Config.shared().settings(hide_sensitive=True)

@app.get("/api/settings")
def read_settings():
def read_settings() -> dict[str, Any]:
settings = Config.shared().settings(hide_sensitive=True)
return settings

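The accepted settings value type now includes list, which lets array-valued settings round-trip through this endpoint; the custom_models list read by provider_api.py is the obvious consumer. A small client sketch follows, again assuming a local server address and assuming the custom_models config attribute maps to a settings key of the same name.

import requests

BASE = "http://localhost:8757"  # assumed local Kiln server address

# POST a partial settings update; list values are now accepted
updated = requests.post(
    f"{BASE}/api/settings",
    json={"custom_models": ["openai::my-private-model"]},  # assumed key name, example value
    timeout=10,
).json()

# GET returns the settings dict with sensitive values hidden
settings = requests.get(f"{BASE}/api/settings", timeout=10).json()
print(settings.get("custom_models"))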