Commit

added: log_conf.json to Dockerfile

glorenzo972 committed Apr 30, 2024
1 parent 7a75cc5 commit 580f7c3

Showing 5 changed files with 62 additions and 44 deletions.
5 changes: 5 additions & 0 deletions CHANGELOG.md
@@ -5,6 +5,11 @@
*Andrea Sponziello*
### **Copyright**: *Tiledesk SRL*

+## [2024-04-30]
+
+### 0.1.8
+- added: log_conf.json to Dockerfile
+
## [2024-04-24]

### 0.1.7
3 changes: 2 additions & 1 deletion Dockerfile
@@ -2,7 +2,8 @@ FROM python:3.10

WORKDIR /tiledesk-llm

-COPY log_conf.yaml /tiledesk-llm/log_conf.yaml
+# COPY log_conf.yaml /tiledesk-llm/log_conf.yaml
+COPY log_conf.json /tiledesk-llm/log_conf.json
COPY pyproject.toml /tiledesk-llm/pyproject.toml
COPY ./tilellm /tiledesk-llm/tilellm

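The diff only shows that log_conf.json is now copied into the image; the file's contents are not part of the visible hunk. As a rough sketch of what a JSON logging configuration for a uvicorn/FastAPI service typically contains (an assumption, not the actual file), the structure below could be dumped to log_conf.json and handed to uvicorn via --log-config, or applied directly with logging.config.dictConfig:

    # Hypothetical content for log_conf.json; the real file is not shown in this commit.
    import json
    import logging.config

    log_conf = {
        "version": 1,
        "disable_existing_loggers": False,
        "formatters": {
            "default": {"format": "%(asctime)s %(levelname)s %(name)s %(message)s"}
        },
        "handlers": {
            "console": {"class": "logging.StreamHandler", "formatter": "default"}
        },
        "root": {"level": "INFO", "handlers": ["console"]},
    }

    with open("log_conf.json", "w") as f:
        json.dump(log_conf, f, indent=2)  # yields a file `uvicorn --log-config` can read

    logging.config.dictConfig(log_conf)  # same effect inside a running Python process
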
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "tilellm"
version = "0.1.7"
version = "0.1.8"
description = "tiledesk for RAG"
authors = ["Gianluca Lorenzo <[email protected]>"]
repository = "https://github.com/Tiledesk/tiledesk-llm"
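
The version bump above is metadata only; nothing else in pyproject.toml changes. If needed, the installed version can be read back at runtime with the standard library (a generic sketch, not code from this repository):

    # Generic sketch: read the version that this pyproject.toml field controls.
    from importlib.metadata import PackageNotFoundError, version

    try:
        print(version("tilellm"))  # expected to print "0.1.8" once this commit is installed
    except PackageNotFoundError:
        print("tilellm is not installed in this environment")
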
60 changes: 38 additions & 22 deletions tilellm/__main__.py
@@ -1,10 +1,11 @@
import os
from contextlib import asynccontextmanager
from fastapi import (FastAPI,
-                     Depends, HTTPException)
+                     Depends,
+                     HTTPException)
from fastapi.responses import JSONResponse

-import argparse
+# import argparse

import aioredis
import asyncio
@@ -14,7 +15,13 @@
from dotenv import load_dotenv

from tilellm.shared.const import populate_constant
-from tilellm.models.item_model import ItemSingle, QuestionAnswer, PineconeItemToDelete, ScrapeStatusReq, ScrapeStatusResponse, PineconeIndexingResult
+from tilellm.models.item_model import (ItemSingle,
+                                       QuestionAnswer,
+                                       PineconeItemToDelete,
+                                       ScrapeStatusReq,
+                                       ScrapeStatusResponse,
+                                       PineconeIndexingResult)
+
from tilellm.store.redis_repository import redis_xgroup_create
from tilellm.controller.openai_controller import (ask_with_memory,
ask_with_sequence,
@@ -101,8 +108,9 @@ async def reader(channel: aioredis.client.Redis):
status_code=2
)
add_to_queue = await channel.set(f"{item.get('namespace')}/{item.get('id')}",
-                                              scrape_status_response.model_dump_json(),
-                                              ex=expiration_in_seconds)
+                                         scrape_status_response.model_dump_json(),
+                                         ex=expiration_in_seconds)
+
logger.debug(f"Start {add_to_queue}")
raw_webhook = item.get('webhook', "")
if '?' in raw_webhook:
@@ -137,7 +145,8 @@ async def reader(channel: aioredis.client.Redis):
async with aiohttp.ClientSession() as session:
res = await session.post(webhook,
json=pc_result.model_dump(exclude_none=True),
headers={"Content-Type": "application/json", "X-Auth-Token": token})
headers={"Content-Type": "application/json",
"X-Auth-Token": token})
print(res)
print(await res.json())
except Exception as ewh:
@@ -197,11 +206,14 @@ async def create_scrape_item_main(item: ItemSingle, redis_client: aioredis.clien
"""
from tilellm.shared import const
logger.debug(item)
-    res = await redis_client.xadd(const.STREAM_NAME, {"single": item.model_dump_json()} , id="*")
-    scrape_status_response = ScrapeStatusResponse(status_message = "Document added to queue",
-                                                  status_code = 0
+    res = await redis_client.xadd(const.STREAM_NAME, {"single": item.model_dump_json()}, id="*")
+    scrape_status_response = ScrapeStatusResponse(status_message="Document added to queue",
+                                                  status_code=0
)
-    addtoqueue = await redis_client.set(f"{item.namespace}/{item.id}", scrape_status_response.model_dump_json(), ex=expiration_in_seconds)
+    addtoqueue = await redis_client.set(f"{item.namespace}/{item.id}",
+                                        scrape_status_response.model_dump_json(),
+                                        ex=expiration_in_seconds)
+
logger.debug(res)

return {"message": f"Item {item.id} created successfully, more {res}"}
@@ -224,7 +236,7 @@ async def post_ask_with_memory_chain_main(question_answer: QuestionAnswer):
result = ask_with_sequence(question_answer)
logger.debug(result)
return JSONResponse(content=result)
-    #return result
+    # return result


@app.get("/api/list/namespace")
@@ -258,16 +270,18 @@ async def list_namespace_items_main(namespace: str):
logger.error(ex)
raise HTTPException(status_code=400, detail=repr(ex))


@app.post("/api/scrape/status")
-async def srape_status_main(scarpestatusreq: ScrapeStatusReq, redis_client: aioredis.client.Redis = Depends(get_redis_client) ):
+async def scrape_status_main(scrape_status_req: ScrapeStatusReq,
+                             redis_client: aioredis.client.Redis = Depends(get_redis_client)):
try:
-        retrieved_data = await redis_client.get(f"{scarpestatusreq.namespace}/{scarpestatusreq.id}")
+        retrieved_data = await redis_client.get(f"{scrape_status_req.namespace}/{scrape_status_req.id}")
if retrieved_data:
-            scrapestatus_response = ScrapeStatusResponse.model_validate(json.loads(retrieved_data.decode('utf-8')))
+            scrape_status_response = ScrapeStatusResponse.model_validate(json.loads(retrieved_data.decode('utf-8')))
else:
            # FIXME retrieve the id and namespace from pinecone...
raise Exception(f"Not Found, id: {scarpestatusreq.id}, namespace: {scarpestatusreq.namespace}")
return JSONResponse(content=scrapestatus_response.model_dump())
raise Exception(f"Not Found, id: {scrape_status_req.id}, namespace: {scrape_status_req.namespace}")
return JSONResponse(content=scrape_status_response.model_dump())
except Exception as ex:
raise HTTPException(status_code=400, detail=repr(ex))

@@ -281,13 +295,13 @@ async def delete_namespace_main(namespace: str):
"""
try:
result = await delete_namespace(namespace)
return JSONResponse(content={"message":f"Namespace {namespace} deleted"})
return JSONResponse(content={"message": f"Namespace {namespace} deleted"})
except Exception as ex:
-        raise HTTPException(status_code=400, detail=repr(ex) )
+        raise HTTPException(status_code=400, detail=repr(ex))


@app.delete("/api/id/{metadata_id}/namespace/{namespace}")
-async def delete_item_id_namespace_main(metadata_id: str, namespace: str ):
+async def delete_item_id_namespace_main(metadata_id: str, namespace: str):
"""
Delete items from namespace identified by id and namespace
:param metadata_id:
@@ -304,7 +318,9 @@ async def delete_item_id_namespace_main(metadata_id: str, namespace: str ):
raise HTTPException(status_code=400, detail=repr(ex))


@app.post("/api/delete/id", deprecated=True, description="This endpoint is deprecated and is no longer supported. Use method DELETE /api/id/{id}/namespace/{namespace}")
@app.post("/api/delete/id", deprecated=True,
description="This endpoint is deprecated and is no longer supported. "
"Use method DELETE /api/id/{id}/namespace/{namespace}")
async def delete_item_id_namespace_post(item_to_delete: PineconeItemToDelete):
"""
Delete items from namespace given document id via POST.
@@ -314,12 +330,12 @@ async def delete_item_id_namespace_post(item_to_delete: PineconeItemToDelete):
try:
metadata_id = item_to_delete.id
namespace = item_to_delete.namespace
logger.info(f"cancellazione id {metadata_id} dal namespace {namespace}")
logger.info(f"delete of id {metadata_id} dal namespace {namespace}")
result = await delete_id_from_namespace(metadata_id, namespace)

return JSONResponse(content={"success": True, "message": f"ids {metadata_id} in Namespace {namespace} deleted"})
except Exception as ex:
return JSONResponse(content={"success": True, "message": f"ids {metadata_id} in Namespace {namespace} non deleted due to {repr(ex)}"})
return JSONResponse(content={"success": True, "message": f"ids {metadata_id} in Namespace {namespace} not deleted due to {repr(ex)}"})
# raise HTTPException(status_code=400, detail=repr(ex))


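Beyond the rename (srape_status_main becomes scrape_status_main, with matching variable names), the behaviour this file implements is a simple status handshake: the worker stores a ScrapeStatusResponse under the Redis key "{namespace}/{id}" with an expiration, and /api/scrape/status reads the same key back. A minimal sketch of that round trip reusing the project's model; the client construction and the 3600-second TTL are assumptions, since the real app injects its client with Depends(get_redis_client):

    # Sketch of the status round trip used in __main__.py (local Redis assumed).
    import asyncio
    import json

    import aioredis  # same client library imported by __main__.py

    from tilellm.models.item_model import ScrapeStatusResponse


    async def demo_status_roundtrip(namespace: str, item_id: str) -> ScrapeStatusResponse:
        redis_client = aioredis.from_url("redis://localhost:6379")

        # Writer side: what the stream reader does after queueing a document.
        status = ScrapeStatusResponse(status_message="Document added to queue", status_code=0)
        await redis_client.set(f"{namespace}/{item_id}", status.model_dump_json(), ex=3600)

        # Reader side: what /api/scrape/status does with the same key.
        raw = await redis_client.get(f"{namespace}/{item_id}")
        return ScrapeStatusResponse.model_validate(json.loads(raw.decode("utf-8")))


    if __name__ == "__main__":
        print(asyncio.run(demo_status_roundtrip("my-namespace", "doc-1")))
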
36 changes: 16 additions & 20 deletions tilellm/models/item_model.py
@@ -1,4 +1,4 @@
-from pydantic import BaseModel, Field, validator, field_validator, ValidationError, BaseConfig
+from pydantic import BaseModel, Field, field_validator, ValidationError
from typing import Dict, Optional, List
import datetime

@@ -7,10 +7,10 @@ class ItemSingle(BaseModel):
id: str
source: str | None = None
type: str | None = None
-    content: str | None =None
+    content: str | None = None
gptkey: str | None = None
embedding: str = Field(default_factory=lambda: "text-embedding-ada-002")
-    namespace: str | None =None
+    namespace: str | None = None
webhook: str = Field(default_factory=lambda: "")


@@ -24,7 +24,7 @@ class MetadataItem(BaseModel):
class ChatEntry(BaseModel):
question: str
answer: str
-    #metadata: Optional[Dict[str, str]] = None # Optional field for additional data
+    # metadata: Optional[Dict[str, str]] = None # Optional field for additional data


class ChatHistory(BaseModel):
@@ -48,13 +48,13 @@ class QuestionAnswer(BaseModel):
question: str
namespace: str
gptkey: str
-    model: str =Field(default="gpt-3.5-turbo")
+    model: str = Field(default="gpt-3.5-turbo")
temperature: float = Field(default=0.0)
top_k: int = Field(default=5)
max_tokens: int = Field(default=128)
embedding: str = Field(default_factory=lambda: "text-embedding-ada-002")
system_context: Optional[str] = None
-    chat_history_dict : Optional[Dict[str, ChatEntry]] = None
+    chat_history_dict: Optional[Dict[str, ChatEntry]] = None

@field_validator("temperature")
def temperature_range(cls, v):
@@ -73,15 +73,15 @@ def top_k_range(cls, v):

class RetrievalResult(BaseModel):
answer: str = Field(default="No answer")
-    sources: Optional[List[str]]|None =None
-    source: str |None= None
-    id: str |None= None
+    sources: Optional[List[str]] | None = None
+    source: str | None = None
+    id: str | None = None
namespace: str
-    ids: Optional[List[str]]|None =None
+    ids: Optional[List[str]] | None = None
prompt_token_size: int = Field(default=0)
success: bool = Field(default=False)
-    error_message: Optional[str]|None =None
-    chat_history_dict:Optional[Dict[str, ChatEntry]]
+    error_message: Optional[str] | None = None
+    chat_history_dict: Optional[Dict[str, ChatEntry]]


class PineconeQueryResult(BaseModel):
@@ -108,13 +108,13 @@ class ScrapeStatusReq(BaseModel):


class ScrapeStatusResponse(BaseModel):
-    status_message: str = Field(default="Crawling is not started")
-    status_code: int = Field(default=0)
-    queue_order: int = Field(default=-1)
+    status_message: str = Field(default="Crawling is not started")
+    status_code: int = Field(default=0)
+    queue_order: int = Field(default=-1)


class PineconeIndexingResult(BaseModel):
# {"id": f"{id}", "chunks": f"{len(chuncks)}", "total_tokens": f"{total_tokens}", "cost": f"{cost:.6f}"}
# {"id": f"{id}", "chunks": f"{len(chunks)}", "total_tokens": f"{total_tokens}", "cost": f"{cost:.6f}"}
id: str | None = None
chunks: int | None = None
total_tokens: int | None = None
@@ -131,7 +131,3 @@ class PineconeItemNamespaceResult(BaseModel):

class PineconeNamespaceResult(BaseModel):
namespaces: Optional[List[PineconeItemNamespaceResult]]
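
The model edits above are stylistic (PEP 8 spacing, plus dropping the pydantic v1-era validator and BaseConfig imports), so the public shape of the models is unchanged. For orientation, a quick usage sketch of two of them; the field values are invented and the defaults come from the Field declarations visible in this diff:

    # Illustrative only; values are made up.
    from tilellm.models.item_model import QuestionAnswer, ScrapeStatusResponse

    qa = QuestionAnswer(
        question="What is Tiledesk?",
        namespace="my-namespace",
        gptkey="sk-placeholder",  # made-up key; the field is required
    )
    print(qa.model, qa.temperature, qa.top_k, qa.max_tokens)
    # -> gpt-3.5-turbo 0.0 5 128  (all defaults)

    status = ScrapeStatusResponse()  # every field has a default
    print(status.model_dump_json())
    # -> roughly {"status_message":"Crawling is not started","status_code":0,"queue_order":-1}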



