-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathapi.py
65 lines (40 loc) · 1.6 KB
/
api.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
# Third-party stack: LangChain for the LLM + retrieval chain, Gradio for the
# chat UI, FastAPI as the host application.
# NOTE(review): imports are scattered through this file (`os` at L82,
# `defaultdict` at L96, langchain modules inside create_chain); convention is
# to group them all here, stdlib first, then third-party.
from langchain import HuggingFaceHub
from langchain.chains import ConversationalRetrievalChain
import gradio as gr
from fastapi import FastAPI
# FastAPI application object; the Gradio UI is mounted onto it at "/gradio"
# at the bottom of the file.
app = FastAPI()
@app.get("/")
def read_main():
    """Root endpoint: identifies this service to anyone probing it."""
    banner = {"message": "This is GIKI's Admission Chatbot"}
    return banner
import os

# The HuggingFace Hub token must come from the environment. The original line
# here was `os.environ["HUGGINGFACEHUB_API_TOKEN"] =` with no right-hand side
# — a syntax error left behind when the hard-coded secret was stripped.
# Secrets must never be committed to source; fail fast if the token is absent.
if "HUGGINGFACEHUB_API_TOKEN" not in os.environ:
    raise RuntimeError(
        "Set the HUGGINGFACEHUB_API_TOKEN environment variable before starting the app."
    )

# Shared LLM used by the retrieval chain built in create_chain() below.
llm = HuggingFaceHub(
    repo_id="google/flan-t5-small",
    model_kwargs={"temperature": 0.5, "max_length": 500},
)
def create_chain():
    """Build the conversational retrieval chain over the persisted Chroma index.

    Returns a ConversationalRetrievalChain wired to the module-level `llm`
    and a retriever backed by the on-disk "chroma_db" vector store.
    """
    # Imported lazily, as in the original, so module import stays cheap.
    from langchain.embeddings import HuggingFaceEmbeddings
    from langchain.vectorstores import Chroma

    # Re-open the vector store that was persisted to ./chroma_db at index time.
    embedding_fn = HuggingFaceEmbeddings()
    vector_store = Chroma(persist_directory="chroma_db", embedding_function=embedding_fn)
    retriever = vector_store.as_retriever()

    return ConversationalRetrievalChain.from_llm(
        llm=llm,
        chain_type="stuff",
        retriever=retriever,
        return_source_documents=True,
    )
from collections import defaultdict
# Build the retrieval chain once at import time and keep per-user conversation
# state in process memory, keyed by user id.
# NOTE(review): chat_history is unbounded and process-local — it grows for the
# life of the process and is lost on restart; confirm that is acceptable.
chain = create_chain()
chat_history = defaultdict(list)
def generate_reply(input_text, user_id=12345):
    """Answer one user question via the retrieval chain and log the exchange.

    Args:
        input_text: The user's question.
        user_id: Key into the in-memory chat history and the name of the
            per-user transcript file. Defaults to 12345 (the id that was
            previously hard-coded) so existing callers are unaffected.

    Returns:
        The chain's answer string.
    """
    result = chain({'question': input_text, 'chat_history': chat_history[user_id]})
    answer = result['answer']
    chat_history[user_id].append((input_text, answer))
    # Append the exchange to the per-user transcript. `with` guarantees the
    # handle is closed even if the write raises (the original opened the file
    # with a bare open()/close() pair and leaked it on error).
    with open("{0}.txt".format(user_id), "a") as transcript:  # append mode
        transcript.write(input_text + " " + answer + "\n")
    return answer
# Gradio Interface
# Wire the reply function into a simple text-in / text-out Gradio UI and
# mount it on the FastAPI app under /gradio.
demo = gr.Interface(
    fn=generate_reply,       # function that produces the chatbot's answer
    inputs=gr.Textbox(),     # user's question
    outputs=gr.Textbox(),    # chatbot's reply
)
app = gr.mount_gradio_app(app, demo, path="/gradio")