Commit

Message history context (#64)
* feat: rework guidelines

* refactor: rename config file format

* feat: history context and gpt extractor

* fix: enforce answers from context

* feat: static context window

* feat: enforce token limit in chat API call
Emmanuel-Develops authored Mar 18, 2024
1 parent 09bcf47 commit 6a896d2
Showing 14 changed files with 270 additions and 65 deletions.
1 change: 1 addition & 0 deletions package.json
@@ -35,6 +35,7 @@
"next-axiom": "^0.17.0",
"next-connect": "^0.13.0",
"openai": "^3.2.1",
"openai-chat-tokens": "^0.2.8",
"react": "18.2.0",
"react-device-detect": "^2.2.3",
"react-dom": "18.2.0",
2 changes: 1 addition & 1 deletion src/components/chat/ChatScreen.tsx
@@ -4,7 +4,7 @@ import Rating from "@/components/rating/Rating";
import authorsConfig, {
AUTHOR_QUERY,
deriveAuthorIntroduction,
} from "@/config/authorsConfig";
} from "@/config/authors-config";
import {
Box,
Button,
2 changes: 1 addition & 1 deletion src/components/home/Home.tsx
@@ -1,4 +1,4 @@
import authorsConfig from "@/config/authorsConfig";
import authorsConfig from "@/config/authors-config";
import { PromptAction } from "@/types";
import {
Box,
File renamed without changes.
23 changes: 23 additions & 0 deletions src/config/chatAPI-config.ts
@@ -0,0 +1,23 @@
import ERROR_MESSAGES from "./error-config";

export const COMPLETION_URL = "https://api.openai.com/v1/chat/completions"

export const OPENAI_EXTRACTOR_MODEL = "gpt-3.5-turbo"

export const extractorSystemPrompt = `
You are a helpful assistant. You are given a list of user questions; the questions serve as context. Giving priority to ONLY the LAST QUESTION and the context from any relevant previous questions, what are the most relevant keywords that can be used in a search engine to find an answer to the last question? Return the minimum number of relevant keywords in a json object: {keywords: 'keyword1, keyword2, ...'}
`;

export const guidelines = {
BASE_INSTRUCTION:
"You are an AI assistant providing helpful answers. You are given the following extracted parts of a long document called CONTEXT BLOCK and a conversation. Provide a conversational detailed answer in the same writing style as based on the context provided. DO NOT include any external references or links in the answers.",
NO_ANSWER: `If you are absolutely certain that the answer cannot be found in the CONTEXT BLOCK, just say this phrase '${ERROR_MESSAGES.NO_ANSWER_WITH_LINKS}' EXACTLY. DO NOT try to make up an answer that is not in the CONTEXT BLOCK.`,
UNRELATED_QUESTION: `If the question is not related to the context, say this phrase EXACTLY '${ERROR_MESSAGES.NO_ANSWER}'`,
LINKING: `DO NOT explicitly mention the existence of the context provided; however, references can and should be made to the links provided in the context e.g. '[0]'`,
FOLLOW_UP_QUESTIONS: `In addition, generate four follow up questions related to the answer generated. Each question should be in this format -{QUESTION_INDEX_HERE}-{{QUESTION_HERE}} and each question should be separated by a new line. DO NOT ADD AN INTRODUCTORY TEXT TO THE FOLLOW UP QUESTIONS.`,
USED_SOURCES: `Lastly, list all sources relevant in generating the answer in a list in this format '__sources__: [LINK_INDICES_HERE]'`
};

export const CONTEXT_WINDOW_MESSAGES = 6

export const TOKEN_UPPER_LIMIT = 7000
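
The new TOKEN_UPPER_LIMIT constant works together with the openai-chat-tokens dependency added to package.json; the enforcement itself happens in the chat API call, in a file not expanded in this view. A minimal sketch of what that enforcement could look like — the helper name trimToTokenLimit and the drop-oldest-first strategy are assumptions for illustration, not code from this commit:

import { promptTokensEstimate } from "openai-chat-tokens";
import { TOKEN_UPPER_LIMIT } from "@/config/chatAPI-config";

type ChatMessage = { role: "system" | "user" | "assistant"; content: string };

// Hypothetical helper: drop the oldest non-system turns until the estimated
// prompt size fits under TOKEN_UPPER_LIMIT.
export const trimToTokenLimit = (messages: ChatMessage[]): ChatMessage[] => {
  const trimmed = [...messages];
  while (
    trimmed.length > 2 &&
    promptTokensEstimate({ messages: trimmed }) > TOKEN_UPPER_LIMIT
  ) {
    trimmed.splice(1, 1); // index 0 is assumed to hold the system message
  }
  return trimmed;
};
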
4 changes: 4 additions & 0 deletions src/config/context-config.ts
@@ -0,0 +1,4 @@
export const DOCUMENTS = {
MAX_NUMBER_OF_DOCUMENTS: 6,
MAX_LENGTH_PER_DOCUMENT: 2000,
}
7 changes: 4 additions & 3 deletions src/pages/api/search.ts
@@ -4,12 +4,13 @@ import { extractESresults, extractKeywords } from "@/utils/fetchESResult";
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
if (req.method === "POST") {
const { inputs } = req.body;
const { question: query, author } = inputs;
const { question: query, author, keywords } = inputs;

const extractedKeywords = await extractKeywords(query);
const keywords = extractedKeywords === "" ? query : extractedKeywords;

const keywordsToSearch = keywords ? keywords : extractedKeywords ? extractedKeywords: query;

const searchResults = await extractESresults(keywords, query, author);
const searchResults = await extractESresults(keywordsToSearch, query, author);

if (!searchResults) {
res.status(200).json({ message: "I am not able to find an answer to this question. So please rephrase your question and ask again." });
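
For readers scanning the new fallback in search.ts: the GPT-extracted keywords passed in the request body win, then the locally extracted keywords, then the raw query. The ternary chain above is equivalent to the shorter form below, shown only to make the precedence explicit (not part of the commit):

// Same precedence, treating empty strings as "no keywords".
const keywordsToSearch = keywords || extractedKeywords || query;
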
28 changes: 21 additions & 7 deletions src/pages/api/server.ts
@@ -3,16 +3,25 @@ import ERROR_MESSAGES from "@/config/error-config";
import { processInput } from "@/utils/openaiChat";
import { createReadableStream } from "@/utils/stream";
import { getNewUrl } from "@/utils/token-api";
import { GPTKeywordExtractor } from "@/service/chat/extractor";
import { ChatHistory } from "@/types";

interface InternalFetchParams {
url: string;
query: string;
author?: string;
keywords?: string;
}
export const config: PageConfig = {
runtime: "edge",
};

export const internalFetch = async (
url: string,
query: string,
author?: string
): Promise<any[] | null> => {
export const internalFetch = async ({
url,
query,
author,
keywords,
}: InternalFetchParams): Promise<any[] | null> => {
const response = await fetch(url, {
method: "POST",
headers: {
@@ -22,6 +31,7 @@ export const internalFetch = (
inputs: {
question: query,
author: author,
keywords: keywords
},
}),
});
@@ -41,12 +51,16 @@ export default async function handler(req: Request) {
let esResults;
let userQuery;

const chatHistory = reqBody?.chatHistory ?? ([] as ChatHistory[]);

try {
const fetchUrl = getNewUrl(requesturl, "/search");
const inputs = reqBody?.inputs;
const { query, author }: { query: string; author: string } = inputs;

esResults = await internalFetch(fetchUrl, query, author);
const gptKeywords = await GPTKeywordExtractor([...chatHistory]);

esResults = await internalFetch({url: fetchUrl, query, author, keywords: gptKeywords});
userQuery = query;

if (!esResults || !esResults.length) {
@@ -63,7 +77,7 @@ }
}

try {
const result = await processInput(esResults, userQuery);
const result = await processInput(esResults, userQuery, chatHistory);
return new Response(result);
} catch (error: any) {
const errMessage = error?.message
50 changes: 22 additions & 28 deletions src/pages/index.tsx
@@ -1,23 +1,19 @@
import { useCallback, useEffect, useRef, useState } from "react";
import { Message } from "@/components/message/message";
import { v4 as uuidv4 } from "uuid";
import { SupaBaseDatabase } from "@/database/database";
import { useRouter } from "next/router";
import ChatScreen from "@/components/chat/ChatScreen";
import HomePage from "@/components/home/Home";
import authorsConfig, { AUTHOR_QUERY } from "@/config/authorsConfig";
import authorsConfig, { AUTHOR_QUERY } from "@/config/authors-config";
import useUpdateRouterQuery from "@/hooks/useUpdateRouterQuery";
import { GeneratingErrorMessages, Payload, PromptAction } from "@/types";
import ERROR_MESSAGES, { getAllErrorMessages } from "@/config/error-config";
import { usePaymentContext } from "@/contexts/payment-context";
import InvoiceModal from "@/components/invoice/modal";
import { constructTokenHeader } from "@/utils/token";
import { formatDate } from "@/utils/date";
import { createReadableStream } from "@/utils/stream";
import { separateLinksFromApiMessage } from "@/utils/links";
import { DEFAULT_PAYMENT_PRICE } from "@/config/constants";
import { Button } from "@chakra-ui/react";
import { getCachedAnswer, manageSaveToDB } from "@/utils/db";
import { constructMessageHistory } from "@/service/chat/history";

const initialStream: Message = {
type: "apiStream",
@@ -64,8 +60,8 @@ export default function Home() {
const abortTypingRef = useRef<AbortController>();

const abortGeneration = () => {
abortTypingRef.current?.abort(GeneratingErrorMessages.stopGenerating)
}
abortTypingRef.current?.abort(GeneratingErrorMessages.stopGenerating);
};

const resetChat = async () => {
if (streamLoading) {
@@ -77,7 +73,7 @@ export default function Home() {
setStreamLoading(false);
setMessages([]);
}
abortTypingRef.current = undefined
abortTypingRef.current = undefined;
};

useEffect(() => {
@@ -113,19 +109,18 @@ export default function Home() {
}
// Reset the typedMessage state
let uuid = uuidv4();
const lastMessage =
messages.length > 0 ? messages[messages.length - 1]["message"] : null;
const messageToSet: Message[] =
lastMessage === query
? messages
: [
...messages,
{ message: query, type: "userMessage", uniqueId: uuid },
];
setLoading(true);
setMessages((prevMessages) => {
if (prevMessages.length > 0) {
const lastMessage = prevMessages[prevMessages.length - 1];
if (lastMessage.message === query) {
return prevMessages;
}
}
return [
...prevMessages,
{ message: query, type: "userMessage", uniqueId: uuid },
];
});
setMessages(messageToSet);
let chatHistory = constructMessageHistory(messageToSet);

// instantiate new AbortController
const typingAbortController = new AbortController();
@@ -156,6 +151,7 @@
query,
author,
},
chatHistory,
}),
signal: typingAbortController.signal,
});
@@ -201,21 +197,20 @@
});
}
await updateMessages(finalAnswerWithLinks, uuid);

} catch (err: any) {
switch (typingAbortController.signal.reason) {
case GeneratingErrorMessages.stopGenerating:
await updateMessages(finalAnswerWithLinks, uuid);
break;

case GeneratingErrorMessages.resetChat:
setStreamLoading(false);
setLoading(false);
setStreamData(initialStream);
setMessages([]);
setUserInput("");
break;

default:
await updateMessages(finalAnswerWithLinks, uuid);
break;
@@ -228,9 +223,8 @@
answer: finalAnswerWithLinks,
author,
wasAborted: typingAbortController.signal.aborted,
errorMessages
})

errorMessages,
});
} catch (err: any) {
setMessages((prevMessages) => [
...prevMessages,
@@ -244,7 +238,7 @@
setLoading(false);
},
// eslint-disable-next-line react-hooks/exhaustive-deps
[userInput]
[userInput, messages]
);

const promptChat: PromptAction = async (prompt, author, options) => {
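
With this change the client sends the trimmed chat history alongside the existing inputs. An illustrative shape of the /api/server request body after this commit — the query, author, and history values are invented placeholders:

// Field names match the diff above; every value here is made up.
const body = JSON.stringify({
  inputs: {
    query: "How do fee rates work?",
    author: "example-author",
  },
  chatHistory: [
    { role: "user", content: "What is the mempool?" },
    { role: "assistant", content: "The mempool holds unconfirmed transactions ..." },
    { role: "user", content: "How do fee rates work?" },
  ],
});
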
44 changes: 44 additions & 0 deletions src/service/chat/extractor.ts
@@ -0,0 +1,44 @@
import { COMPLETION_URL, extractorSystemPrompt, OPENAI_EXTRACTOR_MODEL } from "@/config/chatAPI-config";
import { ChatHistory } from "@/types";

export const GPTKeywordExtractor = async (history: ChatHistory[]) => {
try {
const userQuestions = history
.filter((message) => message.role === "user")
.slice(-10);
const messages = [
{
role: "system",
content: extractorSystemPrompt,
},
...userQuestions,
];

const payload = {
model: OPENAI_EXTRACTOR_MODEL,
response_format: { "type": "json_object" },
messages,
};
const response = await fetch(COMPLETION_URL, {
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${process.env.OPENAI_API_KEY ?? ""}`,
},
method: "POST",
body: JSON.stringify(payload),
});
const body = await response.json();
const keywords = JSON.parse(body.choices[0]?.message.content).keywords
if (Array.isArray(keywords)) {
return keywords.map((keyword: string) => keyword.trim()).join(" ")
}
if (typeof keywords !== "string") {
throw new Error("Parsed response is not a string")
}

return keywords.replaceAll(",", "")
} catch (err) {
console.log(err)
return undefined
}
};
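
A short usage sketch for the extractor, assuming ChatHistory is the { role, content } shape used throughout this commit and that the model honors the JSON response format requested by the system prompt; the history and the example output are illustrative:

import { GPTKeywordExtractor } from "@/service/chat/extractor";
import { ChatHistory } from "@/types";

const demo = async () => {
  // Only the "user" turns are forwarded; the last question gets priority.
  const history: ChatHistory[] = [
    { role: "user", content: "What is a Lightning channel?" },
    { role: "assistant", content: "A Lightning channel is a 2-of-2 multisig ..." },
    { role: "user", content: "How do I close one cooperatively?" },
  ];
  const keywords = await GPTKeywordExtractor(history);
  console.log(keywords); // e.g. "lightning channel cooperative close", or undefined on failure
};
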
82 changes: 82 additions & 0 deletions src/service/chat/history.ts
@@ -0,0 +1,82 @@
import { Message } from "@/components/message/message";
import { separateLinksFromApiMessage } from "@/utils/links";
import { CONTEXT_WINDOW_MESSAGES, guidelines } from "@/config/chatAPI-config";
import { ChatHistory } from "@/types";

const buildSystemMessage = (question: string, context: string) => {
const {
BASE_INSTRUCTION,
NO_ANSWER,
UNRELATED_QUESTION,
FOLLOW_UP_QUESTIONS,
LINKING,
} = guidelines;
return `${BASE_INSTRUCTION}\n${NO_ANSWER}\n${UNRELATED_QUESTION}\n${context}\n${LINKING}\n${FOLLOW_UP_QUESTIONS}`;
};

export const buildChatMessages = ({
question,
context,
oldContext,
messages,
}: {
question: string;
context: string;
oldContext?: string;
messages: ChatHistory[];
}) => {
const systemMessage = buildSystemMessage(question, context);
return [
{
role: "system",
content: systemMessage,
},
...messages
] as ChatHistory[];
};

const formatMessageToChatHistory = (message: Message) => {
try {
let role, content;
if (message.type === "errorMessage" || message.type === "apiStream") {
return undefined;
}
switch (message.type) {
case "apiMessage": {
role = "assistant";
content = separateLinksFromApiMessage(message.message).messageBody;
break;
}
case "authorMessage": {
role = "system";
content = message.message;
break;
}
case "userMessage": {
role = "user";
content = message.message;
break;
}
default:
return undefined;
}
return {
role,
content,
} as ChatHistory;
} catch (err) {
console.error(err);
return undefined;
}
};

export const constructMessageHistory = (messages: Message[]) => {
const list: ChatHistory[] = [];
const messageWindow = messages.slice(-CONTEXT_WINDOW_MESSAGES)
for (let index = 0; index < messageWindow.length; index++) {
const message = messageWindow[index];
const chat = formatMessageToChatHistory(message);
if (chat) list.push(chat);
}
return list;
};
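
Finally, a sketch of how the two exports in history.ts fit together: constructMessageHistory trims the rendered chat on the client, and buildChatMessages prepends the system message built from the retrieved CONTEXT BLOCK. Only the Message fields visible in this diff (message, type, uniqueId) are assumed; the values are illustrative:

import { buildChatMessages, constructMessageHistory } from "@/service/chat/history";
import { Message } from "@/components/message/message";

// Client side: keep only the last CONTEXT_WINDOW_MESSAGES chat bubbles.
const uiMessages: Message[] = [
  { message: "What is a mempool?", type: "userMessage", uniqueId: "1" },
  { message: "The mempool is ... __sources__: [0]", type: "apiMessage", uniqueId: "2" },
  { message: "How do fee rates work?", type: "userMessage", uniqueId: "3" },
];
const chatHistory = constructMessageHistory(uiMessages);

// API side: the system message carries the guidelines plus the CONTEXT BLOCK.
const payloadMessages = buildChatMessages({
  question: "How do fee rates work?",
  context: "CONTEXT BLOCK\n[0] https://example.com/fee-rates ...",
  messages: chatHistory,
});
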