// SimpleChatEngine.ts (from a fork of run-llama/LlamaIndexTS)
import type { LLM } from "@llamaindex/core/llms";
import { BaseMemory, ChatMemoryBuffer } from "@llamaindex/core/memory";
import { EngineResponse } from "@llamaindex/core/schema";
import {
  streamConverter,
  streamReducer,
  wrapEventCaller,
} from "@llamaindex/core/utils";
import { Settings } from "../../Settings.js";
import type {
  ChatEngine,
  ChatEngineParamsNonStreaming,
  ChatEngineParamsStreaming,
} from "./types.js";

/**
 * SimpleChatEngine is the simplest possible chat engine. Useful for using your own custom prompts.
 */
export class SimpleChatEngine implements ChatEngine {
  chatHistory: BaseMemory;
  llm: LLM;

  constructor(init?: Partial<SimpleChatEngine>) {
    this.chatHistory = init?.chatHistory ?? new ChatMemoryBuffer();
    this.llm = init?.llm ?? Settings.llm;
  }

  chat(
    params: ChatEngineParamsStreaming,
  ): Promise<AsyncIterable<EngineResponse>>;
  chat(params: ChatEngineParamsNonStreaming): Promise<EngineResponse>;
  @wrapEventCaller
  async chat(
    params: ChatEngineParamsStreaming | ChatEngineParamsNonStreaming,
  ): Promise<EngineResponse | AsyncIterable<EngineResponse>> {
    const { message, stream } = params;

    // Use the per-call chat history if one was provided, otherwise fall back
    // to the engine's own memory buffer.
    const chatHistory = params.chatHistory
      ? new ChatMemoryBuffer({
          chatHistory:
            params.chatHistory instanceof BaseMemory
              ? await params.chatHistory.getMessages(message)
              : params.chatHistory,
        })
      : this.chatHistory;
    chatHistory.put({ content: message, role: "user" });

    if (stream) {
      // Streaming path: accumulate deltas, store the full assistant reply in
      // memory once the stream finishes, and convert each chunk to an EngineResponse.
      const stream = await this.llm.chat({
        messages: await chatHistory.getMessages(params.message),
        stream: true,
      });
      return streamConverter(
        streamReducer({
          stream,
          initialValue: "",
          reducer: (accumulator, part) => accumulator + part.delta,
          finished: (accumulator) => {
            chatHistory.put({ content: accumulator, role: "assistant" });
          },
        }),
        EngineResponse.fromChatResponseChunk,
      );
    }

    // Non-streaming path: a single LLM call whose reply is stored in memory.
    const response = await this.llm.chat({
      stream: false,
      messages: await chatHistory.getMessages(params.message),
    });
    chatHistory.put(response.message);
    return EngineResponse.fromChatResponse(response);
  }

  reset() {
    this.chatHistory.reset();
  }
}
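
// ---------------------------------------------------------------------------
// Usage sketch (not part of the upstream file): a minimal example of how this
// engine is typically driven. It assumes Settings.llm has already been
// configured elsewhere (e.g. with an OpenAI instance). `response.message.content`
// and `chunk.delta` follow from how EngineResponse is constructed above, but
// treat the exact accessors as assumptions.
//
// const engine = new SimpleChatEngine();
//
// // Non-streaming: a single EngineResponse whose reply is stored in chat memory.
// const response = await engine.chat({ message: "What does this engine do?" });
// console.log(response.message.content);
//
// // Streaming: an AsyncIterable of EngineResponse chunks.
// const stream = await engine.chat({
//   message: "Summarize that in one sentence.",
//   stream: true,
// });
// for await (const chunk of stream) {
//   process.stdout.write(chunk.delta ?? "");
// }
// ---------------------------------------------------------------------------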