customChatProvider.ts
import {
  AsyncStream,
  BaseLLMTokenizeOutput,
  EmbeddingOptions,
  EmbeddingOutput,
  ExecutionOptions,
  GenerateOptions,
  LLMCache,
  LLMMeta,
  StreamGenerateOptions,
} from "bee-agent-framework/llms/base";
import { shallowCopy } from "bee-agent-framework/serializer/utils";
import type { GetRunContext } from "bee-agent-framework/context";
import { Emitter } from "bee-agent-framework/emitter/emitter";
import { ChatLLM, ChatLLMGenerateEvents, ChatLLMOutput } from "bee-agent-framework/llms/chat";
import { BaseMessage, Role } from "bee-agent-framework/llms/primitives/message";
import { sum } from "remeda";
import { NotImplementedError } from "bee-agent-framework/errors";
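
// Output wrapper for the custom provider: it accumulates the messages (or stream chunks)
// produced by a single generation and exposes them through the ChatLLMOutput interface.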
export class CustomChatLLMOutput extends ChatLLMOutput {
  public readonly chunks: BaseMessage[] = [];

  constructor(chunk: BaseMessage) {
    super();
    this.chunks.push(chunk);
  }

  get messages() {
    return this.chunks;
  }

  merge(other: CustomChatLLMOutput): void {
    this.chunks.push(...other.chunks);
  }

  getTextContent(): string {
    return this.chunks.map((result) => result.text).join("");
  }

  toString(): string {
    return this.getTextContent();
  }

  createSnapshot() {
    return { chunks: shallowCopy(this.chunks) };
  }

  loadSnapshot(snapshot: ReturnType<typeof this.createSnapshot>): void {
    Object.assign(this, snapshot);
  }
}

// Feel free to extend if you want to support additional parameters
type CustomGenerateOptions = GenerateOptions;

export interface CustomChatLLMInput {
  modelId: string;
  executionOptions?: ExecutionOptions;
  cache?: LLMCache<CustomChatLLMOutput>;
  parameters?: Record<string, any>;
}

type CustomChatLLMEvents = ChatLLMGenerateEvents<CustomChatLLMOutput>;
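
// The provider itself. Subclassing ChatLLM wires the class into the framework's caching,
// events, and public generate()/stream() entry points; only the protected hooks below need
// to be connected to a real inference API.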
export class CustomChatLLM extends ChatLLM<CustomChatLLMOutput, CustomGenerateOptions> {
  public readonly emitter = Emitter.root.child<CustomChatLLMEvents>({
    namespace: ["custom", "llm"],
    creator: this,
  });

  constructor(protected readonly input: CustomChatLLMInput) {
    super(input.modelId, input.executionOptions, input.cache);
  }

  // Registers the class with the framework's serializer so instances can be snapshotted and restored
  static {
    this.register();
  }

  async meta(): Promise<LLMMeta> {
    // TODO: retrieve data about the current model from the given provider API
    return { tokenLimit: Infinity };
  }

  async embed(input: BaseMessage[][], options?: EmbeddingOptions): Promise<EmbeddingOutput> {
    throw new NotImplementedError();
  }

  async tokenize(input: BaseMessage[]): Promise<BaseLLMTokenizeOutput> {
    // TODO: use the provider's tokenizer API; the fallback below assumes roughly 4 characters per token
    return {
      tokensCount: sum(input.map((msg) => Math.ceil(msg.text.length / 4))),
    };
  }

  protected async _generate(
    input: BaseMessage[],
    options: Partial<CustomGenerateOptions>,
    run: GetRunContext<this>,
  ): Promise<CustomChatLLMOutput> {
    // This method should make a single (non-streaming) request to the API
    // TIP: access inference parameters via `this.input.parameters` and `options`
    // TIP: use the abort signal from `run.signal`
    const result = BaseMessage.of({
      role: Role.ASSISTANT,
      text: "TODO: the response retrieved from the API",
    });
    return new CustomChatLLMOutput(result);
  }

  protected async *_stream(
    input: BaseMessage[],
    options: Partial<StreamGenerateOptions>,
    run: GetRunContext<this>,
  ): AsyncStream<CustomChatLLMOutput, void> {
    // This method should make a streaming request to the API and yield one output per received chunk
    // TIP: access inference parameters via `this.input.parameters` and `options`
    // TIP: use the abort signal from `run.signal`
    for await (const chunk of ["Hel", "lo", " world", "!"]) {
      const result = BaseMessage.of({
        role: Role.ASSISTANT,
        text: chunk,
      });
      yield new CustomChatLLMOutput(result);
    }
  }

  createSnapshot() {
    return {
      ...super.createSnapshot(),
      input: shallowCopy(this.input),
    };
  }

  loadSnapshot({ input, ...snapshot }: ReturnType<typeof this.createSnapshot>) {
    super.loadSnapshot(snapshot);
    Object.assign(this, { input });
  }
}
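
// Usage sketch (an illustration, not part of the original file): once `_generate` and
// `_stream` are wired to a real API, the provider can be called like any other
// bee-agent-framework chat LLM. The model id "my-model" is a placeholder.
//
// const llm = new CustomChatLLM({ modelId: "my-model", parameters: { temperature: 0 } });
//
// const response = await llm.generate([BaseMessage.of({ role: Role.USER, text: "Hello!" })]);
// console.log(response.getTextContent());
//
// for await (const chunk of llm.stream([BaseMessage.of({ role: Role.USER, text: "Hello!" })])) {
//   process.stdout.write(chunk.getTextContent());
// }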