bam_verbose.ts
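
// Verbose BAM chat setup: define a Llama 3.1 prompt template, configure the raw
// completion model, and wire both into a BAMChatLLM instance.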
import "dotenv/config";
import { BaseMessage } from "bee-agent-framework/llms/primitives/message";
import { PromptTemplate } from "bee-agent-framework/template";
import { z } from "zod";
import { BAMLLM } from "bee-agent-framework/adapters/bam/llm";
import { BAMChatLLM } from "bee-agent-framework/adapters/bam/chat";
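
// Mustache-style template that renders an array of role-keyed messages into the
// Llama 3.1 instruct format (per-role header tokens, <|eot_id|> terminators).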
const template = new PromptTemplate({
  schema: z.object({
    messages: z.array(z.record(z.array(z.string()))),
  }),
  template: `{{#messages}}{{#system}}<|begin_of_text|><|start_header_id|>system<|end_header_id|>

{{system}}<|eot_id|>{{/system}}{{#user}}<|start_header_id|>user<|end_header_id|>

{{user}}<|eot_id|>{{/user}}{{#assistant}}<|start_header_id|>assistant<|end_header_id|>

{{assistant}}<|eot_id|>{{/assistant}}{{#ipython}}<|start_header_id|>ipython<|end_header_id|>

{{ipython}}<|eot_id|>{{/ipython}}{{/messages}}<|start_header_id|>assistant<|end_header_id|>

`,
});
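
// Underlying completion model and its decoding parameters; BAM credentials are
// assumed to come from the environment loaded by dotenv/config above.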
const llm = new BAMLLM({
  modelId: "meta-llama/llama-3-1-70b-instruct",
  parameters: {
    decoding_method: "greedy",
    max_new_tokens: 50,
  },
});
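
// Chat wrapper: messagesToPrompt maps each chat message onto the matching
// template section (system/user/assistant/ipython) before rendering.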
const chatLLM = new BAMChatLLM({
  llm,
  config: {
    messagesToPrompt(messages: BaseMessage[]) {
      return template.render({
        messages: messages.map((message) => ({
          system: message.role === "system" ? [message.text] : [],
          user: message.role === "user" ? [message.text] : [],
          assistant: message.role === "assistant" ? [message.text] : [],
          ipython: message.role === "ipython" ? [message.text] : [],
        })),
      });
    },
  },
});
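
// Print the resolved model metadata.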
console.info("Meta", await chatLLM.meta());
const response = await chatLLM.generate([
  BaseMessage.of({
    role: "user",
    text: "Hello world!",
  }),
]);
console.info(response.messages[0]);