ibm-vllm.ts
import "dotenv/config.js";
import { IBMvLLM } from "bee-agent-framework/adapters/ibm-vllm/llm";
import { IBMVllmChatLLM } from "bee-agent-framework/adapters/ibm-vllm/chat";
import { BaseMessage } from "bee-agent-framework/llms/primitives/message";
import { Client } from "bee-agent-framework/adapters/ibm-vllm/client";
const client = new Client();
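// Raw text generation via the IBMvLLM adapter: print model metadata,
// then request a single non-streamed completion for a short prompt.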
{
  console.info("===RAW===");
  const llm = new IBMvLLM({
    client,
    modelId: "meta-llama/llama-3-1-70b-instruct",
  });

  console.info("Meta", await llm.meta());

  const response = await llm.generate("Hello world!", {
    stream: false,
  });
  console.info(response.text);
}
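// Chat-style generation: build the chat LLM from a model preset,
// send a single user message, and print the returned messages.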
{
  console.info("===CHAT===");
  const llm = IBMVllmChatLLM.fromPreset("meta-llama/llama-3-1-70b-instruct", { client });

  console.info("Meta", await llm.meta());

  const response = await llm.generate([
    BaseMessage.of({
      role: "user",
      text: "Hello world!",
    }),
  ]);
  console.info(response.messages);
}
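// Embeddings: embed two short texts with an embedding model and print the result.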
{
  console.info("===EMBEDDING===");
  const llm = new IBMvLLM({ client, modelId: "baai/bge-large-en-v1.5" });

  const response = await llm.embed([`Hello world!`, `Hello family!`]);
  console.info(response);
}