-
Notifications
You must be signed in to change notification settings - Fork 139
/
ollama.ts
64 lines (55 loc) · 1.39 KB
/
ollama.ts
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
import { OllamaLLM } from "bee-agent-framework/adapters/ollama/llm";
import { OllamaChatLLM } from "bee-agent-framework/adapters/ollama/chat";
import { BaseMessage } from "bee-agent-framework/llms/primitives/message";
import { Ollama } from "ollama";
{
  // Demo 1: the raw (non-chat) Ollama adapter.
  // Streams a completion for a plain-string prompt, capped at 10 tokens
  // and stopped early if the text "post" is produced.
  console.info("===RAW===");
  const rawLLM = new OllamaLLM({
    modelId: "llama3.1",
    parameters: {
      num_predict: 10,
      stop: ["post"],
    },
  });
  const rawMeta = await rawLLM.meta();
  console.info("Meta", rawMeta);
  const rawResponse = await rawLLM.generate("Hello world!", { stream: true });
  console.info(rawResponse.finalResult);
}
{
console.info("===CHAT===");
const llm = new OllamaChatLLM({
modelId: "llama3.1",
parameters: {
num_predict: 10,
temperature: 0,
},
});
console.info("Meta", await llm.meta());
const response = await llm.generate([
BaseMessage.of({
role: "user",
text: "Hello world!",
}),
]);
console.info(response.finalResult);
}
{
  // Demo 3: same chat flow, but with an explicit Ollama client so the
  // server location can be overridden via the OLLAMA_HOST env var.
  console.info("===REMOTE OLLAMA===");
  // use the IP for the server you have ollama running on
  const host = process.env.OLLAMA_HOST || "http://127.0.0.1:11434";
  const remoteClient = new Ollama({ host });
  const remoteLLM = new OllamaChatLLM({
    modelId: "llama3.1",
    client: remoteClient,
  });
  console.info("Meta", await remoteLLM.meta());
  const remoteResponse = await remoteLLM.generate([
    BaseMessage.of({ role: "user", text: "Hello world!" }),
  ]);
  console.info(remoteResponse.finalResult);
}