Skip to content

Commit

Permalink
Merge pull request #863 from isamu/anthropicAgentConfig
Browse files — browse the repository at this point in the history
Anthropic agent config
  • Branch information
isamu authored Dec 29, 2024
2 parents 23a06ca + f878ab3 commit d90758b
Show file tree
Hide file tree
Showing 4 changed files with 39 additions and 20 deletions.
10 changes: 8 additions & 2 deletions llm_agents/anthropic_agent/lib/anthropic_agent.d.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,15 @@ type AnthropicInputs = {
model?: string;
temperature?: number;
max_tokens?: number;
stream?: boolean;
messages?: Array<Record<string, any>>;
} & GraphAILLMInputBase;
export declare const anthropicAgent: AgentFunction<AnthropicInputs, Record<string, any> | string, AnthropicInputs>;
type AnthropicConfig = {
apiKey?: string;
stream?: boolean;
forWeb?: boolean;
};
type AnthropicParams = AnthropicInputs & AnthropicConfig;
type AnthropicResult = Record<string, any> | string;
export declare const anthropicAgent: AgentFunction<AnthropicParams, AnthropicResult, AnthropicInputs, AnthropicConfig>;
declare const anthropicAgentInfo: AgentFunctionInfo;
export default anthropicAgentInfo;
16 changes: 9 additions & 7 deletions llm_agents/anthropic_agent/lib/anthropic_agent.js
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,12 @@ Object.defineProperty(exports, "__esModule", { value: true });
exports.anthropicAgent = void 0;
const sdk_1 = __importDefault(require("@anthropic-ai/sdk"));
const llm_utils_1 = require("@graphai/llm_utils");
const anthropicAgent = async ({ params, namedInputs, filterParams }) => {
const { model, system, temperature, max_tokens, prompt, messages, stream } = { ...params, ...namedInputs };
const anthropicAgent = async ({ params, namedInputs, filterParams, config }) => {
const { model, system, temperature, max_tokens, prompt, messages } = { ...params, ...namedInputs };
const { apiKey, stream, forWeb } = {
...params,
...(config || {}),
};
const userPrompt = (0, llm_utils_1.getMergeValue)(namedInputs, params, "mergeablePrompts", prompt);
const systemPrompt = (0, llm_utils_1.getMergeValue)(namedInputs, params, "mergeableSystem", system);
const messagesCopy = messages ? messages.map((m) => m) : [];
Expand All @@ -17,11 +21,9 @@ const anthropicAgent = async ({ params, namedInputs, filterParams }) => {
content: userPrompt,
});
}
const anthropic = new sdk_1.default({
apiKey: process.env["ANTHROPIC_API_KEY"], // This is the default and can be omitted
});
const anthropic = new sdk_1.default({ apiKey, dangerouslyAllowBrowser: !!forWeb });
const opt = {
model: model || "claude-3-haiku-20240307", // "claude-3-opus-20240229",
model: model ?? "claude-3-5-sonnet-20241022",
messages: messagesCopy,
system: systemPrompt,
temperature: temperature ?? 0.7,
Expand Down Expand Up @@ -86,7 +88,7 @@ const anthropicAgentInfo = {
author: "Receptron team",
repository: "https://github.com/receptron/graphai",
license: "MIT",
// stream: true,
stream: true,
environmentVariables: ["ANTHROPIC_API_KEY"],
npms: ["@anthropic-ai/sdk"],
};
Expand Down
4 changes: 2 additions & 2 deletions llm_agents/anthropic_agent/package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "@graphai/anthropic_agent",
"version": "0.2.0",
"version": "0.2.1",
"description": "Anthropic agents for GraphAI.",
"main": "lib/index.js",
"files": [
Expand All @@ -27,7 +27,7 @@
},
"homepage": "https://github.com/receptron/graphai/blob/main/llm_agents/anthropic_agent/README.md",
"dependencies": {
"@anthropic-ai/sdk": "^0.32.1",
"@anthropic-ai/sdk": "^0.33.1",
"@graphai/llm_utils": "^0.0.2"
},
"devDependencies": {},
Expand Down
29 changes: 20 additions & 9 deletions llm_agents/anthropic_agent/src/anthropic_agent.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,13 +9,27 @@ type AnthropicInputs = {
max_tokens?: number;
// tools?: any;
// tool_choice?: any;
stream?: boolean;
messages?: Array<Record<string, any>>;
} & GraphAILLMInputBase;

export const anthropicAgent: AgentFunction<AnthropicInputs, Record<string, any> | string, AnthropicInputs> = async ({ params, namedInputs, filterParams }) => {
const { model, system, temperature, max_tokens, prompt, messages, stream } = { ...params, ...namedInputs };
type AnthropicConfig = {
apiKey?: string;
stream?: boolean;
forWeb?: boolean;
};

type AnthropicParams = AnthropicInputs & AnthropicConfig;

type AnthropicResult = Record<string, any> | string;

export const anthropicAgent: AgentFunction<AnthropicParams, AnthropicResult, AnthropicInputs, AnthropicConfig> = async ({ params, namedInputs, filterParams, config }) => {
const { model, system, temperature, max_tokens, prompt, messages } = { ...params, ...namedInputs };

const { apiKey, stream, forWeb } = {
...params,
...(config || {}),
};

const userPrompt = getMergeValue(namedInputs, params, "mergeablePrompts", prompt);
const systemPrompt = getMergeValue(namedInputs, params, "mergeableSystem", system);

Expand All @@ -28,12 +42,9 @@ export const anthropicAgent: AgentFunction<AnthropicInputs, Record<string, any>
});
}

const anthropic = new Anthropic({
apiKey: process.env["ANTHROPIC_API_KEY"], // This is the default and can be omitted
});

const anthropic = new Anthropic({ apiKey, dangerouslyAllowBrowser: !!forWeb });
const opt = {
model: model || "claude-3-haiku-20240307", // "claude-3-opus-20240229",
model: model ?? "claude-3-5-sonnet-20241022",
messages: messagesCopy,
system: systemPrompt,
temperature: temperature ?? 0.7,
Expand Down Expand Up @@ -98,7 +109,7 @@ const anthropicAgentInfo: AgentFunctionInfo = {
author: "Receptron team",
repository: "https://github.com/receptron/graphai",
license: "MIT",
// stream: true,
stream: true,
environmentVariables: ["ANTHROPIC_API_KEY"],
npms: ["@anthropic-ai/sdk"],
};
Expand Down

0 comments on commit d90758b

Please sign in to comment.