diff --git a/llm_agents/openai_agent/lib/openai_agent.d.ts b/llm_agents/openai_agent/lib/openai_agent.d.ts
index f0fa0e80..b1b4459c 100644
--- a/llm_agents/openai_agent/lib/openai_agent.d.ts
+++ b/llm_agents/openai_agent/lib/openai_agent.d.ts
@@ -9,14 +9,18 @@ type OpenAIInputs = {
     max_tokens?: number;
     verbose?: boolean;
     temperature?: number;
+    messages?: Array<OpenAI.ChatCompletionMessageParam>;
+    response_format?: any;
+} & GraphAILLMInputBase;
+type OpenAIConfig = {
     baseURL?: string;
     apiKey?: string;
    stream?: boolean;
-    messages?: Array<OpenAI.ChatCompletionMessageParam>;
     forWeb?: boolean;
-    response_format?: any;
-} & GraphAILLMInputBase;
-export declare const openAIAgent: AgentFunction<OpenAIInputs, Record<string, any> | string, OpenAIInputs>;
+};
+type OpenAIParams = OpenAIInputs & OpenAIConfig;
+type OpenAIResult = Record<string, any> | string;
+export declare const openAIAgent: AgentFunction<OpenAIParams, OpenAIResult, OpenAIInputs, OpenAIConfig>;
 export declare const openAIMockAgent: AgentFunction<{
     model?: string;
     query?: string;
diff --git a/llm_agents/openai_agent/lib/openai_agent.js b/llm_agents/openai_agent/lib/openai_agent.js
index bd843b72..9512235c 100644
--- a/llm_agents/openai_agent/lib/openai_agent.js
+++ b/llm_agents/openai_agent/lib/openai_agent.js
@@ -37,11 +37,15 @@ const convertOpenAIChatCompletion = (response, messages) => {
         messages,
     };
 };
-const openAIAgent = async ({ filterParams, params, namedInputs }) => {
-    const { verbose, system, images, temperature, tools, tool_choice, max_tokens, baseURL, apiKey, stream, prompt, messages, forWeb, response_format } = {
+const openAIAgent = async ({ filterParams, params, namedInputs, config }) => {
+    const { verbose, system, images, temperature, tools, tool_choice, max_tokens, prompt, messages, response_format } = {
         ...params,
         ...namedInputs,
     };
+    const { apiKey, stream, baseURL, forWeb } = {
+        ...params,
+        ...(config || {})
+    };
     const userPrompt = (0, llm_utils_1.getMergeValue)(namedInputs, params, "mergeablePrompts", prompt);
     const systemPrompt = (0, llm_utils_1.getMergeValue)(namedInputs, params, "mergeableSystem", system);
     const messagesCopy = (0, llm_utils_1.getMessages)(systemPrompt, messages);
diff --git a/llm_agents/openai_agent/lib/openai_image_agent.d.ts b/llm_agents/openai_agent/lib/openai_image_agent.d.ts
index e4e37ce2..f5645230 100644
--- a/llm_agents/openai_agent/lib/openai_image_agent.d.ts
+++ b/llm_agents/openai_agent/lib/openai_image_agent.d.ts
@@ -2,10 +2,13 @@ import { AgentFunction, AgentFunctionInfo } from "graphai";
 import { GraphAILLMInputBase } from "@graphai/llm_utils";
 type OpenAIInputs = {
     model?: string;
+} & GraphAILLMInputBase;
+type OpenAIConfig = {
     baseURL?: string;
     apiKey?: string;
     forWeb?: boolean;
-} & GraphAILLMInputBase;
-export declare const openAIImageAgent: AgentFunction<OpenAIInputs, Record<string, any> | string, OpenAIInputs>;
+};
+type OpenAIParams = OpenAIInputs & OpenAIConfig;
+export declare const openAIImageAgent: AgentFunction<OpenAIParams, Record<string, any> | string, OpenAIInputs, OpenAIConfig>;
 declare const openAIImageAgentInfo: AgentFunctionInfo;
 export default openAIImageAgentInfo;
diff --git a/llm_agents/openai_agent/lib/openai_image_agent.js b/llm_agents/openai_agent/lib/openai_image_agent.js
index fb78ddd1..862fb983 100644
--- a/llm_agents/openai_agent/lib/openai_image_agent.js
+++ b/llm_agents/openai_agent/lib/openai_image_agent.js
@@ -6,8 +6,12 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.openAIImageAgent = void 0;
 const openai_1 = __importDefault(require("openai"));
 const llm_utils_1 = require("@graphai/llm_utils");
-const openAIImageAgent = async ({ params, namedInputs }) => {
-    const { system, baseURL, apiKey, prompt, forWeb } = { ...params, ...namedInputs };
+const openAIImageAgent = async ({ params, namedInputs, config }) => {
+    const { system, prompt } = { ...params, ...namedInputs };
+    const { apiKey, baseURL, forWeb } = {
+        ...params,
+        ...(config || {})
+    };
     const userPrompt = (0, llm_utils_1.getMergeValue)(namedInputs, params, "mergeablePrompts", prompt);
     const systemPrompt = (0, llm_utils_1.getMergeValue)(namedInputs, params, "mergeableSystem", system);
     const openai = new openai_1.default({ apiKey, baseURL, dangerouslyAllowBrowser: !!forWeb });
diff --git a/llm_agents/openai_agent/package.json b/llm_agents/openai_agent/package.json
index cf64d1a2..c87a3d97 100644
--- a/llm_agents/openai_agent/package.json
+++ b/llm_agents/openai_agent/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@graphai/openai_agent",
-  "version": "0.2.0",
+  "version": "0.2.1",
   "description": "OpenAI agents for GraphAI.",
   "main": "lib/index.js",
   "files": [
@@ -29,7 +29,7 @@
   "homepage": "https://github.com/receptron/graphai/blob/main/llm_agents/openai_agent/README.md",
   "dependencies": {
     "@graphai/llm_utils": "^0.0.2",
-    "openai": "^4.76.1"
+    "openai": "^4.77.0"
   },
   "devDependencies": {},
   "types": "./lib/index.d.ts",
diff --git a/llm_agents/openai_agent/src/openai_agent.ts b/llm_agents/openai_agent/src/openai_agent.ts
index 76d5359c..564e9e35 100644
--- a/llm_agents/openai_agent/src/openai_agent.ts
+++ b/llm_agents/openai_agent/src/openai_agent.ts
@@ -10,13 +10,21 @@ type OpenAIInputs = {
   max_tokens?: number;
   verbose?: boolean;
   temperature?: number;
+  messages?: Array<OpenAI.ChatCompletionMessageParam>;
+  response_format?: any;
+} & GraphAILLMInputBase;
+
+
+type OpenAIConfig = {
   baseURL?: string;
   apiKey?: string;
   stream?: boolean;
-  messages?: Array<OpenAI.ChatCompletionMessageParam>;
   forWeb?: boolean;
-  response_format?: any;
-} & GraphAILLMInputBase;
+};
+
+type OpenAIParams = OpenAIInputs & OpenAIConfig;
+
+type OpenAIResult = Record<string, any> | string;
 
 const convertOpenAIChatCompletion = (response: OpenAI.ChatCompletion, messages: OpenAI.ChatCompletionMessageParam[]) => {
   const message = response?.choices[0] && response?.choices[0].message ? response?.choices[0].message : null;
@@ -51,12 +59,17 @@ const convertOpenAIChatCompletion = (response: OpenAI.ChatCompletion, messages:
   };
 };
 
-export const openAIAgent: AgentFunction<OpenAIInputs, Record<string, any> | string, OpenAIInputs> = async ({ filterParams, params, namedInputs }) => {
-  const { verbose, system, images, temperature, tools, tool_choice, max_tokens, baseURL, apiKey, stream, prompt, messages, forWeb, response_format } = {
+export const openAIAgent: AgentFunction<OpenAIParams, OpenAIResult, OpenAIInputs, OpenAIConfig> = async ({ filterParams, params, namedInputs, config }) => {
+  const { verbose, system, images, temperature, tools, tool_choice, max_tokens, prompt, messages, response_format } = {
     ...params,
     ...namedInputs,
   };
+  const { apiKey, stream, baseURL, forWeb } = {
+    ...params,
+    ...(config || {})
+  };
+
   const userPrompt = getMergeValue(namedInputs, params, "mergeablePrompts", prompt);
   const systemPrompt = getMergeValue(namedInputs, params, "mergeableSystem", system);
diff --git a/llm_agents/openai_agent/src/openai_image_agent.ts b/llm_agents/openai_agent/src/openai_image_agent.ts
index 213ab932..25418751 100644
--- a/llm_agents/openai_agent/src/openai_image_agent.ts
+++ b/llm_agents/openai_agent/src/openai_image_agent.ts
@@ -5,13 +5,23 @@ import { GraphAILLMInputBase, getMergeValue } from "@graphai/llm_utils";
 
 type OpenAIInputs = {
   model?: string;
+} & GraphAILLMInputBase;
+
+type OpenAIConfig = {
   baseURL?: string;
   apiKey?: string;
   forWeb?: boolean;
-} & GraphAILLMInputBase;
+};
 
-export const openAIImageAgent: AgentFunction<OpenAIInputs, Record<string, any> | string, OpenAIInputs> = async ({ params, namedInputs }) => {
-  const { system, baseURL, apiKey, prompt, forWeb } = { ...params, ...namedInputs };
+type OpenAIParams = OpenAIInputs & OpenAIConfig;
+
+export const openAIImageAgent: AgentFunction<OpenAIParams, Record<string, any> | string, OpenAIInputs, OpenAIConfig> = async ({ params, namedInputs, config }) => {
+  const { system, prompt } = { ...params, ...namedInputs };
+
+  const { apiKey, baseURL, forWeb } = {
+    ...params,
+    ...(config || {})
+  };
   const userPrompt = getMergeValue(namedInputs, params, "mergeablePrompts", prompt);
   const systemPrompt = getMergeValue(namedInputs, params, "mergeableSystem", system);
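
Note on the diff above: the connection-level settings (apiKey, baseURL, stream, forWeb) move out of OpenAIInputs into a new OpenAIConfig type, and each agent now resolves them from { ...params, ...(config || {}) }. Because config is spread last, a value supplied through the runtime config argument overrides the same key in params; this keeps credentials out of the serializable graph data while remaining backward compatible with callers that still pass them via params.

A minimal sketch of the resulting call shape, invoking the agent function directly (hypothetical; inside a GraphAI graph the runtime constructs this context and supplies config itself):

    import { openAIAgent } from "@graphai/openai_agent";

    const res = await openAIAgent({
      params: { model: "gpt-4o", temperature: 0.7 },  // serializable, graph-level settings
      namedInputs: { prompt: "Hello, world" },        // data-flow inputs
      config: { apiKey: process.env.OPENAI_API_KEY }, // runtime-only; overrides params keys
      filterParams: {},
    } as any); // cast because the full agent context carries more fields than shown here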