Skip to content

Commit

Permalink
Merge pull request #859 from isamu/openAIAgentConfig
Browse files Browse the repository at this point in the history
OpenAI agent config
  • Loading branch information
isamu authored Dec 29, 2024
2 parents d0b272f + d4dfe56 commit c611174
Show file tree
Hide file tree
Showing 7 changed files with 58 additions and 20 deletions.
12 changes: 8 additions & 4 deletions llm_agents/openai_agent/lib/openai_agent.d.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,14 +9,18 @@ type OpenAIInputs = {
max_tokens?: number;
verbose?: boolean;
temperature?: number;
messages?: Array<OpenAI.ChatCompletionMessageParam>;
response_format?: any;
} & GraphAILLMInputBase;
type OpenAIConfig = {
baseURL?: string;
apiKey?: string;
stream?: boolean;
messages?: Array<OpenAI.ChatCompletionMessageParam>;
forWeb?: boolean;
response_format?: any;
} & GraphAILLMInputBase;
export declare const openAIAgent: AgentFunction<OpenAIInputs, Record<string, any> | string, OpenAIInputs>;
};
type OpenAIParams = OpenAIInputs & OpenAIConfig;
type OpenAIResult = Record<string, any> | string;
export declare const openAIAgent: AgentFunction<OpenAIParams, OpenAIResult, OpenAIInputs, OpenAIConfig>;
export declare const openAIMockAgent: AgentFunction<{
model?: string;
query?: string;
Expand Down
8 changes: 6 additions & 2 deletions llm_agents/openai_agent/lib/openai_agent.js
Original file line number Diff line number Diff line change
Expand Up @@ -37,11 +37,15 @@ const convertOpenAIChatCompletion = (response, messages) => {
messages,
};
};
const openAIAgent = async ({ filterParams, params, namedInputs }) => {
const { verbose, system, images, temperature, tools, tool_choice, max_tokens, baseURL, apiKey, stream, prompt, messages, forWeb, response_format } = {
const openAIAgent = async ({ filterParams, params, namedInputs, config }) => {
const { verbose, system, images, temperature, tools, tool_choice, max_tokens, prompt, messages, response_format } = {
...params,
...namedInputs,
};
const { apiKey, stream, baseURL, forWeb } = {
...params,
...(config || {})
};
const userPrompt = (0, llm_utils_1.getMergeValue)(namedInputs, params, "mergeablePrompts", prompt);
const systemPrompt = (0, llm_utils_1.getMergeValue)(namedInputs, params, "mergeableSystem", system);
const messagesCopy = (0, llm_utils_1.getMessages)(systemPrompt, messages);
Expand Down
7 changes: 5 additions & 2 deletions llm_agents/openai_agent/lib/openai_image_agent.d.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,13 @@ import { AgentFunction, AgentFunctionInfo } from "graphai";
import { GraphAILLMInputBase } from "@graphai/llm_utils";
type OpenAIInputs = {
model?: string;
} & GraphAILLMInputBase;
type OpenAIConfig = {
baseURL?: string;
apiKey?: string;
forWeb?: boolean;
} & GraphAILLMInputBase;
export declare const openAIImageAgent: AgentFunction<OpenAIInputs, Record<string, any> | string, OpenAIInputs>;
};
type OpenAIParams = OpenAIInputs & OpenAIConfig;
export declare const openAIImageAgent: AgentFunction<OpenAIParams, Record<string, any> | string, OpenAIInputs, OpenAIConfig>;
declare const openAIImageAgentInfo: AgentFunctionInfo;
export default openAIImageAgentInfo;
8 changes: 6 additions & 2 deletions llm_agents/openai_agent/lib/openai_image_agent.js
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,12 @@ Object.defineProperty(exports, "__esModule", { value: true });
exports.openAIImageAgent = void 0;
const openai_1 = __importDefault(require("openai"));
const llm_utils_1 = require("@graphai/llm_utils");
const openAIImageAgent = async ({ params, namedInputs }) => {
const { system, baseURL, apiKey, prompt, forWeb } = { ...params, ...namedInputs };
const openAIImageAgent = async ({ params, namedInputs, config }) => {
const { system, prompt } = { ...params, ...namedInputs };
const { apiKey, baseURL, forWeb } = {
...params,
...(config || {})
};
const userPrompt = (0, llm_utils_1.getMergeValue)(namedInputs, params, "mergeablePrompts", prompt);
const systemPrompt = (0, llm_utils_1.getMergeValue)(namedInputs, params, "mergeableSystem", system);
const openai = new openai_1.default({ apiKey, baseURL, dangerouslyAllowBrowser: !!forWeb });
Expand Down
4 changes: 2 additions & 2 deletions llm_agents/openai_agent/package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "@graphai/openai_agent",
"version": "0.2.0",
"version": "0.2.1",
"description": "OpenAI agents for GraphAI.",
"main": "lib/index.js",
"files": [
Expand Down Expand Up @@ -29,7 +29,7 @@
"homepage": "https://github.com/receptron/graphai/blob/main/llm_agents/openai_agent/README.md",
"dependencies": {
"@graphai/llm_utils": "^0.0.2",
"openai": "^4.76.1"
"openai": "^4.77.0"
},
"devDependencies": {},
"types": "./lib/index.d.ts",
Expand Down
23 changes: 18 additions & 5 deletions llm_agents/openai_agent/src/openai_agent.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,13 +10,21 @@ type OpenAIInputs = {
max_tokens?: number;
verbose?: boolean;
temperature?: number;
messages?: Array<OpenAI.ChatCompletionMessageParam>;
response_format?: any;
} & GraphAILLMInputBase;


type OpenAIConfig = {
baseURL?: string;
apiKey?: string;
stream?: boolean;
messages?: Array<OpenAI.ChatCompletionMessageParam>;
forWeb?: boolean;
response_format?: any;
} & GraphAILLMInputBase;
}

type OpenAIParams = OpenAIInputs & OpenAIConfig;

type OpenAIResult = Record<string, any> | string;

const convertOpenAIChatCompletion = (response: OpenAI.ChatCompletion, messages: OpenAI.ChatCompletionMessageParam[]) => {
const message = response?.choices[0] && response?.choices[0].message ? response?.choices[0].message : null;
Expand Down Expand Up @@ -51,12 +59,17 @@ const convertOpenAIChatCompletion = (response: OpenAI.ChatCompletion, messages:
};
};

export const openAIAgent: AgentFunction<OpenAIInputs, Record<string, any> | string, OpenAIInputs> = async ({ filterParams, params, namedInputs }) => {
const { verbose, system, images, temperature, tools, tool_choice, max_tokens, baseURL, apiKey, stream, prompt, messages, forWeb, response_format } = {
export const openAIAgent: AgentFunction<OpenAIParams, OpenAIResult, OpenAIInputs, OpenAIConfig> = async ({ filterParams, params, namedInputs, config }) => {
const { verbose, system, images, temperature, tools, tool_choice, max_tokens, prompt, messages, response_format } = {
...params,
...namedInputs,
};

const { apiKey, stream, baseURL, forWeb } = {
...params,
...(config ||{})
};

const userPrompt = getMergeValue(namedInputs, params, "mergeablePrompts", prompt);
const systemPrompt = getMergeValue(namedInputs, params, "mergeableSystem", system);

Expand Down
16 changes: 13 additions & 3 deletions llm_agents/openai_agent/src/openai_image_agent.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,13 +5,23 @@ import { GraphAILLMInputBase, getMergeValue } from "@graphai/llm_utils";

type OpenAIInputs = {
model?: string;
} & GraphAILLMInputBase;

type OpenAIConfig = {
baseURL?: string;
apiKey?: string;
forWeb?: boolean;
} & GraphAILLMInputBase;
};

export const openAIImageAgent: AgentFunction<OpenAIInputs, Record<string, any> | string, OpenAIInputs> = async ({ params, namedInputs }) => {
const { system, baseURL, apiKey, prompt, forWeb } = { ...params, ...namedInputs };
type OpenAIParams = OpenAIInputs & OpenAIConfig;

export const openAIImageAgent: AgentFunction<OpenAIParams, Record<string, any> | string, OpenAIInputs, OpenAIConfig> = async ({ params, namedInputs, config }) => {
const { system, prompt } = { ...params, ...namedInputs };

const { apiKey, baseURL, forWeb } = {
...params,
...(config ||{})
};

const userPrompt = getMergeValue(namedInputs, params, "mergeablePrompts", prompt);
const systemPrompt = getMergeValue(namedInputs, params, "mergeableSystem", system);
Expand Down

0 comments on commit c611174

Please sign in to comment.