From b55084ff9764851be537ce8847a387ccc8d04bd6 Mon Sep 17 00:00:00 2001
From: Ryan Lamb <4955475+kinyoklion@users.noreply.github.com>
Date: Thu, 14 Nov 2024 12:56:17 -0800
Subject: [PATCH 1/5] feat: Change the typing for the LDAIConfig.

---
 .../__tests__/LDAIClientImpl.test.ts          |  5 ++-
 packages/sdk/server-ai/src/LDAIClientImpl.ts  | 40 +++++++++++--------
 packages/sdk/server-ai/src/api/LDAIClient.ts  | 16 ++------
 .../server-ai/src/api/config/LDAIConfig.ts    | 18 +++++++--
 4 files changed, 43 insertions(+), 36 deletions(-)

diff --git a/packages/sdk/server-ai/__tests__/LDAIClientImpl.test.ts b/packages/sdk/server-ai/__tests__/LDAIClientImpl.test.ts
index a396c2c1a..4317b93e4 100644
--- a/packages/sdk/server-ai/__tests__/LDAIClientImpl.test.ts
+++ b/packages/sdk/server-ai/__tests__/LDAIClientImpl.test.ts
@@ -1,6 +1,6 @@
 import { LDContext } from '@launchdarkly/js-server-sdk-common';
 
-import { LDGenerationConfig } from '../src/api/config';
+import { LDAIDefaults, LDGenerationConfig } from '../src/api/config';
 import { LDAIClientImpl } from '../src/LDAIClientImpl';
 import { LDClientMin } from '../src/LDClientMin';
 
@@ -32,9 +32,10 @@ it('handles empty variables in template interpolation', () => {
 it('returns model config with interpolated prompts', async () => {
   const client = new LDAIClientImpl(mockLdClient);
   const key = 'test-flag';
-  const defaultValue: LDGenerationConfig = {
+  const defaultValue: LDAIDefaults = {
     model: { modelId: 'test', name: 'test-model' },
     prompt: [],
+    enabled: true,
   };
 
   const mockVariation = {
diff --git a/packages/sdk/server-ai/src/LDAIClientImpl.ts b/packages/sdk/server-ai/src/LDAIClientImpl.ts
index dc87d270e..bec6ed834 100644
--- a/packages/sdk/server-ai/src/LDAIClientImpl.ts
+++ b/packages/sdk/server-ai/src/LDAIClientImpl.ts
@@ -2,7 +2,13 @@ import * as Mustache from 'mustache';
 
 import { LDContext } from '@launchdarkly/js-server-sdk-common';
 
-import { LDAIConfig, LDGenerationConfig, LDMessage, LDModelConfig } from './api/config';
+import {
+  LDAIConfig,
+  LDAIDefaults,
+  LDGenerationConfig,
+  LDMessage,
+  LDModelConfig,
+} from './api/config';
 import { LDAIClient } from './api/LDAIClient';
 import { LDAIConfigTrackerImpl } from './LDAIConfigTrackerImpl';
 import { LDClientMin } from './LDClientMin';
@@ -32,16 +38,28 @@ export class LDAIClientImpl implements LDAIClient {
     return Mustache.render(template, variables, undefined, { escape: (item: any) => item });
   }
 
-  async modelConfig<TDefault extends LDGenerationConfig>(
+  async modelConfig(
     key: string,
     context: LDContext,
-    defaultValue: TDefault,
+    defaultValue: LDAIDefaults,
     variables?: Record<string, unknown>,
   ): Promise<LDAIConfig> {
     const value: VariationContent = await this._ldClient.variation(key, context, defaultValue);
+    const tracker = new LDAIConfigTrackerImpl(
+      this._ldClient,
+      key,
+      // eslint-disable-next-line no-underscore-dangle
+      value._ldMeta?.versionKey ?? '',
+      context,
+    );
+    // eslint-disable-next-line no-underscore-dangle
+    const enabled = !!value._ldMeta?.enabled;
+    const config: LDAIConfig = {
+      tracker,
+      enabled,
+    };
 
     // We are going to modify the contents before returning them, so we make a copy.
     // This isn't a deep copy and the application developer should not modify the returned content.
-    const config: LDGenerationConfig = {};
     if (value.model) {
       config.model = { ...value.model };
     }
@@ -54,18 +72,6 @@ export class LDAIClientImpl implements LDAIClient {
       }));
     }
 
-    return {
-      config,
-      // eslint-disable-next-line no-underscore-dangle
-      tracker: new LDAIConfigTrackerImpl(
-        this._ldClient,
-        key,
-        // eslint-disable-next-line no-underscore-dangle
-        value._ldMeta?.versionKey ?? '',
-        context,
-      ),
-      // eslint-disable-next-line no-underscore-dangle
-      enabled: !!value._ldMeta?.enabled,
-    };
+    return config;
   }
 }
diff --git a/packages/sdk/server-ai/src/api/LDAIClient.ts b/packages/sdk/server-ai/src/api/LDAIClient.ts
index cffd657a7..d716bc230 100644
--- a/packages/sdk/server-ai/src/api/LDAIClient.ts
+++ b/packages/sdk/server-ai/src/api/LDAIClient.ts
@@ -1,16 +1,6 @@
 import { LDContext } from '@launchdarkly/js-server-sdk-common';
 
-import { LDAIConfig, LDGenerationConfig } from './config/LDAIConfig';
-
-/**
- * Interface for default model configuration.
- */
-export interface LDAIDefaults extends LDGenerationConfig {
-  /**
-   * Whether the configuration is enabled.
-   */
-  enabled?: boolean;
-}
+import { LDAIConfig, LDAIDefaults } from './config/LDAIConfig';
 
 /**
  * Interface for performing AI operations using LaunchDarkly.
@@ -77,10 +67,10 @@ export interface LDAIClient {
    * }
    * ```
    */
-  modelConfig<TDefault extends LDAIDefaults>(
+  modelConfig(
    key: string,
    context: LDContext,
-    defaultValue: TDefault,
+    defaultValue: LDAIDefaults,
    variables?: Record<string, unknown>,
  ): Promise<LDAIConfig>;
 }
diff --git a/packages/sdk/server-ai/src/api/config/LDAIConfig.ts b/packages/sdk/server-ai/src/api/config/LDAIConfig.ts
index 432a0a732..e5d09b85f 100644
--- a/packages/sdk/server-ai/src/api/config/LDAIConfig.ts
+++ b/packages/sdk/server-ai/src/api/config/LDAIConfig.ts
@@ -7,7 +7,7 @@ export interface LDModelConfig {
   /**
    * The ID of the model.
    */
-  modelId?: string;
+  modelId: string;
 
   /**
    * Tuning parameter for randomness versus determinism. Exact effect will be determined by the
@@ -55,13 +55,17 @@ export interface LDGenerationConfig {
 }
 
 /**
- * AI Config value and tracker.
+ * AI configuration and tracker.
  */
 export interface LDAIConfig {
   /**
-   * The result of the AI Config customization.
+   * Optional model configuration.
+   */
+  model?: LDModelConfig;
+
+  /**
+   * Optional prompt data.
    */
-  config: LDGenerationConfig;
+  prompt?: LDMessage[];
 
   /**
    * A tracker which can be used to generate analytics.
@@ -73,3 +77,9 @@ export interface LDAIConfig {
    */
   enabled: boolean;
 }
+
+/**
+ * Default value for a `modelConfig`. This is the same as the LDAIConfig, but it does not include
+ * a tracker.
+ */
+export type LDAIDefaults = Omit<LDAIConfig, 'tracker'>;

From 7f820d22c3db69275d191fd29b6b29eaea6c19b0 Mon Sep 17 00:00:00 2001
From: Ryan Lamb <4955475+kinyoklion@users.noreply.github.com>
Date: Thu, 14 Nov 2024 14:56:14 -0800
Subject: [PATCH 2/5] Update examples.

---
 packages/sdk/server-ai/examples/bedrock/src/index.ts | 8 ++++++--
 packages/sdk/server-ai/examples/openai/src/index.ts  | 7 +++++--
 packages/sdk/server-ai/src/LDAIClientImpl.ts         | 8 +-------
 packages/sdk/server-ai/src/api/config/LDAIConfig.ts  | 11 +++++++++--
 4 files changed, 21 insertions(+), 13 deletions(-)

diff --git a/packages/sdk/server-ai/examples/bedrock/src/index.ts b/packages/sdk/server-ai/examples/bedrock/src/index.ts
index 95bd83da6..39fd3fb03 100644
--- a/packages/sdk/server-ai/examples/bedrock/src/index.ts
+++ b/packages/sdk/server-ai/examples/bedrock/src/index.ts
@@ -66,8 +66,12 @@ async function main() {
   const completion = tracker.trackBedrockConverse(
     await awsClient.send(
       new ConverseCommand({
-        modelId: aiConfig.config.model?.modelId ?? 'no-model',
-        messages: mapPromptToConversation(aiConfig.config.prompt ?? []),
+        modelId: aiConfig.model?.modelId ?? 'no-model',
+        messages: mapPromptToConversation(aiConfig.prompt ?? []),
+        inferenceConfig: {
+          temperature: aiConfig.model?.temperature ?? 0.5,
+          maxTokens: aiConfig.model?.maxTokens ?? 4096,
+        },
       }),
     ),
   );
diff --git a/packages/sdk/server-ai/examples/openai/src/index.ts b/packages/sdk/server-ai/examples/openai/src/index.ts
index 041d1f076..2ce5636a7 100644
--- a/packages/sdk/server-ai/examples/openai/src/index.ts
+++ b/packages/sdk/server-ai/examples/openai/src/index.ts
@@ -1,3 +1,4 @@
+
 /* eslint-disable no-console */
 import { OpenAI } from 'openai';
 
@@ -60,8 +61,10 @@ async function main(): Promise<void> {
   const { tracker } = aiConfig;
   const completion = await tracker.trackOpenAI(async () =>
     client.chat.completions.create({
-      messages: aiConfig.config.prompt || [],
-      model: aiConfig.config.model?.modelId || 'gpt-4',
+      messages: aiConfig.prompt || [],
+      model: aiConfig.model?.modelId || 'gpt-4',
+      temperature: aiConfig.model?.temperature ?? 0.5,
+      max_tokens: aiConfig.model?.maxTokens ?? 4096,
     }),
   );
diff --git a/packages/sdk/server-ai/src/LDAIClientImpl.ts b/packages/sdk/server-ai/src/LDAIClientImpl.ts
index bec6ed834..7cf7b6f2d 100644
--- a/packages/sdk/server-ai/src/LDAIClientImpl.ts
+++ b/packages/sdk/server-ai/src/LDAIClientImpl.ts
@@ -2,13 +2,7 @@ import * as Mustache from 'mustache';
 
 import { LDContext } from '@launchdarkly/js-server-sdk-common';
 
-import {
-  LDAIConfig,
-  LDAIDefaults,
-  LDGenerationConfig,
-  LDMessage,
-  LDModelConfig,
-} from './api/config';
+import { LDAIConfig, LDAIDefaults, LDMessage, LDModelConfig } from './api/config';
 import { LDAIClient } from './api/LDAIClient';
 import { LDAIConfigTrackerImpl } from './LDAIConfigTrackerImpl';
 import { LDClientMin } from './LDClientMin';
diff --git a/packages/sdk/server-ai/src/api/config/LDAIConfig.ts b/packages/sdk/server-ai/src/api/config/LDAIConfig.ts
index e5d09b85f..0144ae439 100644
--- a/packages/sdk/server-ai/src/api/config/LDAIConfig.ts
+++ b/packages/sdk/server-ai/src/api/config/LDAIConfig.ts
@@ -80,6 +80,13 @@ export interface LDAIConfig {
 
 /**
  * Default value for a `modelConfig`. This is the same as the LDAIConfig, but it does not include
- * a tracker.
+ * a tracker and `enabled` is optional.
  */
-export type LDAIDefaults = Omit<LDAIConfig, 'tracker'>;
+export type LDAIDefaults = Omit<LDAIConfig, 'tracker'> & {
+  /**
+   * Whether the configuration is enabled.
+   *
+   * defaults to false
+   */
+  enabled?: boolean;
+};

From 259ad3fd72320b03c5cc8b748735d949a5cee5d3 Mon Sep 17 00:00:00 2001
From: Ryan Lamb <4955475+kinyoklion@users.noreply.github.com>
Date: Thu, 14 Nov 2024 15:08:52 -0800
Subject: [PATCH 3/5] Lint.

--- packages/sdk/server-ai/examples/openai/src/index.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/sdk/server-ai/examples/openai/src/index.ts b/packages/sdk/server-ai/examples/openai/src/index.ts index 2ce5636a7..abdae7612 100644 --- a/packages/sdk/server-ai/examples/openai/src/index.ts +++ b/packages/sdk/server-ai/examples/openai/src/index.ts @@ -1,4 +1,3 @@ - /* eslint-disable no-console */ import { OpenAI } from 'openai'; From b3a200da67b548ce436af04ca820106c4def5a71 Mon Sep 17 00:00:00 2001 From: Ryan Lamb <4955475+kinyoklion@users.noreply.github.com> Date: Thu, 14 Nov 2024 15:17:52 -0800 Subject: [PATCH 4/5] Fix tests. --- .../__tests__/LDAIClientImpl.test.ts | 32 +++++++++---------- .../server-ai/src/api/config/LDAIConfig.ts | 14 -------- 2 files changed, 15 insertions(+), 31 deletions(-) diff --git a/packages/sdk/server-ai/__tests__/LDAIClientImpl.test.ts b/packages/sdk/server-ai/__tests__/LDAIClientImpl.test.ts index 4317b93e4..a1aa24041 100644 --- a/packages/sdk/server-ai/__tests__/LDAIClientImpl.test.ts +++ b/packages/sdk/server-ai/__tests__/LDAIClientImpl.test.ts @@ -1,6 +1,6 @@ import { LDContext } from '@launchdarkly/js-server-sdk-common'; -import { LDAIDefaults, LDGenerationConfig } from '../src/api/config'; +import { LDAIDefaults } from '../src/api/config'; import { LDAIClientImpl } from '../src/LDAIClientImpl'; import { LDClientMin } from '../src/LDClientMin'; @@ -56,13 +56,11 @@ it('returns model config with interpolated prompts', async () => { const result = await client.modelConfig(key, testContext, defaultValue, variables); expect(result).toEqual({ - config: { - model: { modelId: 'example-provider', name: 'imagination' }, - prompt: [ - { role: 'system', content: 'Hello John' }, - { role: 'user', content: 'Score: 42' }, - ], - }, + model: { modelId: 'example-provider', name: 'imagination' }, + prompt: [ + { role: 'system', content: 'Hello John' }, + { role: 'user', content: 'Score: 42' }, + ], tracker: expect.any(Object), enabled: true, }); @@ -71,7 +69,7 @@ it('returns model config with interpolated prompts', async () => { it('includes context in variables for prompt interpolation', async () => { const client = new LDAIClientImpl(mockLdClient); const key = 'test-flag'; - const defaultValue: LDGenerationConfig = { + const defaultValue: LDAIDefaults = { model: { modelId: 'test', name: 'test-model' }, prompt: [], }; @@ -85,13 +83,13 @@ it('includes context in variables for prompt interpolation', async () => { const result = await client.modelConfig(key, testContext, defaultValue); - expect(result.config.prompt?.[0].content).toBe('User key: test-user'); + expect(result.prompt?.[0].content).toBe('User key: test-user'); }); it('handles missing metadata in variation', async () => { const client = new LDAIClientImpl(mockLdClient); const key = 'test-flag'; - const defaultValue: LDGenerationConfig = { + const defaultValue: LDAIDefaults = { model: { modelId: 'test', name: 'test-model' }, prompt: [], }; @@ -106,10 +104,8 @@ it('handles missing metadata in variation', async () => { const result = await client.modelConfig(key, testContext, defaultValue); expect(result).toEqual({ - config: { - model: { modelId: 'example-provider', name: 'imagination' }, - prompt: [{ role: 'system', content: 'Hello' }], - }, + model: { modelId: 'example-provider', name: 'imagination' }, + prompt: [{ role: 'system', content: 'Hello' }], tracker: expect.any(Object), enabled: false, }); @@ -118,9 +114,10 @@ it('handles missing metadata in variation', async () => { it('passes the default 
value to the underlying client', async () => { const client = new LDAIClientImpl(mockLdClient); const key = 'non-existent-flag'; - const defaultValue: LDGenerationConfig = { + const defaultValue: LDAIDefaults = { model: { modelId: 'default-model', name: 'default' }, prompt: [{ role: 'system', content: 'Default prompt' }], + enabled: true, }; mockLdClient.variation.mockResolvedValue(defaultValue); @@ -128,7 +125,8 @@ it('passes the default value to the underlying client', async () => { const result = await client.modelConfig(key, testContext, defaultValue); expect(result).toEqual({ - config: defaultValue, + model: defaultValue.model, + prompt: defaultValue.prompt, tracker: expect.any(Object), enabled: false, }); diff --git a/packages/sdk/server-ai/src/api/config/LDAIConfig.ts b/packages/sdk/server-ai/src/api/config/LDAIConfig.ts index 0144ae439..2dfa4baee 100644 --- a/packages/sdk/server-ai/src/api/config/LDAIConfig.ts +++ b/packages/sdk/server-ai/src/api/config/LDAIConfig.ts @@ -40,20 +40,6 @@ export interface LDMessage { content: string; } -/** - * Configuration which affects generation. - */ -export interface LDGenerationConfig { - /** - * Optional model configuration. - */ - model?: LDModelConfig; - /** - * Optional prompt data. - */ - prompt?: LDMessage[]; -} - /** * AI configuration and tracker. */ From 3a7fd63199a3a089a5b0a63168e1186eb3abaa98 Mon Sep 17 00:00:00 2001 From: Ryan Lamb <4955475+kinyoklion@users.noreply.github.com> Date: Thu, 14 Nov 2024 15:20:52 -0800 Subject: [PATCH 5/5] Expand tests. --- packages/sdk/server-ai/__tests__/LDAIClientImpl.test.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/sdk/server-ai/__tests__/LDAIClientImpl.test.ts b/packages/sdk/server-ai/__tests__/LDAIClientImpl.test.ts index a1aa24041..d586b7c0c 100644 --- a/packages/sdk/server-ai/__tests__/LDAIClientImpl.test.ts +++ b/packages/sdk/server-ai/__tests__/LDAIClientImpl.test.ts @@ -39,7 +39,7 @@ it('returns model config with interpolated prompts', async () => { }; const mockVariation = { - model: { modelId: 'example-provider', name: 'imagination' }, + model: { modelId: 'example-provider', name: 'imagination', temperature: 0.7, maxTokens: 4096 }, prompt: [ { role: 'system', content: 'Hello {{name}}' }, { role: 'user', content: 'Score: {{score}}' }, @@ -56,7 +56,7 @@ it('returns model config with interpolated prompts', async () => { const result = await client.modelConfig(key, testContext, defaultValue, variables); expect(result).toEqual({ - model: { modelId: 'example-provider', name: 'imagination' }, + model: { modelId: 'example-provider', name: 'imagination', temperature: 0.7, maxTokens: 4096 }, prompt: [ { role: 'system', content: 'Hello John' }, { role: 'user', content: 'Score: 42' },
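
Reviewer note: a minimal sketch (not part of the patch series) of what calling code looks like once these commits land. `model` and `prompt` move from the nested `aiConfig.config` object onto the returned `LDAIConfig` itself, and the default value is typed as `LDAIDefaults` rather than a generic `TDefault`. The flag key, context handling, and default values below are illustrative placeholders, and the root-package imports assume `@launchdarkly/server-sdk-ai` re-exports these types as the in-repo examples suggest.

```typescript
/* eslint-disable no-console */
import { LDContext } from '@launchdarkly/js-server-sdk-common';
import { LDAIClient, LDAIDefaults } from '@launchdarkly/server-sdk-ai';

// Hypothetical helper demonstrating the post-change call shape. `aiClient` is
// assumed to be an already-constructed LDAIClient (e.g. from initAi()).
async function runExample(aiClient: LDAIClient, context: LDContext): Promise<void> {
  // LDAIDefaults is LDAIConfig minus the tracker; `enabled` is optional and
  // treated as false when omitted. `modelId` is required as of PATCH 1.
  const defaultValue: LDAIDefaults = {
    model: { modelId: 'gpt-4' },
    prompt: [{ role: 'system', content: 'Hello {{name}}' }],
    enabled: true,
  };

  const aiConfig = await aiClient.modelConfig('my-ai-config', context, defaultValue, {
    name: 'Sandy', // interpolated into {{name}} in the prompt
  });

  if (aiConfig.enabled) {
    // Previously these were aiConfig.config.model and aiConfig.config.prompt.
    console.log(aiConfig.model?.modelId, aiConfig.prompt);
    // The tracker still rides along on the returned config for analytics calls.
    console.log(typeof aiConfig.tracker);
  }
}
```

As the 'passes the default value to the underlying client' test above shows, `enabled` on the returned config is derived from the variation's `_ldMeta`, not from the default value, so a default of `enabled: true` still yields `enabled: false` when no metadata is present.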