From c64f18d486613409e4b2bd5995c12ba8b95bb7f5 Mon Sep 17 00:00:00 2001
From: Michael Farrell
Date: Thu, 2 Nov 2023 22:44:16 -0700
Subject: [PATCH] Fixes last set of bugs (#265)

---
 package.json                            |  2 +-
 src/ai/TranscendPromptManager.ts        | 38 +++++++++++++++++--------
 src/graphql/fetchLargeLanguageModels.ts |  4 +--
 src/graphql/gqls/largeLanguageModel.ts  |  2 +-
 src/graphql/reportPromptRun.ts          |  4 +--
 5 files changed, 32 insertions(+), 18 deletions(-)

diff --git a/package.json b/package.json
index 596fa819..87bc7b4d 100644
--- a/package.json
+++ b/package.json
@@ -2,7 +2,7 @@
   "author": "Transcend Inc.",
   "name": "@transcend-io/cli",
   "description": "Small package containing useful typescript utilities.",
-  "version": "4.114.1",
+  "version": "4.114.2",
   "homepage": "https://github.com/transcend-io/cli",
   "repository": {
     "type": "git",
diff --git a/src/ai/TranscendPromptManager.ts b/src/ai/TranscendPromptManager.ts
index 5ccbbbcd..97c7215e 100644
--- a/src/ai/TranscendPromptManager.ts
+++ b/src/ai/TranscendPromptManager.ts
@@ -114,6 +114,14 @@ export interface ReportPromptRunOptions
   };
 }
 
+const jsonParseSafe = (obj: string): unknown => {
+  try {
+    return JSON.parse(obj);
+  } catch (e) {
+    return obj;
+  }
+};
+
 /**
  * A class that is capable of loading and insert variables into prompts from
  * Transcend's Prompt Manager
@@ -421,7 +429,11 @@ export class TranscendPromptManager<
       : response;
 
     // Parse via codec
-    return decodeCodec(promptInput.outputCodec, extracted);
+    return decodeCodec(
+      promptInput.outputCodec,
+      jsonParseSafe(extracted),
+      false,
+    );
   }
 
   /**
@@ -433,7 +445,7 @@ export class TranscendPromptManager<
    */
   async reportAndParsePromptRun(
     promptName: TPromptName,
-    options: Requirize,
+    { largeLanguageModel, ...options }: ReportPromptRunOptions,
   ): Promise> {
     const name =
       options.name ||
@@ -471,9 +483,8 @@
       options.promptRunMessages[options.promptRunMessages.length - 1].content;
 
     // Look up the large language model being report on
-    const largeLanguageModel = this.getLargeLanguageModel(
-      options.largeLanguageModel,
-    );
+    const largeLanguageModelInstance =
+      this.getLargeLanguageModel(largeLanguageModel);
 
     let parsed: t.TypeOf;
     try {
@@ -484,8 +495,9 @@
         productArea: PromptRunProductArea.PromptManager,
         ...options,
         name,
+        error: err.message,
         status: QueueStatus.Error,
-        largeLanguageModelId: largeLanguageModel.id,
+        largeLanguageModelId: largeLanguageModelInstance.id,
         promptId: promptInput.id,
         promptRunMessages: options.promptRunMessages.map((message, ind) => ({
           ...message,
@@ -501,7 +513,7 @@
       ...options,
       name,
       status: QueueStatus.Resolved,
-      largeLanguageModelId: largeLanguageModel.id,
+      largeLanguageModelId: largeLanguageModelInstance.id,
       promptId: promptInput.id,
       promptRunMessages: options.promptRunMessages.map((message, ind) => ({
         ...message,
@@ -521,7 +533,10 @@
    */
   async reportPromptRunError(
     promptName: TPromptName,
-    options: ReportPromptRunOptions,
+    {
+      largeLanguageModel,
+      ...options
+    }: Requirize,
   ): Promise {
     const name =
       options.name ||
@@ -547,16 +562,15 @@
     }
 
     // Look up the large language model being report on
-    const largeLanguageModel = this.getLargeLanguageModel(
-      options.largeLanguageModel,
-    );
+    const largeLanguageModelInstance =
+      this.getLargeLanguageModel(largeLanguageModel);
 
     await reportPromptRun(this.graphQLClient, {
       productArea: PromptRunProductArea.PromptManager,
       ...options,
       name,
       status: QueueStatus.Error,
-      largeLanguageModelId: largeLanguageModel.id,
+      largeLanguageModelId: largeLanguageModelInstance.id,
       promptId: promptInput.id,
       promptRunMessages: options.promptRunMessages.map((message, ind) => ({
         ...message,
diff --git a/src/graphql/fetchLargeLanguageModels.ts b/src/graphql/fetchLargeLanguageModels.ts
index 6ee48cbd..10d81e26 100644
--- a/src/graphql/fetchLargeLanguageModels.ts
+++ b/src/graphql/fetchLargeLanguageModels.ts
@@ -1,6 +1,6 @@
 import { GraphQLClient } from 'graphql-request';
 import { LargeLanguageModelClient } from '@transcend-io/privacy-types';
-import { PROMPT_TEMPLATES } from './gqls';
+import { LARGE_LANGUAGE_MODELS } from './gqls';
 import { makeGraphQLRequest } from './makeGraphQLRequest';
 
 export interface LargeLanguageModel {
@@ -40,7 +40,7 @@ export async function fetchAllLargeLanguageModels(
       /** List */
       nodes: LargeLanguageModel[];
     };
-  }>(client, PROMPT_TEMPLATES, {
+  }>(client, LARGE_LANGUAGE_MODELS, {
     first: PAGE_SIZE,
     offset,
   });
diff --git a/src/graphql/gqls/largeLanguageModel.ts b/src/graphql/gqls/largeLanguageModel.ts
index ff4c6f95..cf0cd9ad 100644
--- a/src/graphql/gqls/largeLanguageModel.ts
+++ b/src/graphql/gqls/largeLanguageModel.ts
@@ -7,7 +7,7 @@ export const LARGE_LANGUAGE_MODELS = gql`
   query TranscendCliLargeLanguageModels(
     $first: Int!
     $offset: Int!
-    $filterBy: PromptFiltersInput
+    $filterBy: LargeLanguageModelFiltersInput
   ) {
     largeLanguageModels(
       first: $first
diff --git a/src/graphql/reportPromptRun.ts b/src/graphql/reportPromptRun.ts
index aed6af0f..844e6c72 100644
--- a/src/graphql/reportPromptRun.ts
+++ b/src/graphql/reportPromptRun.ts
@@ -1,5 +1,5 @@
 import { GraphQLClient } from 'graphql-request';
-import { CREATE_PROMPT } from './gqls';
+import { REPORT_PROMPT_RUN } from './gqls';
 import { makeGraphQLRequest } from './makeGraphQLRequest';
 import {
   QueueStatus,
@@ -65,7 +65,7 @@ export async function reportPromptRun(
         id: string;
       };
     };
-  }>(client, CREATE_PROMPT, {
+  }>(client, REPORT_PROMPT_RUN, {
     input: {
       ...input,
       promptRunMessages: input.promptRunMessages.map(