Fixes last set of bugs (#265)
michaelfarrell76 authored Nov 3, 2023
1 parent 597bff1 commit c64f18d
Showing 5 changed files with 32 additions and 18 deletions.
2 changes: 1 addition & 1 deletion package.json
@@ -2,7 +2,7 @@
   "author": "Transcend Inc.",
   "name": "@transcend-io/cli",
   "description": "Small package containing useful typescript utilities.",
-  "version": "4.114.1",
+  "version": "4.114.2",
   "homepage": "https://github.com/transcend-io/cli",
   "repository": {
     "type": "git",
38 changes: 26 additions & 12 deletions src/ai/TranscendPromptManager.ts
@@ -114,6 +114,14 @@ export interface ReportPromptRunOptions
   };
 }

+const jsonParseSafe = (obj: string): unknown => {
+  try {
+    return JSON.parse(obj);
+  } catch (e) {
+    return obj;
+  }
+};
+
 /**
  * A class that is capable of loading and insert variables into prompts from
  * Transcend's Prompt Manager
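The new jsonParseSafe helper parses its input when it is valid JSON and otherwise hands the original string back instead of throwing. A minimal standalone sketch of that behavior; the sample inputs and the expected values in the comments are illustrative, not taken from the repo:

const jsonParseSafe = (obj: string): unknown => {
  try {
    return JSON.parse(obj);
  } catch (e) {
    return obj;
  }
};

// Well-formed JSON comes back as a structured value.
jsonParseSafe('{"action":"APPROVE","confidence":0.9}');
// => { action: 'APPROVE', confidence: 0.9 }

// Anything else is returned unchanged rather than raising a SyntaxError.
jsonParseSafe('plain text answer');
// => 'plain text answer'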
@@ -421,7 +429,11 @@ export class TranscendPromptManager<
       : response;

     // Parse via codec
-    return decodeCodec(promptInput.outputCodec, extracted);
+    return decodeCodec(
+      promptInput.outputCodec,
+      jsonParseSafe(extracted),
+      false,
+    );
   }

   /**
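With this change the extracted model response is run through jsonParseSafe before the codec decode, so a JSON string answer is validated as a structured value rather than as raw text; what the new third argument (false) to decodeCodec controls is not visible in this diff. A rough sketch of the same parse-then-validate idea using io-ts directly, with a made-up output codec standing in for promptInput.outputCodec:

import * as t from 'io-ts';
import { isRight } from 'fp-ts/lib/Either';

// Fallback parser matching the helper added above.
const jsonParseSafe = (obj: string): unknown => {
  try {
    return JSON.parse(obj);
  } catch {
    return obj;
  }
};

// Hypothetical output codec, for illustration only.
const OutputCodec = t.type({ answer: t.string });

const extracted = '{"answer":"42"}';
const decoded = OutputCodec.decode(jsonParseSafe(extracted));
if (isRight(decoded)) {
  console.log(decoded.right.answer); // "42"
}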
@@ -433,7 +445,7 @@
    */
   async reportAndParsePromptRun<TPromptName extends TPromptNames>(
     promptName: TPromptName,
-    options: Requirize<ReportPromptRunOptions, 'error'>,
+    { largeLanguageModel, ...options }: ReportPromptRunOptions,
   ): Promise<t.TypeOf<TPrompts[TPromptName]['outputCodec']>> {
     const name =
       options.name ||
@@ -471,9 +483,8 @@
       options.promptRunMessages[options.promptRunMessages.length - 1].content;

     // Look up the large language model being report on
-    const largeLanguageModel = this.getLargeLanguageModel(
-      options.largeLanguageModel,
-    );
+    const largeLanguageModelInstance =
+      this.getLargeLanguageModel(largeLanguageModel);

     let parsed: t.TypeOf<TPrompts[TPromptName]['outputCodec']>;
     try {
@@ -484,8 +495,9 @@
         productArea: PromptRunProductArea.PromptManager,
         ...options,
         name,
+        error: err.message,
         status: QueueStatus.Error,
-        largeLanguageModelId: largeLanguageModel.id,
+        largeLanguageModelId: largeLanguageModelInstance.id,
         promptId: promptInput.id,
         promptRunMessages: options.promptRunMessages.map((message, ind) => ({
           ...message,
@@ -501,7 +513,7 @@
       ...options,
       name,
       status: QueueStatus.Resolved,
-      largeLanguageModelId: largeLanguageModel.id,
+      largeLanguageModelId: largeLanguageModelInstance.id,
       promptId: promptInput.id,
       promptRunMessages: options.promptRunMessages.map((message, ind) => ({
         ...message,
@@ -521,7 +533,10 @@
    */
   async reportPromptRunError<TPromptName extends TPromptNames>(
     promptName: TPromptName,
-    options: ReportPromptRunOptions,
+    {
+      largeLanguageModel,
+      ...options
+    }: Requirize<ReportPromptRunOptions, 'error'>,
   ): Promise<void> {
     const name =
       options.name ||
@@ -547,16 +562,15 @@
     }

     // Look up the large language model being report on
-    const largeLanguageModel = this.getLargeLanguageModel(
-      options.largeLanguageModel,
-    );
+    const largeLanguageModelInstance =
+      this.getLargeLanguageModel(largeLanguageModel);

     await reportPromptRun(this.graphQLClient, {
       productArea: PromptRunProductArea.PromptManager,
       ...options,
       name,
       status: QueueStatus.Error,
-      largeLanguageModelId: largeLanguageModel.id,
+      largeLanguageModelId: largeLanguageModelInstance.id,
       promptId: promptInput.id,
       promptRunMessages: options.promptRunMessages.map((message, ind) => ({
         ...message,
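Taken together, the renames in this file pair with the new destructured signatures: largeLanguageModel is pulled out of the options object so the ...options spread no longer forwards it into the reportPromptRun input, and the looked-up record is kept under the distinct name largeLanguageModelInstance so only its id is sent. A small sketch of that rest-spread pattern, with illustrative option names rather than the full ReportPromptRunOptions shape:

interface DemoOptions {
  /** Human-readable run name */
  name?: string;
  /** Which model option to use (shape is illustrative) */
  largeLanguageModel: { name: string; client: string };
}

// Stand-in for the GraphQL reporter; just shows which keys it receives.
const report = (input: Record<string, unknown>): void => {
  console.log(Object.keys(input));
};

const run = ({ largeLanguageModel, ...options }: DemoOptions): void => {
  // largeLanguageModel is captured on its own, so the rest spread below
  // carries everything else but not the model option itself.
  report({
    ...options,
    largeLanguageModelId: `id-for-${largeLanguageModel.name}`,
  });
};

run({ name: 'demo', largeLanguageModel: { name: 'gpt-4', client: 'openai' } });
// => [ 'name', 'largeLanguageModelId' ]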
4 changes: 2 additions & 2 deletions src/graphql/fetchLargeLanguageModels.ts
@@ -1,6 +1,6 @@
 import { GraphQLClient } from 'graphql-request';
 import { LargeLanguageModelClient } from '@transcend-io/privacy-types';
-import { PROMPT_TEMPLATES } from './gqls';
+import { LARGE_LANGUAGE_MODELS } from './gqls';
 import { makeGraphQLRequest } from './makeGraphQLRequest';

 export interface LargeLanguageModel {
@@ -40,7 +40,7 @@ export async function fetchAllLargeLanguageModels(
       /** List */
       nodes: LargeLanguageModel[];
     };
-  }>(client, PROMPT_TEMPLATES, {
+  }>(client, LARGE_LANGUAGE_MODELS, {
     first: PAGE_SIZE,
     offset,
   });
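The fix here is a copy-paste correction: the models query was being sent with the PROMPT_TEMPLATES document instead of LARGE_LANGUAGE_MODELS. For context, a rough sketch of the offset-based pagination this fetcher appears to use, written against graphql-request directly rather than the repo's makeGraphQLRequest wrapper; the endpoint, selection set, and page size below are assumptions:

import { GraphQLClient, gql } from 'graphql-request';

// Assumed endpoint, for illustration only.
const client = new GraphQLClient('https://example.com/graphql');
const PAGE_SIZE = 20;

// Selection set is assumed; only the query/variable names come from the diff.
const LARGE_LANGUAGE_MODELS = gql`
  query TranscendCliLargeLanguageModels($first: Int!, $offset: Int!) {
    largeLanguageModels(first: $first, offset: $offset) {
      nodes {
        id
        name
      }
    }
  }
`;

interface LargeLanguageModel {
  /** Model ID */
  id: string;
  /** Model name */
  name: string;
}

async function fetchAllLargeLanguageModels(): Promise<LargeLanguageModel[]> {
  const all: LargeLanguageModel[] = [];
  let offset = 0;
  let shouldContinue = true;
  while (shouldContinue) {
    const { largeLanguageModels } = await client.request<{
      /** Query result */
      largeLanguageModels: {
        /** List */
        nodes: LargeLanguageModel[];
      };
    }>(LARGE_LANGUAGE_MODELS, { first: PAGE_SIZE, offset });
    all.push(...largeLanguageModels.nodes);
    offset += PAGE_SIZE;
    // Stop once a page comes back short.
    shouldContinue = largeLanguageModels.nodes.length === PAGE_SIZE;
  }
  return all;
}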
2 changes: 1 addition & 1 deletion src/graphql/gqls/largeLanguageModel.ts
@@ -7,7 +7,7 @@ export const LARGE_LANGUAGE_MODELS = gql`
   query TranscendCliLargeLanguageModels(
     $first: Int!
     $offset: Int!
-    $filterBy: PromptFiltersInput
+    $filterBy: LargeLanguageModelFiltersInput
   ) {
     largeLanguageModels(
       first: $first
4 changes: 2 additions & 2 deletions src/graphql/reportPromptRun.ts
@@ -1,5 +1,5 @@
 import { GraphQLClient } from 'graphql-request';
-import { CREATE_PROMPT } from './gqls';
+import { REPORT_PROMPT_RUN } from './gqls';
 import { makeGraphQLRequest } from './makeGraphQLRequest';
 import {
   QueueStatus,
@@ -65,7 +65,7 @@ export async function reportPromptRun(
         id: string;
       };
     };
-  }>(client, CREATE_PROMPT, {
+  }>(client, REPORT_PROMPT_RUN, {
     input: {
       ...input,
       promptRunMessages: input.promptRunMessages.map(
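This mirrors the fetchLargeLanguageModels.ts fix: the wrong gql constant (CREATE_PROMPT) was passed while the asserted result type still described reportPromptRun, so the compiler had no way to object. A short sketch of why that slips through with graphql-request; the documents, endpoint, and result shape here are made up for illustration:

import { GraphQLClient, gql } from 'graphql-request';

// Two hypothetical documents standing in for the real constants in ./gqls.
const CREATE_PROMPT = gql`
  mutation CreatePrompt($input: CreatePromptInput!) {
    createPrompt(input: $input) {
      prompt {
        id
      }
    }
  }
`;
const REPORT_PROMPT_RUN = gql`
  mutation ReportPromptRun($input: ReportPromptRunInput!) {
    reportPromptRun(input: $input) {
      promptRun {
        id
      }
    }
  }
`;

// Assumed endpoint, for illustration only.
const client = new GraphQLClient('https://example.com/graphql');

async function demo(): Promise<void> {
  // The result type is asserted through the type parameter rather than
  // inferred from the document, so accidentally passing CREATE_PROMPT here
  // would compile just the same and only fail at runtime.
  const result = await client.request<{
    /** Mutation payload */
    reportPromptRun: { promptRun: { id: string } };
  }>(REPORT_PROMPT_RUN, { input: { name: 'demo' } });
  console.log(result.reportPromptRun.promptRun.id);
}

void demo();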
