Skip to content

Commit

Permalink
refactor: use @xsai/model instead of openai (#1)
Browse files — browse the repository at this point in the history
  • Loading branch information
kwaa authored Dec 6, 2024
1 parent 4aa1de9 commit d93510b
Show file tree
Hide file tree
Showing 4 changed files with 34 additions and 116 deletions.
6 changes: 3 additions & 3 deletions packages/stage/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -39,11 +39,11 @@
"@unocss/reset": "^0.65.0",
"@vueuse/core": "^12.0.0",
"@vueuse/head": "^2.0.0",
"@xsai/shared-chat-completion": "^0.0.14",
"@xsai/stream-text": "^0.0.14",
"@xsai/model": "^0.0.16",
"@xsai/shared-chat-completion": "^0.0.16",
"@xsai/stream-text": "^0.0.16",
"nprogress": "^0.2.0",
"ofetch": "^1.4.1",
"openai": "^4.75.0",
"pinia": "^2.2.8",
"pixi-live2d-display": "^0.4.0",
"rehype-stringify": "^10.0.1",
Expand Down
6 changes: 2 additions & 4 deletions packages/stage/src/components/MainStage.vue
Original file line number Diff line number Diff line change
Expand Up @@ -253,16 +253,14 @@ watch([openAiApiBaseURL, openAiApiKey], async ([baseUrl, apiKey]) => {
return
}
const fetchedModels = await models(baseUrl, apiKey)
supportedModels.value = fetchedModels.data
supportedModels.value = await models(baseUrl, apiKey)
})
onMounted(async () => {
if (!openAiApiBaseURL.value || !openAiApiKey.value)
return
const fetchedModels = await models(openAiApiBaseURL.value, openAiApiKey.value)
supportedModels.value = fetchedModels.data
supportedModels.value = await models(openAiApiBaseURL.value, openAiApiKey.value)
})
onUnmounted(() => {
Expand Down
11 changes: 4 additions & 7 deletions packages/stage/src/stores/llm.ts
Original file line number Diff line number Diff line change
@@ -1,14 +1,14 @@
import type { GenerateAudioStream } from '@airi-proj/elevenlabs/types'
import type { Message } from '@xsai/shared-chat-completion'
import { listModels } from '@xsai/model'
import { streamText } from '@xsai/stream-text'
import { ofetch } from 'ofetch'
import { OpenAI } from 'openai'
import { defineStore } from 'pinia'

export const useLLM = defineStore('llm', () => {
async function stream(apiUrl: string, apiKey: string, model: string, messages: Message[]) {
return await streamText({
url: `${apiUrl}/chat/completions`,
baseURL: (apiUrl.endsWith('/') ? apiUrl : `${apiUrl}/`) as `${string}/`,
apiKey,
model,
messages,
Expand All @@ -27,13 +27,10 @@ export const useLLM = defineStore('llm', () => {
}

try {
const openai = new OpenAI({
return await listModels({
baseURL: (apiUrl.endsWith('/') ? apiUrl : `${apiUrl}/`) as `${string}/`,
apiKey,
baseURL: apiUrl,
dangerouslyAllowBrowser: true,
})

return await openai.models.list()
}
catch (err) {
if (String(err).includes(`Failed to construct 'URL': Invalid URL`)) {
Expand Down
127 changes: 25 additions & 102 deletions pnpm-lock.yaml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

0 comments on commit d93510b

Please sign in to comment.