
Commit

feat: support DeepSeek
rxliuli committed Aug 24, 2024
1 parent d37acf5 commit efafc8b
Showing 9 changed files with 86 additions and 7 deletions.
4 changes: 3 additions & 1 deletion README.md
@@ -10,7 +10,7 @@ Supported models
- [x] Anthropic
- [x] Google Vertex Anthropic
- [x] Google Gemini
- [ ] DeepSeek
- [x] DeepSeek

## Deployment

@@ -31,6 +31,8 @@ Environment variables
- `ANTROPIC_API_KEY`: Anthropic API Key
- Google Gemini: Supports Google Gemini models, e.g. `gemini-1.5-flash`
- `GOOGLE_GEN_AI_API_KEY`: Google Gemini API Key
- DeepSeek: Supports DeepSeek models, e.g. `deepseek-chat`
- `DEEPSEEK_API_KEY`: DeepSeek API Key

## Usage

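With the environment variables above set, a client can route DeepSeek models through the proxy's OpenAI-compatible endpoint. A minimal sketch, not part of this commit: the deployment URL is hypothetical, and it assumes the worker's own `API_KEY` is passed as the SDK's API key; see the repository's Usage section for the exact scheme.

```ts
import OpenAI from 'openai'

// Hypothetical deployment URL; replace with your own worker's address.
const client = new OpenAI({
  baseURL: 'https://your-worker.example.com/v1',
  apiKey: process.env.PROXY_API_KEY!, // the proxy's API_KEY, not the DeepSeek key
})

// 'deepseek-chat' is routed to the DeepSeek provider because deepseek()
// lists it in supportModels (see src/llm/deepseek.ts below).
const res = await client.chat.completions.create({
  model: 'deepseek-chat',
  messages: [{ role: 'user', content: 'Hello, world!' }],
})
console.log(res.choices[0].message.content)
```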
4 changes: 3 additions & 1 deletion README.zh-CN.md
@@ -10,7 +10,7 @@
- [x] Anthropic
- [x] Google Vertex Anthropic
- [x] Google Gemini
- [ ] DeepSeek
- [x] DeepSeek

## Deployment

@@ -31,6 +31,8 @@
- `ANTROPIC_API_KEY`: Anthropic API Key
- Google Gemini: Supports Google Gemini models, e.g. `gemini-1.5-flash`
- `GOOGLE_GEN_AI_API_KEY`: Google Gemini API Key
- DeepSeek: Supports DeepSeek models, e.g. `deepseek-chat`
- `DEEPSEEK_API_KEY`: DeepSeek API Key

## Usage

1 change: 1 addition & 0 deletions package.json
@@ -15,6 +15,7 @@
"langchain-anthropic-edge": "workspace:*",
"lodash-es": "^4.17.21",
"openai": "^4.56.0",
"serialize-error": "^11.0.3",
"zod": "^3.23.8"
},
"devDependencies": {
17 changes: 17 additions & 0 deletions pnpm-lock.yaml


14 changes: 13 additions & 1 deletion src/index.ts
@@ -5,6 +5,9 @@ import { anthropic, anthropicVertex } from './llm/anthropic'
import OpenAI from 'openai'
import { uniq } from 'lodash-es'
import { google } from './llm/google'
import { deepseek } from './llm/deepseek'
import { serializeError } from 'serialize-error'
import { HTTPException } from 'hono/http-exception'

interface Bindings {
API_KEY: string
@@ -17,6 +20,7 @@ function getModels(env: Record<string, string>) {
anthropic(env),
anthropicVertex(env),
google(env),
deepseek(env),
].filter((it) => it.requiredEnv.every((it) => it in env))
}

@@ -43,11 +47,19 @@ curl https://api.openai.com/v1/chat/completions \
}
return next()
})
.use(async (c, next) => {
await next()
if (c.error) {
throw new HTTPException((c.error as any)?.status ?? 500, {
message: serializeError(c.error).message,
})
}
})
.post('/v1/chat/completions', async (c) => {
const list = getModels(c.env as any)
const req = (await c.req.json()) as
| OpenAI.ChatCompletionCreateParamsNonStreaming
| OpenAI.ChatCompletionCreateParamsStreaming
const list = getModels(c.env as any)
const llm = list.find((it) => it.supportModels.includes(req.model))
if (!llm) {
return c.json({ error: `Model ${req.model} not supported` }, 400)
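The new middleware in src/index.ts turns provider errors into responses that keep the upstream status code. Below is a standalone sketch of the same pattern, assuming Hono ^4 and serialize-error ^11; it is not taken from the repository.

```ts
import { Hono } from 'hono'
import { HTTPException } from 'hono/http-exception'
import { serializeError } from 'serialize-error'

const app = new Hono()
  // Runs after the route handler; c.error is populated when a handler threw.
  .use(async (c, next) => {
    await next()
    if (c.error) {
      // Reuse the upstream status code when the thrown error carries one,
      // and expose a readable message via serialize-error.
      throw new HTTPException((c.error as any)?.status ?? 500, {
        message: serializeError(c.error).message,
      })
    }
  })
  // Hypothetical route that simulates a provider error carrying a status code,
  // as SDK errors from OpenAI-style clients typically do.
  .post('/boom', () => {
    throw Object.assign(new Error('rate limited by upstream'), { status: 429 })
  })

export default app
```

A POST to /boom should then come back as 429 with the error message rather than a bare 500, which is what the proxy's /v1/chat/completions route gains from this commit.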
22 changes: 22 additions & 0 deletions src/llm/__tests__/deepseek.test.ts
@@ -0,0 +1,22 @@
import OpenAI from 'openai'
import { expect, it } from 'vitest'
import { deepseek } from '../deepseek'
import { omit } from 'lodash-es'

it('deepseek', async () => {
const client = new OpenAI({
baseURL: 'https://api.deepseek.com',
apiKey: import.meta.env.VITE_DEEPSEEK_API_KEY,
})
const params: OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming =
{
model: 'deepseek-chat',
messages: [{ role: 'user', content: 'Hello, world!' }],
temperature: 0,
}
const r1 = await client.chat.completions.create(params)
const r2 = await deepseek({
DEEPSEEK_API_KEY: import.meta.env.VITE_DEEPSEEK_API_KEY,
}).invoke(params)
expect(omit(r1, ['id', 'created'])).toEqual(omit(r2, ['id', 'created']))
})
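A note on running this test: both calls hit the live DeepSeek API, and the equality check (minus id and created) relies on temperature: 0 yielding identical completions. The key is read through import.meta.env, which Vite by default only populates for variables prefixed with VITE_, hence VITE_DEEPSEEK_API_KEY rather than DEEPSEEK_API_KEY.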
2 changes: 2 additions & 0 deletions src/llm/anthropic.ts
@@ -213,6 +213,7 @@ export function anthropicVertex(env: Record<string, string>): IAnthropicVertex {
projectId: env.VERTEX_ANTROPIC_PROJECTID,
})
const r = anthropicBase(createClient as any) as IAnthropicVertex
r.name = 'vertex-anthropic'
r.requiredEnv = [
'VERTEX_ANTROPIC_GOOGLE_SA_CLIENT_EMAIL',
'VERTEX_ANTROPIC_GOOGLE_SA_PRIVATE_KEY',
@@ -231,6 +232,7 @@ export function anthropicVertex(env: Record<string, string>): IAnthropicVertex {
export function anthropic(env: Record<string, string>): IAnthropicVertex {
const createClient = () => new Anthropic({ apiKey: env.ANTROPIC_API_KEY })
const r = anthropicBase(createClient as any) as IAnthropicVertex
r.name = 'anthropic'
r.requiredEnv = ['ANTROPIC_API_KEY']
r.supportModels = [
'claude-3-5-sonnet-20240620',
13 changes: 13 additions & 0 deletions src/llm/deepseek.ts
@@ -0,0 +1,13 @@
import { IChat } from './base'
import { openaiBase } from './openai'

export function deepseek(env: Record<string, string>): IChat {
const r = openaiBase(env, {
apiKey: env.DEEPSEEK_API_KEY,
baseURL: 'https://api.deepseek.com',
})
r.name = 'deepseek'
r.requiredEnv = ['DEEPSEEK_API_KEY']
r.supportModels = ['deepseek-chat', 'deepseek-coder']
return r
}
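DeepSeek's API is OpenAI-compatible, which is why the new provider is this thin: openaiBase (refactored below to accept ClientOptions) supplies the shared invoke/stream plumbing, the options override points the OpenAI SDK at https://api.deepseek.com with the DeepSeek key, and only name, requiredEnv and supportModels are replaced.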
16 changes: 12 additions & 4 deletions src/llm/openai.ts
@@ -1,8 +1,10 @@
import { Context } from 'hono'
import OpenAI from 'openai'
import OpenAI, { ClientOptions } from 'openai'
import { IChat } from './base'

export function openai(env: Record<string, string>): IChat {
export function openaiBase(
env: Record<string, string>,
options?: ClientOptions,
): IChat {
return {
name: 'openai',
supportModels: [
@@ -40,7 +42,7 @@ export function openai(env: Record<string, string>): IChat {
],
requiredEnv: ['OPENAI_API_KEY'],
invoke(req) {
const client = new OpenAI({ apiKey: env.OPENAI_API_KEY })
const client = new OpenAI(options)
return client.chat.completions.create({ ...req, stream: false })
},
async *stream(req, signal) {
@@ -55,3 +57,9 @@
},
}
}

export function openai(env: Record<string, string>): IChat {
return openaiBase(env, {
apiKey: env.OPENAI_API_KEY,
})
}
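For orientation, the provider factories above all return the IChat shape from src/llm/base.ts. That file is not part of this diff, so the following is only a rough reconstruction inferred from how the objects are used here; the real interface may differ.

```ts
import OpenAI from 'openai'

// Inferred from usage in this commit; see src/llm/base.ts for the real definition.
export interface IChat {
  name: string            // provider id, e.g. 'openai', 'deepseek', 'vertex-anthropic'
  requiredEnv: string[]   // env vars that must all be present for getModels() to keep it
  supportModels: string[] // model ids this provider handles, e.g. 'deepseek-chat'
  invoke(
    req: OpenAI.ChatCompletionCreateParamsNonStreaming,
  ): Promise<OpenAI.ChatCompletion>
  stream(
    req: OpenAI.ChatCompletionCreateParamsStreaming,
    signal?: AbortSignal,
  ): AsyncGenerator<OpenAI.ChatCompletionChunk>
}
```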
