
Commit

actions-user committed Sep 7, 2024
2 parents 129af46 + 2bd799f commit 460d13e
Showing 31 changed files with 1,569 additions and 407 deletions.
12 changes: 6 additions & 6 deletions README.md
@@ -91,13 +91,13 @@ For enterprise inquiries, please contact: **[email protected]**
- [x] Desktop App with tauri
- [x] Self-host Model: Fully compatible with [RWKV-Runner](https://github.com/josStorer/RWKV-Runner), as well as server deployment of [LocalAI](https://github.com/go-skynet/LocalAI): llama/gpt4all/rwkv/vicuna/koala/gpt4all-j/cerebras/falcon/dolly etc.
- [x] Artifacts: Easily preview, copy and share generated content/webpages through a separate window [#5092](https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web/pull/5092)
-- [x] Plugins: support artifacts, network search, calculator, any other apis etc. [#165](https://github.com/Yidadaa/ChatGPT-Next-Web/issues/165)
-- [x] artifacts
-- [ ] network search, calculator, any other apis etc. [#165](https://github.com/Yidadaa/ChatGPT-Next-Web/issues/165)
+- [x] Plugins: support network search, calculator, any other apis etc. [#165](https://github.com/Yidadaa/ChatGPT-Next-Web/issues/165) [#5353](https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web/issues/5353)
+- [x] network search, calculator, any other apis etc. [#165](https://github.com/Yidadaa/ChatGPT-Next-Web/issues/165) [#5353](https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web/issues/5353)
+- [ ] local knowledge base

## What's New

- 🚀 v2.15.0 Now supports Plugins! Read this: [NextChat-Awesome-Plugins](https://github.com/ChatGPTNextWeb/NextChat-Awesome-Plugins)
- 🚀 v2.14.0 Now supports Artifacts & SD
- 🚀 v2.10.1 Support for the Google Gemini Pro model.
- 🚀 v2.9.11 You can now use an Azure endpoint.
@@ -128,13 +128,13 @@ For enterprise inquiries, please contact: **[email protected]**
- [x] Desktop app packaged with Tauri
- [x] Self-hosted LLM support: works out of the box with [RWKV-Runner](https://github.com/josStorer/RWKV-Runner), supports server-side deployment of [LocalAI](https://github.com/go-skynet/LocalAI) (llama / gpt4all / rwkv / vicuna / koala / gpt4all-j / cerebras / falcon / dolly, etc.), or use [api-for-open-llm](https://github.com/xusenlinzy/api-for-open-llm)
- [x] Artifacts: easily preview, copy, and share generated content/interactive web pages in a separate window [#5092](https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web/pull/5092)
-- [x] Plugin system: supports artifacts, web search, calculator, and calling other platform APIs [#165](https://github.com/Yidadaa/ChatGPT-Next-Web/issues/165)
-- [x] artifacts
-- [ ] Support web search, calculator, and calling other platform APIs [#165](https://github.com/Yidadaa/ChatGPT-Next-Web/issues/165)
+- [x] Plugin system: supports `web search`, `calculator`, and calling other platform APIs [#165](https://github.com/Yidadaa/ChatGPT-Next-Web/issues/165) [#5353](https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web/issues/5353)
+- [x] Support web search, calculator, and calling other platform APIs [#165](https://github.com/Yidadaa/ChatGPT-Next-Web/issues/165) [#5353](https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web/issues/5353)
+- [ ] Local knowledge base

## What's New

- 🚀 v2.15.0 Plugins are now supported! Learn more: [NextChat-Awesome-Plugins](https://github.com/ChatGPTNextWeb/NextChat-Awesome-Plugins)
- 🚀 v2.14.0 Artifacts & SD are now supported.
- 🚀 v2.10.1 The Gemini Pro model is now supported.
- 🚀 v2.9.11 You can now use a custom Azure service.
6 changes: 5 additions & 1 deletion app/api/[provider]/[...path]/route.ts
@@ -10,6 +10,8 @@ import { handle as alibabaHandler } from "../../alibaba";
import { handle as moonshotHandler } from "../../moonshot";
import { handle as stabilityHandler } from "../../stability";
import { handle as iflytekHandler } from "../../iflytek";
+import { handle as proxyHandler } from "../../proxy";

async function handle(
req: NextRequest,
{ params }: { params: { provider: string; path: string[] } },
@@ -36,8 +38,10 @@ async function handle(
      return stabilityHandler(req, { params });
    case ApiPath.Iflytek:
      return iflytekHandler(req, { params });
-    default:
+    case ApiPath.OpenAI:
      return openaiHandler(req, { params });
+    default:
+      return proxyHandler(req, { params });
  }
}

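For context, a minimal sketch of what this dispatch change amounts to: before the commit, any provider without a dedicated `case` fell through to the OpenAI handler; now it is forwarded to the generic proxy handler. The handler names and provider strings below are simplified stand-ins, not the repo's actual `ApiPath` constants.

```ts
// Simplified stand-in for the updated switch in route.ts (not the real module):
// known providers keep their dedicated handlers, OpenAI gets an explicit case,
// and anything unrecognized now goes to the proxy handler instead of OpenAI.
type Handler = (req: Request) => Promise<Response>;

function dispatch(
  provider: string,
  dedicated: Record<string, Handler>, // e.g. stability, iflytek, ...
  openaiHandler: Handler,
  proxyHandler: Handler,
): Handler {
  if (provider === "openai") return openaiHandler; // explicit case (was the old default)
  return dedicated[provider] ?? proxyHandler; // new default: generic proxy
}
```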
5 changes: 1 addition & 4 deletions app/api/common.ts
@@ -32,10 +32,7 @@ export async function requestOpenai(req: NextRequest) {
authHeaderName = "Authorization";
}

-  let path = `${req.nextUrl.pathname}${req.nextUrl.search}`.replaceAll(
-    "/api/openai/",
-    "",
-  );
+  let path = `${req.nextUrl.pathname}`.replaceAll("/api/openai/", "");

let baseUrl =
(isAzure ? serverConfig.azureUrl : serverConfig.baseUrl) || OPENAI_BASE_URL;
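A quick illustration of what the rewritten expression evaluates to; the request URL below is made up for the example. Only the pathname is kept now, so the query string is no longer baked into `path`.

```ts
// Made-up request URL, just to show the effect of dropping `req.nextUrl.search`:
const nextUrl = new URL(
  "https://example.com/api/openai/v1/chat/completions?stream=true",
);
// before this commit: "v1/chat/completions?stream=true"  (pathname + search)
// after this commit:  "v1/chat/completions"              (pathname only)
const path = `${nextUrl.pathname}`.replaceAll("/api/openai/", "");
console.log(path);
```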
75 changes: 75 additions & 0 deletions app/api/proxy.ts
@@ -0,0 +1,75 @@
import { NextRequest, NextResponse } from "next/server";

export async function handle(
  req: NextRequest,
  { params }: { params: { path: string[] } },
) {
  console.log("[Proxy Route] params ", params);

  if (req.method === "OPTIONS") {
    return NextResponse.json({ body: "OK" }, { status: 200 });
  }

  // remove path params from searchParams
  req.nextUrl.searchParams.delete("path");
  req.nextUrl.searchParams.delete("provider");

  const subpath = params.path.join("/");
  const fetchUrl = `${req.headers.get(
    "x-base-url",
  )}/${subpath}?${req.nextUrl.searchParams.toString()}`;
  const skipHeaders = ["connection", "host", "origin", "referer", "cookie"];
  const headers = new Headers(
    Array.from(req.headers.entries()).filter((item) => {
      if (
        item[0].indexOf("x-") > -1 ||
        item[0].indexOf("sec-") > -1 ||
        skipHeaders.includes(item[0])
      ) {
        return false;
      }
      return true;
    }),
  );
  const controller = new AbortController();
  const fetchOptions: RequestInit = {
    headers,
    method: req.method,
    body: req.body,
    // to fix #2485: https://stackoverflow.com/questions/55920957/cloudflare-worker-typeerror-one-time-use-body
    redirect: "manual",
    // @ts-ignore
    duplex: "half",
    signal: controller.signal,
  };

  const timeoutId = setTimeout(
    () => {
      controller.abort();
    },
    10 * 60 * 1000,
  );

  try {
    const res = await fetch(fetchUrl, fetchOptions);
    // to prevent browser prompt for credentials
    const newHeaders = new Headers(res.headers);
    newHeaders.delete("www-authenticate");
    // to disable nginx buffering
    newHeaders.set("X-Accel-Buffering", "no");

    // The latest version of the OpenAI API forces content-encoding to "br" for JSON responses.
    // If streaming is disabled we need to drop the content-encoding header, because Vercel
    // re-compresses the response with gzip; with the stale header left in place the browser
    // would try to decode the body as brotli and fail.
    newHeaders.delete("content-encoding");

    return new Response(res.body, {
      status: res.status,
      statusText: res.statusText,
      headers: newHeaders,
    });
  } finally {
    clearTimeout(timeoutId);
  }
}
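A hedged usage sketch for the new proxy route: any provider segment without a dedicated handler now reaches this handler, which reads `x-base-url` to rebuild the upstream URL, strips `x-*`/`sec-*` and a few other headers, and streams the response back. The endpoint path, model name, and target host below are illustrative assumptions; only the `x-base-url` header name comes from `proxy.ts` above.

```ts
// Hypothetical client-side call through the new proxy route (paths and host are
// assumptions for illustration; "x-base-url" is read and then stripped by proxy.ts).
async function callThroughProxy(apiKey: string) {
  const res = await fetch("/api/some-unknown-provider/v1/chat/completions", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      "x-base-url": "https://my-openai-compatible-host.example.com",
      Authorization: `Bearer ${apiKey}`, // forwarded: not in the skipHeaders list
    },
    body: JSON.stringify({
      model: "gpt-4o-mini",
      messages: [{ role: "user", content: "Hello" }],
    }),
  });
  return res.json();
}
```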
10 changes: 9 additions & 1 deletion app/client/api.ts
@@ -5,7 +5,13 @@ import {
ModelProvider,
ServiceProvider,
} from "../constant";
-import { ChatMessage, ModelType, useAccessStore, useChatStore } from "../store";
+import {
+  ChatMessageTool,
+  ChatMessage,
+  ModelType,
+  useAccessStore,
+  useChatStore,
+} from "../store";
import { ChatGPTApi, DalleRequestPayload } from "./platforms/openai";
import { GeminiProApi } from "./platforms/google";
import { ClaudeApi } from "./platforms/anthropic";
@@ -56,6 +62,8 @@ export interface ChatOptions {
  onFinish: (message: string) => void;
  onError?: (err: Error) => void;
  onController?: (controller: AbortController) => void;
+  onBeforeTool?: (tool: ChatMessageTool) => void;
+  onAfterTool?: (tool: ChatMessageTool) => void;
}

export interface LLMUsage {
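The two new optional callbacks give callers a hook before and after a tool runs. Below is a minimal sketch of how an options object might wire them up; the `ChatMessageTool` shape here is an assumed, simplified stand-in for the real type exported from the store.

```ts
// Assumed, simplified stand-in for the store's ChatMessageTool type.
type ChatMessageTool = { id: string; function?: { name?: string } };

const chatOptions = {
  onBeforeTool: (tool: ChatMessageTool) =>
    console.log("[tool] starting:", tool.function?.name ?? tool.id),
  onAfterTool: (tool: ChatMessageTool) =>
    console.log("[tool] finished:", tool.function?.name ?? tool.id),
  onFinish: (message: string) => console.log("[chat] final message:", message),
  onError: (err: Error) => console.error("[chat] error:", err),
};
```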
