Mirror of https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web.git (synced 2025-05-26 07:30:18 +09:00)

Commit 81b14b7b8d: Merge branch 'main' into main
.env.template
@@ -1,6 +1,9 @@
 # Your openai api key. (required)
 OPENAI_API_KEY=sk-xxxx
 
+# DeepSeek Api Key. (Optional)
+DEEPSEEK_API_KEY=
+
 # Access password, separated by comma. (optional)
 CODE=your-password
 
@@ -70,10 +73,13 @@ ANTHROPIC_API_VERSION=
 ### anthropic claude Api url (optional)
 ANTHROPIC_URL=
 
 ### (optional)
 WHITE_WEBDAV_ENDPOINTS=
 
 ### bedrock (optional)
 AWS_REGION=
 AWS_ACCESS_KEY=AKIA
 AWS_SECRET_KEY=
 
@@ -1,2 +1,3 @@
 public/serviceWorker.js
 app/mcp/mcp_config.json
+app/mcp/mcp_config.default.json
Dockerfile
@@ -42,7 +42,7 @@ COPY --from=builder /app/.next/static ./.next/static
 COPY --from=builder /app/.next/server ./.next/server
 
 RUN mkdir -p /app/app/mcp && chmod 777 /app/app/mcp
-COPY --from=builder /app/app/mcp/mcp_config.json /app/app/mcp/
+COPY --from=builder /app/app/mcp/mcp_config.default.json /app/app/mcp/mcp_config.json
 
 EXPOSE 3000
 
README.md
@@ -40,6 +40,13 @@ One-Click to get a well-designed cross-platform ChatGPT web UI, with Claude, GPT
 
 </div>
 
+## 🥳 Cheer for DeepSeek, China's AI star!
+
+> Purpose-Built UI for DeepSeek Reasoner Model
+
+<img src="https://github.com/user-attachments/assets/f3952210-3af1-4dc0-9b81-40eaa4847d9a"/>
+
+
 ## 🫣 NextChat Support MCP !
 > Before build, please set env ENABLE_MCP=true
 
app/client/platforms/deepseek.ts
@@ -13,7 +13,7 @@ import {
   ChatMessageTool,
   usePluginStore,
 } from "@/app/store";
-import { stream } from "@/app/utils/chat";
+import { streamWithThink } from "@/app/utils/chat";
 import {
   ChatOptions,
   getHeaders,
@@ -22,7 +22,10 @@ import {
   SpeechOptions,
 } from "../api";
 import { getClientConfig } from "@/app/config/client";
-import { getMessageTextContent } from "@/app/utils";
+import {
+  getMessageTextContent,
+  getMessageTextContentWithoutThinking,
+} from "@/app/utils";
 import { RequestPayload } from "./openai";
 import { fetch } from "@/app/utils/stream";
 
@@ -67,8 +70,13 @@ export class DeepSeekApi implements LLMApi {
   async chat(options: ChatOptions) {
     const messages: ChatOptions["messages"] = [];
     for (const v of options.messages) {
-      const content = getMessageTextContent(v);
-      messages.push({ role: v.role, content });
+      if (v.role === "assistant") {
+        const content = getMessageTextContentWithoutThinking(v);
+        messages.push({ role: v.role, content });
+      } else {
+        const content = getMessageTextContent(v);
+        messages.push({ role: v.role, content });
+      }
     }
 
     const modelConfig = {
@@ -107,6 +115,8 @@ export class DeepSeekApi implements LLMApi {
       headers: getHeaders(),
     };
 
+    // console.log(chatPayload);
+
     // make a fetch request
     const requestTimeoutId = setTimeout(
       () => controller.abort(),
@@ -119,7 +129,7 @@ export class DeepSeekApi implements LLMApi {
       .getAsTools(
         useChatStore.getState().currentSession().mask?.plugin || [],
       );
-    return stream(
+    return streamWithThink(
       chatPath,
       requestPayload,
       getHeaders(),
@@ -132,8 +142,9 @@ export class DeepSeekApi implements LLMApi {
         const json = JSON.parse(text);
         const choices = json.choices as Array<{
           delta: {
-            content: string;
+            content: string | null;
             tool_calls: ChatMessageTool[];
+            reasoning_content: string | null;
           };
         }>;
         const tool_calls = choices[0]?.delta?.tool_calls;
@@ -155,7 +166,36 @@ export class DeepSeekApi implements LLMApi {
             runTools[index]["function"]["arguments"] += args;
           }
         }
-        return choices[0]?.delta?.content;
+        const reasoning = choices[0]?.delta?.reasoning_content;
+        const content = choices[0]?.delta?.content;
+
+        // Skip if both content and reasoning_content are empty or null
+        if (
+          (!reasoning || reasoning.trim().length === 0) &&
+          (!content || content.trim().length === 0)
+        ) {
+          return {
+            isThinking: false,
+            content: "",
+          };
+        }
+
+        if (reasoning && reasoning.trim().length > 0) {
+          return {
+            isThinking: true,
+            content: reasoning,
+          };
+        } else if (content && content.trim().length > 0) {
+          return {
+            isThinking: false,
+            content: content,
+          };
+        }
+
+        return {
+          isThinking: false,
+          content: "",
+        };
       },
       // processToolMessage, include tool_calls message and tool call results
       (
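For context: DeepSeek's reasoner models stream the chain of thought in a separate `reasoning_content` field, with the final answer in `content`; in a given chunk, typically only one of the two is non-null. The parser above collapses that into the `{ isThinking, content }` shape consumed by `streamWithThink` (added at the end of this diff). A condensed, standalone sketch of the same decision, with invented sample deltas:

```ts
type ThinkChunk = { isThinking: boolean; content: string };

// Condensed from the parser above: non-empty reasoning wins, then content,
// otherwise an empty non-thinking chunk that the stream loop will skip.
function mapDelta(delta: {
  content: string | null;
  reasoning_content: string | null;
}): ThinkChunk {
  const reasoning = delta.reasoning_content?.trim();
  const content = delta.content?.trim();
  if (reasoning) return { isThinking: true, content: delta.reasoning_content! };
  if (content) return { isThinking: false, content: delta.content! };
  return { isThinking: false, content: "" };
}

console.log(mapDelta({ content: null, reasoning_content: "Hmm, 2 + 2..." }));
// -> { isThinking: true, content: "Hmm, 2 + 2..." }
console.log(mapDelta({ content: "4", reasoning_content: null }));
// -> { isThinking: false, content: "4" }
```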
app/config/server.ts
@@ -270,6 +270,6 @@ export const getServerSideConfig = () => {
     defaultModel,
     visionModels,
     allowedWebDavEndpoints,
-    enableMcp: !!process.env.ENABLE_MCP,
+    enableMcp: process.env.ENABLE_MCP === "true",
  };
};
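The strict comparison fixes a classic env-var footgun: `process.env` values are strings, so any non-empty setting, including `ENABLE_MCP=false`, is truthy under `!!`. A quick illustration (values invented):

```ts
// Environment variables are strings (or undefined), so "false" is truthy:
const raw: string | undefined = "false"; // e.g. ENABLE_MCP=false in .env
console.log(!!raw);          // true  -> the old check would have enabled MCP
console.log(raw === "true"); // false -> the new check keeps it disabled
```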
app/constant.ts
@@ -405,6 +405,7 @@ You are an AI assistant with access to system tools. Your role is to help users
 
 export const SUMMARIZE_MODEL = "gpt-4o-mini";
 export const GEMINI_SUMMARIZE_MODEL = "gemini-pro";
+export const DEEPSEEK_SUMMARIZE_MODEL = "deepseek-chat";
 
 export const KnowledgeCutOffDate: Record<string, string> = {
   default: "2021-09",
@@ -597,7 +598,7 @@ const iflytekModels = [
   "4.0Ultra",
 ];
 
-const deepseekModels = ["deepseek-chat", "deepseek-coder"];
+const deepseekModels = ["deepseek-chat", "deepseek-coder", "deepseek-reasoner"];
 
 const xAIModes = ["grok-beta"];
 
app/mcp/actions.ts
@@ -365,6 +365,8 @@ export async function getMcpConfigFromFile(): Promise<McpConfigData> {
 // Update the MCP config file
 async function updateMcpConfig(config: McpConfigData): Promise<void> {
   try {
+    // Ensure the directory exists
+    await fs.mkdir(path.dirname(CONFIG_PATH), { recursive: true });
     await fs.writeFile(CONFIG_PATH, JSON.stringify(config, null, 2));
   } catch (error) {
     throw error;
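The added `fs.mkdir(..., { recursive: true })` makes the write safe on first run, when `app/mcp/` may not exist yet; with `recursive: true` the call is a no-op if the directory is already there. A minimal standalone sketch of the guarded write, assuming Node's `fs/promises` API (the path and helper name here are illustrative, not the commit's code):

```ts
import fs from "node:fs/promises";
import path from "node:path";

// Illustrative path; the real code uses its own CONFIG_PATH constant.
const CONFIG_PATH = path.join(process.cwd(), "app/mcp/mcp_config.json");

async function writeConfig(config: unknown): Promise<void> {
  // { recursive: true } also means "no error if it already exists",
  // so this is safe to run before every write.
  await fs.mkdir(path.dirname(CONFIG_PATH), { recursive: true });
  await fs.writeFile(CONFIG_PATH, JSON.stringify(config, null, 2));
}

writeConfig({ mcpServers: {} }).catch(console.error);
```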
app/mcp/mcp_config.default.json (new file)
@@ -0,0 +1,3 @@
+{
+  "mcpServers": {}
+}
app/store/chat.ts
@@ -20,6 +20,7 @@ import {
   DEFAULT_MODELS,
   DEFAULT_SYSTEM_TEMPLATE,
   GEMINI_SUMMARIZE_MODEL,
+  DEEPSEEK_SUMMARIZE_MODEL,
   KnowledgeCutOffDate,
   MCP_SYSTEM_TEMPLATE,
   MCP_TOOLS_TEMPLATE,
@@ -35,7 +36,7 @@ import { ModelConfig, ModelType, useAppConfig } from "./config";
 import { useAccessStore } from "./access";
 import { collectModelsWithDefaultModel } from "../utils/model";
 import { createEmptyMask, Mask } from "./mask";
-import { executeMcpAction, getAllTools } from "../mcp/actions";
+import { executeMcpAction, getAllTools, isMcpEnabled } from "../mcp/actions";
 import { extractMcpJson, isMcpJson } from "../mcp/utils";
 
 const localStorage = safeLocalStorage();
@@ -143,7 +144,10 @@ function getSummarizeModel(
   }
   if (currentModel.startsWith("gemini")) {
     return [GEMINI_SUMMARIZE_MODEL, ServiceProvider.Google];
+  } else if (currentModel.startsWith("deepseek-")) {
+    return [DEEPSEEK_SUMMARIZE_MODEL, ServiceProvider.DeepSeek];
   }
 
   return [currentModel, providerName];
 }
 
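The new branch routes every `deepseek-` session, including `deepseek-reasoner`, to `deepseek-chat` when generating titles and summaries, presumably because chain-of-thought output is wasted on housekeeping prompts. A toy sketch of the selection (the helper name is illustrative, not the commit's code):

```ts
// Illustrative: summarization swaps a reasoner session to the plain chat model.
const DEEPSEEK_SUMMARIZE_MODEL = "deepseek-chat";

function pickSummarizeModel(currentModel: string): string {
  if (currentModel.startsWith("deepseek-")) return DEEPSEEK_SUMMARIZE_MODEL;
  return currentModel;
}

console.log(pickSummarizeModel("deepseek-reasoner")); // "deepseek-chat"
console.log(pickSummarizeModel("gpt-4o")); // "gpt-4o" (unchanged by this branch)
```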
@@ -245,7 +249,7 @@ export const useChatStore = createPersistStore(
 
         newSession.topic = currentSession.topic;
         // deep-copy the messages
-        newSession.messages = currentSession.messages.map(msg => ({
+        newSession.messages = currentSession.messages.map((msg) => ({
           ...msg,
           id: nanoid(), // generate a new message ID
         }));
@@ -551,27 +555,32 @@ export const useChatStore = createPersistStore(
         (session.mask.modelConfig.model.startsWith("gpt-") ||
           session.mask.modelConfig.model.startsWith("chatgpt-"));
 
-      const mcpSystemPrompt = await getMcpSystemPrompt();
+      const mcpEnabled = await isMcpEnabled();
+      const mcpSystemPrompt = mcpEnabled ? await getMcpSystemPrompt() : "";
 
       var systemPrompts: ChatMessage[] = [];
-      systemPrompts = shouldInjectSystemPrompts
-        ? [
-            createMessage({
-              role: "system",
-              content:
-                fillTemplateWith("", {
-                  ...modelConfig,
-                  template: DEFAULT_SYSTEM_TEMPLATE,
-                }) + mcpSystemPrompt,
-            }),
-          ]
-        : [
-            createMessage({
-              role: "system",
-              content: mcpSystemPrompt,
-            }),
-          ];
       if (shouldInjectSystemPrompts) {
+        systemPrompts = [
+          createMessage({
+            role: "system",
+            content:
+              fillTemplateWith("", {
+                ...modelConfig,
+                template: DEFAULT_SYSTEM_TEMPLATE,
+              }) + mcpSystemPrompt,
+          }),
+        ];
+      } else if (mcpEnabled) {
+        systemPrompts = [
+          createMessage({
+            role: "system",
+            content: mcpSystemPrompt,
+          }),
+        ];
+      }
+
+      if (shouldInjectSystemPrompts || mcpEnabled) {
         console.log(
           "[Global System Prompt] ",
           systemPrompts.at(0)?.content ?? "empty",
@@ -816,6 +825,8 @@ export const useChatStore = createPersistStore(
 
     /** check if the message contains MCP JSON and execute the MCP action */
     checkMcpJson(message: ChatMessage) {
+      const mcpEnabled = isMcpEnabled();
+      if (!mcpEnabled) return;
       const content = getMessageTextContent(message);
       if (isMcpJson(content)) {
         try {
app/utils.ts
@@ -241,6 +241,28 @@ export function getMessageTextContent(message: RequestMessage) {
   return "";
 }
 
+export function getMessageTextContentWithoutThinking(message: RequestMessage) {
+  let content = "";
+
+  if (typeof message.content === "string") {
+    content = message.content;
+  } else {
+    for (const c of message.content) {
+      if (c.type === "text") {
+        content = c.text ?? "";
+        break;
+      }
+    }
+  }
+
+  // Filter out thinking lines (starting with "> ")
+  return content
+    .split("\n")
+    .filter((line) => !line.startsWith("> ") && line.trim() !== "")
+    .join("\n")
+    .trim();
+}
+
 export function getMessageImages(message: RequestMessage): string[] {
   if (typeof message.content === "string") {
     return [];
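Thinking output is stored in the chat history as markdown blockquote lines prefixed with "> " (see `streamWithThink` at the end of this diff), so filtering those lines recovers just the final answer before an assistant message is re-sent, keeping earlier turns' reasoning out of the context passed back to the model. For example (sample message invented):

```ts
// A stored assistant message after a reasoner reply:
const stored = [
  "> Let me think about this.",
  "> 2 + 2 is elementary.",
  "",
  "The answer is 4.",
].join("\n");

// Same line filter as the new helper above:
const withoutThinking = stored
  .split("\n")
  .filter((line) => !line.startsWith("> ") && line.trim() !== "")
  .join("\n")
  .trim();

console.log(withoutThinking); // "The answer is 4."
```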
@@ -256,9 +278,7 @@ export function getMessageImages(message: RequestMessage): string[] {
 
 export function isVisionModel(model: string) {
   const visionModels = useAccessStore.getState().visionModels;
-  const envVisionModels = visionModels
-    ?.split(",")
-    .map((m) => m.trim());
+  const envVisionModels = visionModels?.split(",").map((m) => m.trim());
   if (envVisionModels?.includes(model)) {
     return true;
   }
app/utils/chat.ts
@@ -344,8 +344,12 @@ export function stream(
         return finish();
       }
       const text = msg.data;
+      // Skip empty messages
+      if (!text || text.trim().length === 0) {
+        return;
+      }
       try {
-        const chunk = parseSSE(msg.data, runTools);
+        const chunk = parseSSE(text, runTools);
         if (chunk) {
           remainText += chunk;
         }
@@ -366,3 +370,262 @@ export function stream(
   console.debug("[ChatAPI] start");
   chatApi(chatPath, headers, requestPayload, tools); // call fetchEventSource
 }
+
+export function streamWithThink(
+  chatPath: string,
+  requestPayload: any,
+  headers: any,
+  tools: any[],
+  funcs: Record<string, Function>,
+  controller: AbortController,
+  parseSSE: (
+    text: string,
+    runTools: any[],
+  ) => {
+    isThinking: boolean;
+    content: string | undefined;
+  },
+  processToolMessage: (
+    requestPayload: any,
+    toolCallMessage: any,
+    toolCallResult: any[],
+  ) => void,
+  options: any,
+) {
+  let responseText = "";
+  let remainText = "";
+  let finished = false;
+  let running = false;
+  let runTools: any[] = [];
+  let responseRes: Response;
+  let isInThinkingMode = false;
+  let lastIsThinking = false;
+
+  // animate response to make it look smooth
+  function animateResponseText() {
+    if (finished || controller.signal.aborted) {
+      responseText += remainText;
+      console.log("[Response Animation] finished");
+      if (responseText?.length === 0) {
+        options.onError?.(new Error("empty response from server"));
+      }
+      return;
+    }
+
+    if (remainText.length > 0) {
+      const fetchCount = Math.max(1, Math.round(remainText.length / 60));
+      const fetchText = remainText.slice(0, fetchCount);
+      responseText += fetchText;
+      remainText = remainText.slice(fetchCount);
+      options.onUpdate?.(responseText, fetchText);
+    }
+
+    requestAnimationFrame(animateResponseText);
+  }
+
+  // start animation
+  animateResponseText();
+
+  const finish = () => {
+    if (!finished) {
+      if (!running && runTools.length > 0) {
+        const toolCallMessage = {
+          role: "assistant",
+          tool_calls: [...runTools],
+        };
+        running = true;
+        runTools.splice(0, runTools.length); // empty runTools
+        return Promise.all(
+          toolCallMessage.tool_calls.map((tool) => {
+            options?.onBeforeTool?.(tool);
+            return Promise.resolve(
+              // @ts-ignore
+              funcs[tool.function.name](
+                // @ts-ignore
+                tool?.function?.arguments
+                  ? JSON.parse(tool?.function?.arguments)
+                  : {},
+              ),
+            )
+              .then((res) => {
+                let content = res.data || res?.statusText;
+                // hotfix #5614
+                content =
+                  typeof content === "string"
+                    ? content
+                    : JSON.stringify(content);
+                if (res.status >= 300) {
+                  return Promise.reject(content);
+                }
+                return content;
+              })
+              .then((content) => {
+                options?.onAfterTool?.({
+                  ...tool,
+                  content,
+                  isError: false,
+                });
+                return content;
+              })
+              .catch((e) => {
+                options?.onAfterTool?.({
+                  ...tool,
+                  isError: true,
+                  errorMsg: e.toString(),
+                });
+                return e.toString();
+              })
+              .then((content) => ({
+                name: tool.function.name,
+                role: "tool",
+                content,
+                tool_call_id: tool.id,
+              }));
+          }),
+        ).then((toolCallResult) => {
+          processToolMessage(requestPayload, toolCallMessage, toolCallResult);
+          setTimeout(() => {
+            // call again
+            console.debug("[ChatAPI] restart");
+            running = false;
+            chatApi(chatPath, headers, requestPayload, tools); // call fetchEventSource
+          }, 60);
+        });
+        return;
+      }
+      if (running) {
+        return;
+      }
+      console.debug("[ChatAPI] end");
+      finished = true;
+      options.onFinish(responseText + remainText, responseRes);
+    }
+  };
+
+  controller.signal.onabort = finish;
+
+  function chatApi(
+    chatPath: string,
+    headers: any,
+    requestPayload: any,
+    tools: any,
+  ) {
+    const chatPayload = {
+      method: "POST",
+      body: JSON.stringify({
+        ...requestPayload,
+        tools: tools && tools.length ? tools : undefined,
+      }),
+      signal: controller.signal,
+      headers,
+    };
+    const requestTimeoutId = setTimeout(
+      () => controller.abort(),
+      REQUEST_TIMEOUT_MS,
+    );
+    fetchEventSource(chatPath, {
+      fetch: tauriFetch as any,
+      ...chatPayload,
+      async onopen(res) {
+        clearTimeout(requestTimeoutId);
+        const contentType = res.headers.get("content-type");
+        console.log("[Request] response content type: ", contentType);
+        responseRes = res;
+
+        if (contentType?.startsWith("text/plain")) {
+          responseText = await res.clone().text();
+          return finish();
+        }
+
+        if (
+          !res.ok ||
+          !res.headers
+            .get("content-type")
+            ?.startsWith(EventStreamContentType) ||
+          res.status !== 200
+        ) {
+          const responseTexts = [responseText];
+          let extraInfo = await res.clone().text();
+          try {
+            const resJson = await res.clone().json();
+            extraInfo = prettyObject(resJson);
+          } catch {}
+
+          if (res.status === 401) {
+            responseTexts.push(Locale.Error.Unauthorized);
+          }
+
+          if (extraInfo) {
+            responseTexts.push(extraInfo);
+          }
+
+          responseText = responseTexts.join("\n\n");
+
+          return finish();
+        }
+      },
+      onmessage(msg) {
+        if (msg.data === "[DONE]" || finished) {
+          return finish();
+        }
+        const text = msg.data;
+        // Skip empty messages
+        if (!text || text.trim().length === 0) {
+          return;
+        }
+        try {
+          const chunk = parseSSE(text, runTools);
+          // Skip if content is empty
+          if (!chunk?.content || chunk.content.trim().length === 0) {
+            return;
+          }
+          // Check if thinking mode changed
+          const isThinkingChanged = lastIsThinking !== chunk.isThinking;
+          lastIsThinking = chunk.isThinking;
+
+          if (chunk.isThinking) {
+            // If in thinking mode
+            if (!isInThinkingMode || isThinkingChanged) {
+              // If this is a new thinking block or mode changed, add prefix
+              isInThinkingMode = true;
+              if (remainText.length > 0) {
+                remainText += "\n";
+              }
+              remainText += "> " + chunk.content;
+            } else {
+              // Handle newlines in thinking content
+              if (chunk.content.includes("\n\n")) {
+                const lines = chunk.content.split("\n\n");
+                remainText += lines.join("\n\n> ");
+              } else {
+                remainText += chunk.content;
+              }
+            }
+          } else {
+            // If in normal mode
+            if (isInThinkingMode || isThinkingChanged) {
+              // If switching from thinking mode to normal mode
+              isInThinkingMode = false;
+              remainText += "\n\n" + chunk.content;
+            } else {
+              remainText += chunk.content;
+            }
+          }
+        } catch (e) {
+          console.error("[Request] parse error", text, msg, e);
+          // Don't throw error for parse failures, just log them
+        }
+      },
+      onclose() {
+        finish();
+      },
+      onerror(e) {
+        options?.onError?.(e);
+        throw e;
+      },
+      openWhenHidden: true,
+    });
+  }
+  console.debug("[ChatAPI] start");
+  chatApi(chatPath, headers, requestPayload, tools); // call fetchEventSource
+}
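The `isInThinkingMode` / `lastIsThinking` pair in `onmessage` renders the reasoning stream as a markdown blockquote: the first thinking chunk gets a "> " prefix, paragraph breaks inside the thought are re-prefixed, and the switch back to answer content inserts a blank line to close the quote. A condensed, runnable sketch of just that state machine (chunk values invented):

```ts
// Condensed from the onmessage handler above; same chunk shape assumed.
let remainText = "";
let isInThinkingMode = false;
let lastIsThinking = false;

function append(chunk: { isThinking: boolean; content: string }) {
  const isThinkingChanged = lastIsThinking !== chunk.isThinking;
  lastIsThinking = chunk.isThinking;
  if (chunk.isThinking) {
    if (!isInThinkingMode || isThinkingChanged) {
      // new thinking block: open the blockquote
      isInThinkingMode = true;
      if (remainText.length > 0) remainText += "\n";
      remainText += "> " + chunk.content;
    } else {
      // re-prefix paragraph breaks so the whole thought stays quoted
      remainText += chunk.content.includes("\n\n")
        ? chunk.content.split("\n\n").join("\n\n> ")
        : chunk.content;
    }
  } else if (isInThinkingMode || isThinkingChanged) {
    // leaving the thought: blank line closes the quote
    isInThinkingMode = false;
    remainText += "\n\n" + chunk.content;
  } else {
    remainText += chunk.content;
  }
}

[
  { isThinking: true, content: "First, consider the input." },
  { isThinking: true, content: " It is small." },
  { isThinking: false, content: "Answer: 42." },
].forEach(append);
console.log(remainText);
// > First, consider the input. It is small.
//
// Answer: 42.
```

Because the quote markers live in the stored message text, `getMessageTextContentWithoutThinking` (earlier in this diff) can later strip the thinking with a simple line filter.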