Mirror of https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web.git
Synced 2025-05-19 04:00:16 +09:00

Commit b0758cccde (parent 98a11e56d2): optimization
@@ -171,6 +171,9 @@ export class QwenApi implements LLMApi {
             reasoning_content: string | null;
           };
         }>;
+
+        if (!choices?.length) return { isThinking: false, content: "" };
+
         const tool_calls = choices[0]?.message?.tool_calls;
         if (tool_calls?.length > 0) {
           const index = tool_calls[0]?.index;
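A minimal sketch (assumed chunk shape and helper name, not code from the repo) of what the added guard buys: a streamed chunk whose `choices` array is missing or empty now short-circuits to an empty, non-thinking result instead of reaching `choices[0]`. The DoubaoApi hunks further down add the same guard for delta-style chunks.

```ts
// Hypothetical chunk shape and helper, for illustration only.
type QwenLikeChunk = {
  choices?: Array<{
    message?: { content?: string | null; reasoning_content?: string | null };
  }>;
};

function extractDelta(chunk: QwenLikeChunk): { isThinking: boolean; content: string } {
  const choices = chunk.choices;
  // The added guard: empty or missing choices never reach choices[0].
  if (!choices?.length) return { isThinking: false, content: "" };

  const reasoning = choices[0]?.message?.reasoning_content;
  const content = choices[0]?.message?.content;
  return reasoning
    ? { isThinking: true, content: reasoning }
    : { isThinking: false, content: content ?? "" };
}

// extractDelta({ choices: [] })  -> { isThinking: false, content: "" }
// extractDelta({})               -> { isThinking: false, content: "" }
```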
@@ -190,6 +193,7 @@ export class QwenApi implements LLMApi {
             runTools[index]["function"]["arguments"] += args;
           }
         }
+
         const reasoning = choices[0]?.message?.reasoning_content;
         const content = choices[0]?.message?.content;
 
@@ -227,10 +231,8 @@ export class QwenApi implements LLMApi {
           toolCallMessage: any,
           toolCallResult: any[],
         ) => {
-          // @ts-ignore
-          requestPayload?.messages?.splice(
-            // @ts-ignore
-            requestPayload?.messages?.length,
+          requestPayload?.input?.messages?.splice(
+            requestPayload?.input?.messages?.length,
             0,
             toolCallMessage,
             ...toolCallResult,
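The QwenApi splice now targets `input.messages` rather than a top-level `messages` array, matching a DashScope-style request body where the conversation is nested under `input`; with the payload typed that way, the `// @ts-ignore` suppressions are no longer needed. A rough sketch of that assumed shape and the append it performs (names are illustrative, not taken from the repo):

```ts
// Illustrative payload shape; the real interface in the repo may differ.
interface QwenLikePayload {
  model: string;
  input: {
    messages: Array<{ role: string; content: unknown }>;
  };
}

function appendToolMessages(
  requestPayload: QwenLikePayload,
  toolCallMessage: any,
  toolCallResult: any[],
) {
  // Splicing at messages.length with deleteCount 0 is simply an append,
  // i.e. equivalent to messages.push(toolCallMessage, ...toolCallResult).
  requestPayload?.input?.messages?.splice(
    requestPayload?.input?.messages?.length,
    0,
    toolCallMessage,
    ...toolCallResult,
  );
}
```

In the ByteDance hunks below, the payload keeps `messages` at the top level, so the splice target stays the same there and only the `@ts-ignore` lines are dropped.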
@@ -37,7 +37,7 @@ export interface OpenAIListModelResponse {
   }>;
 }
 
-interface RequestPayload {
+interface RequestPayloadForByteDance {
   messages: {
     role: "system" | "user" | "assistant";
     content: string | MultimodalContent[];
@@ -105,7 +105,7 @@ export class DoubaoApi implements LLMApi {
     };
 
     const shouldStream = !!options.config.stream;
-    const requestPayload: RequestPayload = {
+    const requestPayload: RequestPayloadForByteDance = {
       messages,
       stream: shouldStream,
       model: modelConfig.model,
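A hedged sketch of the renamed interface in use. The rename from `RequestPayload` to `RequestPayloadForByteDance` only changes the name; presumably it keeps this provider-specific shape from being confused with the identically named `RequestPayload` types used by other providers. The field list below is abridged and partly assumed.

```ts
// Abridged, partly assumed field list — for illustration only.
interface RequestPayloadForByteDance {
  messages: {
    role: "system" | "user" | "assistant";
    content: string | unknown[];
  }[];
  stream?: boolean;
  model: string;
}

const requestPayload: RequestPayloadForByteDance = {
  messages: [{ role: "user", content: "hello" }],
  stream: true,
  model: "some-doubao-model-id", // placeholder, not a real model name
};
```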
@@ -157,6 +157,9 @@ export class DoubaoApi implements LLMApi {
             reasoning_content: string | null;
           };
         }>;
+
+        if (!choices?.length) return { isThinking: false, content: "" };
+
         const tool_calls = choices[0]?.delta?.tool_calls;
         if (tool_calls?.length > 0) {
           const index = tool_calls[0]?.index;
@@ -209,13 +212,11 @@ export class DoubaoApi implements LLMApi {
         },
         // processToolMessage, include tool_calls message and tool call results
         (
-          requestPayload: RequestPayload,
+          requestPayload: RequestPayloadForByteDance,
           toolCallMessage: any,
           toolCallResult: any[],
         ) => {
-          // @ts-ignore
           requestPayload?.messages?.splice(
-            // @ts-ignore
             requestPayload?.messages?.length,
             0,
             toolCallMessage,