Mirror of https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web.git
Support VLM on SiliconFlow
commit 86f86962fb (parent a029b4330b)
@@ -13,7 +13,7 @@ import {
   ChatMessageTool,
   usePluginStore,
 } from "@/app/store";
-import { streamWithThink } from "@/app/utils/chat";
+import { preProcessImageContent, streamWithThink } from "@/app/utils/chat";
 import {
   ChatOptions,
   getHeaders,
@@ -25,6 +25,7 @@ import { getClientConfig } from "@/app/config/client";
 import {
   getMessageTextContent,
   getMessageTextContentWithoutThinking,
+  isVisionModel,
 } from "@/app/utils";
 import { RequestPayload } from "./openai";
 import { fetch } from "@/app/utils/stream";
@@ -71,13 +72,16 @@ export class SiliconflowApi implements LLMApi {
   }
 
   async chat(options: ChatOptions) {
+    const visionModel = isVisionModel(options.config.model);
     const messages: ChatOptions["messages"] = [];
     for (const v of options.messages) {
       if (v.role === "assistant") {
         const content = getMessageTextContentWithoutThinking(v);
         messages.push({ role: v.role, content });
       } else {
-        const content = getMessageTextContent(v);
+        const content = visionModel
+          ? await preProcessImageContent(v.content)
+          : getMessageTextContent(v);
         messages.push({ role: v.role, content });
       }
     }
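With this hunk, non-assistant messages keep plain text for ordinary models but are passed through preProcessImageContent when a vision model is selected. Below is a minimal, hypothetical sketch of the multimodal message shape this is assumed to produce for SiliconFlow's OpenAI-compatible endpoint; the type and variable names are illustrative, not taken from the repository:

// Hypothetical illustration only: the message shape expected when a vision
// model is selected, assuming preProcessImageContent resolves images into
// OpenAI-style content parts. Names below are not from the repo.
type TextPart = { type: "text"; text: string };
type ImagePart = { type: "image_url"; image_url: { url: string } };
type VisionContent = string | (TextPart | ImagePart)[];

interface VisionMessage {
  role: "system" | "user" | "assistant";
  content: VisionContent;
}

// A user turn carrying both text and an image.
const exampleUserMessage: VisionMessage = {
  role: "user",
  content: [
    { type: "text", text: "What is in this picture?" },
    { type: "image_url", image_url: { url: "data:image/png;base64,..." } },
  ],
};

console.log(JSON.stringify(exampleUserMessage, null, 2));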
@@ -462,6 +462,7 @@ export const VISION_MODEL_REGEXES = [
   /gpt-4-turbo(?!.*preview)/, // Matches "gpt-4-turbo" but not "gpt-4-turbo-preview"
   /^dall-e-3$/, // Matches exactly "dall-e-3"
   /glm-4v/,
+  /vl/i,
 ];
 
 export const EXCLUDE_VISION_MODEL_REGEXES = [/claude-3-5-haiku-20241022/];
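The new /vl/i entry lets the generic vision check pick up SiliconFlow's VLM model names. A minimal sketch of how the include and exclude lists are presumably combined (matchesVisionModel is a hypothetical stand-in for the repository's isVisionModel helper, and only the regexes visible in the hunk above are repeated here):

// Sketch of the assumed include/exclude matching; not the repo's implementation.
const VISION_MODEL_REGEXES = [
  /gpt-4-turbo(?!.*preview)/,
  /^dall-e-3$/,
  /glm-4v/,
  /vl/i, // new: matches "VL"/"vl" anywhere in the model name
];
const EXCLUDE_VISION_MODEL_REGEXES = [/claude-3-5-haiku-20241022/];

function matchesVisionModel(model: string): boolean {
  return (
    !EXCLUDE_VISION_MODEL_REGEXES.some((re) => re.test(model)) &&
    VISION_MODEL_REGEXES.some((re) => re.test(model))
  );
}

// With /vl/i in place, SiliconFlow VLM names such as
// "Qwen/Qwen2-VL-72B-Instruct" are treated as vision models.
console.log(matchesVisionModel("Qwen/Qwen2-VL-72B-Instruct")); // true
console.log(matchesVisionModel("claude-3-5-haiku-20241022")); // false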