Mirror of https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web.git (synced 2025-05-21 05:00:16 +09:00)
try to add o1 as a vision model

parent d65aca6d13
commit 2509495cdc
@@ -196,9 +196,9 @@ export class ChatGPTApi implements LLMApi {
     let requestPayload: RequestPayload | DalleRequestPayload;
 
     const isDalle3 = _isDalle3(options.config.model);
-    const isO1OrO3 =
-      options.config.model.startsWith("o1") ||
-      options.config.model.startsWith("o3");
+    const isO1 = options.config.model.startsWith("o1");
+    const isO3 = options.config.model.startsWith("o3");
+    const isO1OrO3 = isO1 || isO3;
     if (isDalle3) {
       const prompt = getMessageTextContent(
         options.messages.slice(-1)?.pop() as any,
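The first hunk only refactors how the o1/o3 flags are derived; the combined check is unchanged, but isO1 becomes available on its own for the payload logic below. A minimal standalone sketch of the equivalent derivation (the o1OrO3Flags helper is illustrative only, not part of the repository):

// Illustrative helper (not in the project): derives the same flags as the diff.
function o1OrO3Flags(model: string) {
  const isO1 = model.startsWith("o1");
  const isO3 = model.startsWith("o3");
  return { isO1, isO3, isO1OrO3: isO1 || isO3 };
}

console.log(o1OrO3Flags("o1-mini")); // { isO1: true, isO3: false, isO1OrO3: true }
console.log(o1OrO3Flags("gpt-4o"));  // { isO1: false, isO3: false, isO1OrO3: false }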
@@ -244,7 +244,12 @@ export class ChatGPTApi implements LLMApi {
 
       // add max_tokens to vision model
       if (visionModel) {
-        requestPayload["max_tokens"] = Math.max(modelConfig.max_tokens, 4000);
+        if (isO1) {
+          requestPayload["max_completion_tokens"] = modelConfig.max_tokens;
+
+        } else {
+          requestPayload["max_tokens"] = Math.max(modelConfig.max_tokens, 4000);
+        }
       }
     }
 
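Taken together, the two hunks make vision requests for o1-prefixed models send max_completion_tokens instead of max_tokens. A rough sketch of the resulting behavior; the TokenPayload type and applyVisionTokenLimit helper are simplified assumptions for illustration, not the project's real code:

// Sketch only: the type and helper below are hypothetical.
type TokenPayload = { max_tokens?: number; max_completion_tokens?: number };

function applyVisionTokenLimit(
  payload: TokenPayload,
  model: string,
  configuredMaxTokens: number,
  isVisionModel: boolean,
): TokenPayload {
  if (!isVisionModel) return payload;
  if (model.startsWith("o1")) {
    // o1-style models take max_completion_tokens rather than max_tokens.
    payload.max_completion_tokens = configuredMaxTokens;
  } else {
    // Other vision models keep max_tokens, floored at 4000 as in the diff.
    payload.max_tokens = Math.max(configuredMaxTokens, 4000);
  }
  return payload;
}

// Example: a vision request for "o1-mini" with max_tokens configured to 8000.
console.log(applyVisionTokenLimit({}, "o1-mini", 8000, true));
// -> { max_completion_tokens: 8000 }

Note that o3-prefixed models fall into the else branch here, mirroring the commit, which only special-cases isO1.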