diff --git a/README.md b/README.md
index 7d3f7145c..d0a6b409d 100644
--- a/README.md
+++ b/README.md
@@ -22,7 +22,7 @@ English / [简体中文](./README_CN.md)
 [![MacOS][MacOS-image]][download-url]
 [![Linux][Linux-image]][download-url]
 
-[NextChatAI](https://nextchat.club?utm_source=readme) / [Web App Demo](https://app.nextchat.dev) / [Desktop App](https://github.com/Yidadaa/ChatGPT-Next-Web/releases) / [Discord](https://discord.gg/YCkeafCafC) / [Enterprise Edition](#enterprise-edition) / [Twitter](https://twitter.com/NextChatDev)
+[NextChatAI](https://nextchat.club?utm_source=readme) / [iOS APP](https://apps.apple.com/us/app/nextchat-ai/id6743085599) / [Web App Demo](https://app.nextchat.dev) / [Desktop App](https://github.com/Yidadaa/ChatGPT-Next-Web/releases) / [Enterprise Edition](#enterprise-edition)
 
 [saas-url]: https://nextchat.club?utm_source=readme
 
@@ -40,13 +40,14 @@ English / [简体中文](./README_CN.md)
 
 
 
 
-## 🥳 Cheer for DeepSeek, China's AI star!
- > Purpose-Built UI for DeepSeek Reasoner Model
+## 🥳 Cheer for NextChat iOS Version Online!
+> [👉 Click Here to Install Now](https://apps.apple.com/us/app/nextchat-ai/id6743085599)
+
+> [❤️ Source Code Coming Soon](https://github.com/ChatGPTNextWeb/NextChat-iOS)
+
+![Github iOS Image](https://github.com/user-attachments/assets/e0aa334f-4c13-4dc9-8310-e3b09fa4b9f3)
+
-
-
-
-
 
 ## 🫣 NextChat Support MCP !
 > Before build, please set env ENABLE_MCP=true
diff --git a/app/client/platforms/openai.ts b/app/client/platforms/openai.ts
index c6f3fc425..4cad6bf94 100644
--- a/app/client/platforms/openai.ts
+++ b/app/client/platforms/openai.ts
@@ -198,7 +198,8 @@ export class ChatGPTApi implements LLMApi {
     const isDalle3 = _isDalle3(options.config.model);
     const isO1OrO3 =
       options.config.model.startsWith("o1") ||
-      options.config.model.startsWith("o3");
+      options.config.model.startsWith("o3") ||
+      options.config.model.startsWith("o4-mini");
     if (isDalle3) {
       const prompt = getMessageTextContent(
         options.messages.slice(-1)?.pop() as any,
@@ -243,7 +244,7 @@ export class ChatGPTApi implements LLMApi {
       }
 
       // add max_tokens to vision model
-      if (visionModel) {
+      if (visionModel && !isO1OrO3) {
         requestPayload["max_tokens"] = Math.max(modelConfig.max_tokens, 4000);
       }
     }
diff --git a/app/constant.ts b/app/constant.ts
index 422c42629..9fcea1187 100644
--- a/app/constant.ts
+++ b/app/constant.ts
@@ -478,6 +478,8 @@ export const VISION_MODEL_REGEXES = [
   /^dall-e-3$/, // Matches exactly "dall-e-3"
   /glm-4v/,
   /vl/i,
+  /o3/,
+  /o4-mini/,
 ];
 
 export const EXCLUDE_VISION_MODEL_REGEXES = [/claude-3-5-haiku-20241022/];
@@ -516,6 +518,8 @@ const openaiModels = [
   "o1-mini",
   "o1-preview",
   "o3-mini",
+  "o3",
+  "o4-mini",
 ];
 
 const googleModels = [
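
Reviewer note: the sketch below is not part of the patch; it only illustrates how the two halves of this change interact at runtime. It re-declares the subset of `VISION_MODEL_REGEXES` visible in the hunk above (the real list in `app/constant.ts` has more entries) and folds the `openai.ts` logic into a hypothetical `buildPayload` helper; `isVisionModel` here is a simplified stand-in for the project's vision-model check, not its actual implementation. The point: `o3` and `o4-mini` now match the vision regexes, but because they also trip the `isO1OrO3` flag, `max_tokens` is no longer attached to their requests.

```ts
// Standalone TypeScript sketch, not part of the patch. Names are re-declared here
// for illustration; the real lists live in app/constant.ts and the request logic
// in app/client/platforms/openai.ts.
const VISION_MODEL_REGEXES = [/^dall-e-3$/, /glm-4v/, /vl/i, /o3/, /o4-mini/]; // subset
const EXCLUDE_VISION_MODEL_REGEXES = [/claude-3-5-haiku-20241022/];

// Simplified stand-in for the project's vision-model check: a model counts as
// "vision" if it matches an include pattern and no exclude pattern.
function isVisionModel(model: string): boolean {
  return (
    VISION_MODEL_REGEXES.some((re) => re.test(model)) &&
    !EXCLUDE_VISION_MODEL_REGEXES.some((re) => re.test(model))
  );
}

// Hypothetical helper mirroring the openai.ts hunks: o1/o3/o4-mini are treated as
// reasoning models, so max_tokens is skipped even when the vision check matches.
function buildPayload(model: string, maxTokens: number): Record<string, unknown> {
  const isO1OrO3 =
    model.startsWith("o1") ||
    model.startsWith("o3") ||
    model.startsWith("o4-mini");
  const payload: Record<string, unknown> = { model };
  if (isVisionModel(model) && !isO1OrO3) {
    payload["max_tokens"] = Math.max(maxTokens, 4000);
  }
  return payload;
}

console.log(buildPayload("o4-mini", 2000)); // { model: "o4-mini" } (no max_tokens)
console.log(buildPayload("qwen-vl-max", 2000)); // { model: "qwen-vl-max", max_tokens: 4000 }
```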