Mirror of https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web.git, synced 2025-05-21 05:00:16 +09:00
Update constant.ts
This commit is contained in:
parent f5f3ce94f6
commit 6aecdd80e9

app/constant.ts · 128 changed lines
@@ -408,21 +408,9 @@ export const GEMINI_SUMMARIZE_MODEL = "gemini-pro";
export const DEEPSEEK_SUMMARIZE_MODEL = "deepseek-chat";

export const KnowledgeCutOffDate: Record<string, string> = {
  default: "2021-09",
  "gpt-4-turbo": "2023-12",
  "gpt-4-turbo-2024-04-09": "2023-12",
  "gpt-4-turbo-preview": "2023-12",
  default: "2023-10",
  "gpt-4o": "2023-10",
  "gpt-4o-2024-05-13": "2023-10",
  "gpt-4o-2024-08-06": "2023-10",
  "gpt-4o-2024-11-20": "2023-10",
  "chatgpt-4o-latest": "2023-10",
  "gpt-4o-mini": "2023-10",
  "gpt-4o-mini-2024-07-18": "2023-10",
  "gpt-4-vision-preview": "2023-04",
  "o1-mini-2024-09-12": "2023-10",
  "o1-mini": "2023-10",
  "o1-preview-2024-09-12": "2023-10",
  "o1-preview": "2023-10",
  "o1-2024-12-17": "2023-10",
  o1: "2023-10",
@@ -471,84 +459,26 @@ export const EXCLUDE_VISION_MODEL_REGEXES = [/claude-3-5-haiku-20241022/];
const openaiModels = [
  // As of July 2024, gpt-4o-mini should be used in place of gpt-3.5-turbo,
  // as it is cheaper, more capable, multimodal, and just as fast. gpt-3.5-turbo is still available for use in the API.
  "gpt-3.5-turbo",
  "gpt-3.5-turbo-1106",
  "gpt-3.5-turbo-0125",
  "gpt-4",
  "gpt-4-0613",
  "gpt-4-32k",
  "gpt-4-32k-0613",
  "gpt-4-turbo",
  "gpt-4-turbo-preview",
  "gpt-4o",
  "gpt-4o-2024-05-13",
  "gpt-4o-2024-08-06",
  "gpt-4o-2024-11-20",
  "chatgpt-4o-latest",
  "gpt-4o-mini",
  "gpt-4o-mini-2024-07-18",
  "gpt-4-vision-preview",
  "gpt-4-turbo-2024-04-09",
  "gpt-4-1106-preview",
  "dall-e-3",
  "o1-mini",
  "o1-preview",
  'o1',
  "o3-mini",
];

const googleModels = [
  "gemini-1.0-pro", // Deprecated on 2/15/2025
  "gemini-1.5-pro-latest",
  "gemini-1.5-pro",
  "gemini-1.5-pro-002",
  "gemini-1.5-pro-exp-0827",
  "gemini-1.5-flash-latest",
  "gemini-1.5-flash-8b-latest",
  "gemini-1.5-flash",
  "gemini-1.5-flash-8b",
  "gemini-1.5-flash-002",
  "gemini-1.5-flash-exp-0827",
  "learnlm-1.5-pro-experimental",
  "gemini-exp-1114",
  "gemini-exp-1121",
  "gemini-exp-1206",
  "gemini-2.0-flash",
  "gemini-2.0-flash-exp",
  "gemini-2.0-flash-lite-preview-02-05",
  "gemini-2.0-flash-thinking-exp",
  "gemini-2.0-flash-thinking-exp-1219",
  "gemini-2.0-flash-thinking-exp-01-21",
  "gemini-2.0-pro-exp",
  "gemini-2.0-pro-exp-02-05",
];

const anthropicModels = [
  "claude-instant-1.2",
  "claude-2.0",
  "claude-2.1",
  "claude-3-sonnet-20240229",
  "claude-3-opus-20240229",
  "claude-3-opus-latest",
  "claude-3-haiku-20240307",
  "claude-3-5-haiku-20241022",
  "claude-3-5-haiku-latest",
  "claude-3-5-sonnet-20240620",
  "claude-3-5-sonnet-20241022",
  "claude-3-5-sonnet-latest",
  "claude-3-7-sonnet-20250219",
];

const baiduModels = [
  "ernie-4.0-turbo-8k",
  "ernie-4.0-8k",
  "ernie-4.0-8k-preview",
  "ernie-4.0-8k-preview-0518",
  "ernie-4.0-8k-latest",
  "ernie-3.5-8k",
  "ernie-3.5-8k-0205",
  "ernie-speed-128k",
  "ernie-speed-8k",
  "ernie-lite-8k",
  "ernie-tiny-8k",
];

const bytedanceModels = [
@@ -561,32 +491,16 @@ const bytedanceModels = [
];

const alibabaModes = [
  "qwen-turbo",
  "qwen-plus",
  "qwen-max",
  "qwen-max-0428",
  "qwen-max-0403",
  "qwen-max-0107",
  "qwen-max-longcontext",
];

const tencentModels = [
  "hunyuan-pro",
  "hunyuan-standard",
  "hunyuan-lite",
  "hunyuan-role",
  "hunyuan-functioncall",
  "hunyuan-code",
  "hunyuan-vision",
];

const moonshotModes = ["moonshot-v1-8k", "moonshot-v1-32k", "moonshot-v1-128k"];

const iflytekModels = [
  "general",
  "generalv3",
  "pro-128k",
  "generalv3.5",
  "4.0Ultra",
];

@@ -594,50 +508,14 @@ const deepseekModels = ["deepseek-chat", "deepseek-coder", "deepseek-reasoner"];

const xAIModes = [
  "grok-beta",
  "grok-2",
  "grok-2-1212",
  "grok-2-latest",
  "grok-vision-beta",
  "grok-2-vision-1212",
  "grok-2-vision",
  "grok-2-vision-latest",
];

const chatglmModels = [
  "glm-4-plus",
  "glm-4-0520",
  "glm-4",
  "glm-4-air",
  "glm-4-airx",
  "glm-4-long",
  "glm-4-flashx",
  "glm-4-flash",
  "glm-4v-plus",
  "glm-4v",
  "glm-4v-flash", // free
  "cogview-3-plus",
  "cogview-3",
  "cogview-3-flash", // free
  // cannot be adapted to the polling task flow yet
  // "cogvideox",
  // "cogvideox-flash", // free
];

const siliconflowModels = [
  "Qwen/Qwen2.5-7B-Instruct",
  "Qwen/Qwen2.5-72B-Instruct",
  "deepseek-ai/DeepSeek-R1",
  "deepseek-ai/DeepSeek-R1-Distill-Llama-70B",
  "deepseek-ai/DeepSeek-R1-Distill-Llama-8B",
  "deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B",
  "deepseek-ai/DeepSeek-R1-Distill-Qwen-14B",
  "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B",
  "deepseek-ai/DeepSeek-R1-Distill-Qwen-7B",
  "deepseek-ai/DeepSeek-V3",
  "meta-llama/Llama-3.3-70B-Instruct",
  "THUDM/glm-4-9b-chat",
  "Pro/deepseek-ai/DeepSeek-R1",
  "Pro/deepseek-ai/DeepSeek-V3",
];

let seq = 1000; // the built-in model sequence-number generator starts at 1000
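
Note (not part of the commit itself): KnowledgeCutOffDate is a plain model-name-to-cutoff map, so one plausible way to consume it is a lookup that falls back to its "default" entry for unlisted models. The sketch below illustrates that pattern; resolveCutoff is a hypothetical helper name, not an export of app/constant.ts.

// Minimal sketch, assuming lookups fall back to the "default" key.
function resolveCutoff(model: string): string {
  return KnowledgeCutOffDate[model] ?? KnowledgeCutOffDate.default;
}
// With the new default of "2023-10", both resolveCutoff("gpt-4o") and
// resolveCutoff("some-unlisted-model") return "2023-10".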