feat: add claude-2.1 model

Zhang Minghan 2023-12-03 18:19:20 +08:00
parent 9e90acfd26
commit fa9f7da55e
7 changed files with 22 additions and 9 deletions

View File

@@ -8,7 +8,7 @@
},
"package": {
"productName": "chatnio",
"version": "3.7.0"
"version": "3.7.2"
},
"tauri": {
"allowlist": {

View File

@@ -69,7 +69,7 @@ export const ChannelInfos: Record<string, ChannelInfo> = {
id: 1,
endpoint: "https://api.anthropic.com",
format: "<x-api-key>",
- models: ["claude-instant-1", "claude-2"],
+ models: ["claude-instant-1", "claude-2", "claude-2.1"],
},
slack: {
id: 2,

View File

@@ -34,6 +34,7 @@ export const modelColorMapper: Record<string, string> = {
"claude-1-100k": "#ff9d3b",
"claude-slack": "#ff9d3b",
"claude-2": "#ff840b",
"claude-2.1": "#ff840b",
"claude-2-100k": "#ff840b",
"spark-desk-v1.5": "#06b3e8",

View File

@@ -8,7 +8,7 @@ import {
} from "@/utils/env.ts";
import { getMemory } from "@/utils/memory.ts";
export const version = "3.7.1";
export const version = "3.7.2";
export const dev: boolean = getDev();
export const deploy: boolean = true;
export let rest_api: string = getRestApi(deploy);
@@ -185,6 +185,13 @@ export const supportModels: Model[] = [
auth: true,
tag: ["official", "high-context"],
},
+ {
+   id: "claude-2.1",
+   name: "Claude 200k",
+   free: false,
+   auth: true,
+   tag: ["official", "high-context"],
+ },
// llama models
{
@@ -313,6 +320,7 @@ export const defaultModels = [
"claude-1-100k",
"claude-2",
"claude-2.1",
"spark-desk-v3",
"qwen-plus",
@@ -334,6 +342,7 @@ export const largeContextModels = [
"claude-1",
"claude-1-100k",
"claude-2",
"claude-2.1",
"claude-2-100k",
"zhipu-chatglm-turbo",
];
@@ -346,6 +355,7 @@ export const planModels: PlanModel[] = [
{ id: "gpt-4-all", level: 1 },
{ id: "gpt-4-dalle", level: 1 },
{ id: "claude-2", level: 1 },
{ id: "claude-2.1", level: 1 },
{ id: "claude-2-100k", level: 1 },
{ id: "midjourney-fast", level: 2 },
];
@@ -369,6 +379,7 @@ export const modelAvatars: Record<string, string> = {
"gpt-4-dalle": "gpt4dalle.png",
"claude-1-100k": "claude.png",
"claude-2": "claude100k.png",
"claude-2.1": "claude100k.png",
"stable-diffusion": "stablediffusion.jpeg",
"llama-2-70b": "llama2.webp",
"llama-2-13b": "llama2.webp",

View File

@@ -18,7 +18,7 @@ func CanEnableModel(db *sql.DB, user *User, model string) bool {
return user != nil && user.GetQuota(db) >= 50
case globals.SparkDesk, globals.SparkDeskV2, globals.SparkDeskV3:
return user != nil && user.GetQuota(db) >= 1
- case globals.Claude1100k, globals.Claude2100k:
+ case globals.Claude1100k, globals.Claude2100k, globals.Claude2200k:
return user != nil && user.GetQuota(db) >= 1
case globals.ZhiPuChatGLMTurbo, globals.ZhiPuChatGLMPro, globals.ZhiPuChatGLMStd:
return user != nil && user.GetQuota(db) >= 1
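A minimal standalone sketch (not part of the commit) of the gate above: claude-2.1 joins the 100k-class Claude case, which only requires a remaining quota of at least 1. Here canEnableClaude is a hypothetical stand-in for CanEnableModel that drops the database and user plumbing; the model id strings are the constant values declared in globals (Claude1100k = "claude-1.3", Claude2100k = "claude-2", Claude2200k = "claude-2.1").

package main

import "fmt"

// canEnableClaude keeps only the quota threshold for the Claude case that
// claude-2.1 was added to; every other model is allowed for simplicity.
func canEnableClaude(model string, quota float64) bool {
    switch model {
    case "claude-1.3", "claude-2", "claude-2.1": // Claude1100k, Claude2100k, Claude2200k
        return quota >= 1
    default:
        return true
    }
}

func main() {
    fmt.Println(canEnableClaude("claude-2.1", 0.5)) // false: quota below 1
    fmt.Println(canEnableClaude("claude-2.1", 20))  // true
}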

View File

@@ -63,6 +63,7 @@ const (
Claude1100k = "claude-1.3"
Claude2 = "claude-1-100k"
Claude2100k = "claude-2"
+ Claude2200k = "claude-2.1"
ClaudeSlack = "claude-slack"
SparkDesk = "spark-desk-v1.5"
SparkDeskV2 = "spark-desk-v2"
@@ -120,7 +121,7 @@ var LongContextModelArray = []string{
GPT41106Preview, GPT41106VisionPreview, GPT432k, GPT432k0314, GPT432k0613,
Claude1, Claude1100k,
CodeLLaMa34B, LLaMa270B,
- Claude2, Claude2100k,
+ Claude2, Claude2100k, Claude2200k,
}
var FreeModelArray = []string{
@@ -159,7 +160,7 @@ func IsDalleModel(model string) bool {
}
func IsClaude100KModel(model string) bool {
- return model == Claude1100k || model == Claude2100k
+ return model == Claude1100k || model == Claude2100k || model == Claude2200k
}
func IsMidjourneyFastModel(model string) bool {
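For reference, a self-contained sketch (not part of the commit) of the membership check above with the constant values from the const block inlined, showing that claude-2.1 is now classified as a 100k-class Claude model:

package main

import "fmt"

// Constant values copied from the const block in this file.
const (
    Claude1100k = "claude-1.3"
    Claude2100k = "claude-2"
    Claude2200k = "claude-2.1"
)

// Same membership check as in the diff, restated so it runs standalone.
func IsClaude100KModel(model string) bool {
    return model == Claude1100k || model == Claude2100k || model == Claude2200k
}

func main() {
    fmt.Println(IsClaude100KModel("claude-2.1"))       // true after this commit
    fmt.Println(IsClaude100KModel("claude-instant-1")) // false
}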

View File

@@ -14,7 +14,7 @@ func GetWeightByModel(model string) int {
switch model {
case globals.GPT3TurboInstruct,
globals.Claude1, globals.Claude1100k,
- globals.Claude2, globals.Claude2100k:
+ globals.Claude2, globals.Claude2100k, globals.Claude2200k:
return 2
case globals.GPT3Turbo, globals.GPT3Turbo0613, globals.GPT3Turbo1106,
globals.GPT3Turbo16k, globals.GPT3Turbo16k0613,
@@ -93,7 +93,7 @@ func CountInputToken(model string, v []globals.Message) float32 {
return float32(CountTokenPrice(v, model)) / 1000 * 0.3
case globals.Claude1, globals.Claude2:
return 0
- case globals.Claude1100k, globals.Claude2100k:
+ case globals.Claude1100k, globals.Claude2100k, globals.Claude2200k:
return float32(CountTokenPrice(v, model)) / 1000 * 0.8
case globals.LLaMa270B, globals.CodeLLaMa34B:
return float32(CountTokenPrice(v, model)) / 1000 * 0.25
@@ -141,7 +141,7 @@ func CountOutputToken(model string, t int) float32 {
return float32(t*GetWeightByModel(model)) / 1000 * 0.3
case globals.Claude1, globals.Claude2:
return 0
- case globals.Claude1100k, globals.Claude2100k:
+ case globals.Claude1100k, globals.Claude2100k, globals.Claude2200k:
return float32(t*GetWeightByModel(model)) / 1000 * 2.4
case globals.LLaMa270B, globals.CodeLLaMa34B:
return float32(t*GetWeightByModel(model)) / 1000 * 0.25
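A rough worked example (not part of the commit) of the billing the new model now falls under: weight 2 from GetWeightByModel, 0.8 per 1,000 weighted input tokens in CountInputToken, and 2.4 per 1,000 weighted output tokens in CountOutputToken. The token counts below are invented for illustration; the real code derives them via CountTokenPrice.

package main

import "fmt"

func main() {
    weight := 2                 // GetWeightByModel result for the Claude 2.x case
    weightedInputTokens := 1500 // hypothetical CountTokenPrice(v, model) result
    outputTokens := 500         // hypothetical completion length

    inputQuota := float32(weightedInputTokens) / 1000 * 0.8
    outputQuota := float32(outputTokens*weight) / 1000 * 2.4

    fmt.Printf("input: %.2f quota, output: %.2f quota\n", inputQuota, outputQuota)
    // input: 1.20 quota, output: 2.40 quota
}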