mirror of https://github.com/coaidev/coai.git
update buffer and vision image counter

commit bb783f04b2 (parent b7535111e0)
@@ -42,13 +42,13 @@ type ChatProps struct {
     TopK *int
     Tools *globals.FunctionTools
     ToolChoice *interface{}
+    Buffer utils.Buffer
 }

 func createChatRequest(props *ChatProps, hook globals.Hook) error {
     if oneapi.IsHit(props.Model) {
-        return oneapi.HandleRequest(&oneapi.AdapterProps{
+        return oneapi.NewChatInstanceFromConfig().CreateStreamChatRequest(&oneapi.ChatProps{
             Model: props.Model,
-            Plan: props.Plan,
             Message: props.Message,
             Token: utils.Multi(
                 props.Token == 0,
@@ -61,6 +61,7 @@ func createChatRequest(props *ChatProps, hook globals.Hook) error {
             TopP: props.TopP,
             Tools: props.Tools,
             ToolChoice: props.ToolChoice,
+            Buffer: props.Buffer,
         }, hook)

     } else if globals.IsChatGPTModel(props.Model) {
@@ -82,6 +83,7 @@ func createChatRequest(props *ChatProps, hook globals.Hook) error {
             TopP: props.TopP,
             Tools: props.Tools,
             ToolChoice: props.ToolChoice,
+            Buffer: props.Buffer,
         }, hook)

     } else if globals.IsClaudeModel(props.Model) {
@@ -102,6 +104,7 @@ func createChatRequest(props *ChatProps, hook globals.Hook) error {
             Temperature: props.Temperature,
             TopK: props.TopK,
             Tools: props.Tools,
+            Buffer: props.Buffer,
         }, hook)

     } else if globals.IsPalm2Model(props.Model) {
@@ -17,6 +17,7 @@ type ChatProps struct {
     TopP *float32
     Tools *globals.FunctionTools
     ToolChoice *interface{}
+    Buffer utils.Buffer
 }

 func (c *ChatInstance) GetChatEndpoint(props *ChatProps) string {
@@ -113,7 +114,7 @@ func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, callback global
         c.GetHeader(),
         c.GetChatBody(props, true),
         func(data string) error {
-            data, err := c.ProcessLine(instruct, buf, data)
+            data, err := c.ProcessLine(props.Buffer, instruct, buf, data)
             chunk += data

             if err != nil {
@@ -106,7 +106,15 @@ func getCompletionChoices(form *CompletionResponse) string {
     return form.Data.Choices[0].Text
 }

-func (c *ChatInstance) ProcessLine(instruct bool, buf, data string) (string, error) {
+func getToolCalls(form *ChatStreamResponse) *globals.ToolCalls {
+    if len(form.Data.Choices) == 0 {
+        return nil
+    }
+
+    return form.Data.Choices[0].Delta.ToolCalls
+}
+
+func (c *ChatInstance) ProcessLine(obj utils.Buffer, instruct bool, buf, data string) (string, error) {
     item := processFormat(buf + data)
     if isDone(item) {
         return "", nil
@@ -122,7 +130,7 @@ func (c *ChatInstance) ProcessLine(instruct bool, buf, data string) (string, err

     // recursive call
     if len(buf) > 0 {
-        return c.ProcessLine(instruct, "", buf+item)
+        return c.ProcessLine(obj, instruct, "", buf+item)
     }

     if err := processChatErrorResponse(item); err == nil || err.Data.Error.Message == "" {
@@ -133,6 +141,7 @@ func (c *ChatInstance) ProcessLine(instruct bool, buf, data string) (string, err
         }

     } else {
+        obj.SetToolCalls(getToolCalls(form))
         return getChoices(form), nil
     }
 }
@@ -52,7 +52,8 @@ func NewChatInstanceFromModel(props *InstanceProps) *ChatInstance {
         globals.GPT432k, globals.GPT432k0613, globals.GPT432k0314:
         return NewChatInstanceFromConfig("gpt4")

-    case globals.GPT3Turbo1106, globals.GPT41106Preview, globals.GPT4Vision, globals.GPT4Dalle, globals.Dalle3, globals.GPT4All:
+    case globals.GPT3Turbo1106, globals.GPT41106Preview, globals.GPT41106VisionPreview,
+        globals.GPT4Vision, globals.GPT4Dalle, globals.Dalle3, globals.GPT4All:
         return NewChatInstanceFromConfig("reverse")

     case globals.GPT3Turbo, globals.GPT3TurboInstruct, globals.GPT3Turbo0613, globals.GPT3Turbo0301,
@@ -17,6 +17,7 @@ type ChatProps struct {
     TopP *float32 `json:"top_p"`
     Tools *globals.FunctionTools `json:"tools"`
     ToolChoice *interface{} `json:"tool_choice"` // string or object
+    Buffer utils.Buffer
 }

 func (c *ChatInstance) GetChatEndpoint() string {
@@ -69,7 +70,7 @@ func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, callback global
         c.GetHeader(),
         c.GetChatBody(props, true),
         func(data string) error {
-            data, err := c.ProcessLine(buf, data)
+            data, err := c.ProcessLine(props.Buffer, buf, data)

             if err != nil {
                 if strings.HasPrefix(err.Error(), "oneapi error") {
@@ -12,18 +12,6 @@ var HitModels = []string{
     globals.CodeLLaMa34B, globals.CodeLLaMa13B, globals.CodeLLaMa7B,
 }

-func (c *ChatInstance) Process(data string) string {
-    return data
-}
-
-func (c *ChatInstance) FormatMessage(message []globals.Message) []globals.Message {
-    return message
-}
-
-func (c *ChatInstance) FormatModel(model string) string {
-    return model
-}
-
 func (c *ChatInstance) GetToken(model string) int {
     switch model {
     case globals.Claude1, globals.Claude2:
@@ -1,36 +0,0 @@
-package oneapi
-
-import (
-    "chat/globals"
-)
-
-type AdapterProps struct {
-    Model string
-    Plan bool
-    Infinity bool
-    Message []globals.Message
-    Token *int
-    PresencePenalty *float32
-    FrequencyPenalty *float32
-    Temperature *float32
-    TopP *float32
-    Tools *globals.FunctionTools
-    ToolChoice *interface{}
-}
-
-func HandleRequest(props *AdapterProps, hook globals.Hook) error {
-    instance := NewChatInstanceFromConfig()
-    return instance.CreateStreamChatRequest(&ChatProps{
-        Model: instance.FormatModel(props.Model),
-        Message: instance.FormatMessage(props.Message),
-        Token: props.Token,
-        PresencePenalty: props.PresencePenalty,
-        FrequencyPenalty: props.FrequencyPenalty,
-        Temperature: props.Temperature,
-        TopP: props.TopP,
-        Tools: props.Tools,
-        ToolChoice: props.ToolChoice,
-    }, func(data string) error {
-        return hook(instance.Process(data))
-    })
-}
@@ -72,7 +72,15 @@ func getChoices(form *ChatStreamResponse) string {
     return form.Data.Choices[0].Delta.Content
 }

-func (c *ChatInstance) ProcessLine(buf, data string) (string, error) {
+func getToolCalls(form *ChatStreamResponse) *globals.ToolCalls {
+    if len(form.Data.Choices) == 0 {
+        return nil
+    }
+
+    return form.Data.Choices[0].Delta.ToolCalls
+}
+
+func (c *ChatInstance) ProcessLine(obj utils.Buffer, buf, data string) (string, error) {
     item := processFormat(buf + data)
     if isDone(item) {
         return "", nil
@@ -81,7 +89,7 @@ func (c *ChatInstance) ProcessLine(buf, data string) (string, error) {
     if form := processChatResponse(item); form == nil {
         // recursive call
         if len(buf) > 0 {
-            return c.ProcessLine("", buf+item)
+            return c.ProcessLine(obj, "", buf+item)
         }

         if err := processChatErrorResponse(item); err == nil {
@@ -92,6 +100,7 @@ func (c *ChatInstance) ProcessLine(buf, data string) (string, error) {
         }

     } else {
+        obj.SetToolCalls(getToolCalls(form))
         return getChoices(form), nil
     }
 }
@@ -13,6 +13,7 @@ type ChatProps struct {
     Temperature *float32
     TopK *int
     Tools *globals.FunctionTools
+    Buffer utils.Buffer
 }

 func GetToken(props *ChatProps) *int {
@@ -65,6 +66,21 @@ func (c *ChatInstance) GetFunctionCalling(props *ChatProps) *FunctionsPayload {
     }
 }

+func getChoice(form *ChatResponse, buffer utils.Buffer) string {
+    resp := form.Payload.Choices.Text
+    if len(resp) == 0 {
+        return ""
+    }
+
+    buffer.SetToolCalls(&globals.ToolCalls{
+        globals.ToolCall{
+            Type: "text",
+            Id: globals.ToolCallId(form.Header.Sid),
+        },
+    })
+    return resp[0].Content
+}
+
 func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, hook globals.Hook) error {
     var conn *utils.WebSocket
     if conn = utils.NewWebsocketClient(c.GenerateUrl()); conn == nil {
@@ -102,7 +118,7 @@ func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, hook globals.Ho
             return fmt.Errorf("sparkdesk error: %s (sid: %s)", form.Header.Message, form.Header.Sid)
         }

-        if err := hook(form.Payload.Choices.Text[0].Content); err != nil {
+        if err := hook(getChoice(form, props.Buffer)); err != nil {
             return err
         }
     }
@@ -34,8 +34,7 @@ type FunctionCall struct {
 }

 type MessagePayload struct {
     Text []Message `json:"text"`
-    Functions FunctionsPayload `json:"functions"`
 }

 type RequestParameter struct {
@@ -21,6 +21,7 @@ func CreateGeneration(model string, prompt string, path string, plan bool, hook
         Message: message,
         Plan: plan,
         Infinity: true,
+        Buffer: *buffer,
     }, func(data string) error {
         buffer.Write(data)
         hook(buffer, data)
app/.env.deeptrain (new file, 2 lines)
@@ -0,0 +1,2 @@
+VITE_USE_DEEPTRAIN=true
+VITE_BACKEND_ENDPOINT=https://api.chatnio.net
@@ -9,7 +9,7 @@
     <meta name="description" content="👋 Chat Nio, lightweight Web ChatGPT chat site 👋 Chat Nio, 一个轻量级的联网版 AI 聊天网站">
     <meta name="author" content="Deeptrain Team">
     <meta name="theme-color" content="#000000">
-    <meta itemprop="image" content="https://chatnio.net/favicon.ico">
+    <meta itemprop="image" content="/favicon.ico">
     <meta name="baidu-site-verification" content="codeva-TJkbi40ZBi" />
     <link href="https://open.lightxi.com/fonts/Andika" rel="stylesheet">
     <link href="https://open.lightxi.com/fonts/Jetbrains-Mono" rel="stylesheet">
@@ -8,6 +8,7 @@ export type StreamMessage = {
   quota?: number;
   message: string;
   end: boolean;
+  plan?: boolean;
 };

 export type ChatProps = {
@@ -188,11 +188,13 @@ export class Conversation {
     keyword?: string,
     quota?: number,
     end?: boolean,
+    plan?: boolean,
   ) {
     this.data[idx].content += message;
     if (keyword) this.data[idx].keyword = keyword;
     if (quota) this.data[idx].quota = quota;
     this.data[idx].end = end;
+    this.data[idx].plan = plan;
     this.triggerCallback();
   }

@@ -209,6 +211,7 @@ export class Conversation {
       message.keyword,
       message.quota,
       message.end,
+      message.plan,
     );
     if (message.end) {
       this.end = true;
@@ -1,5 +1,5 @@
 import axios from "axios";
-import { blob_api } from "@/conf.ts";
+import { blobEndpoint } from "@/utils/env.ts";

 export type BlobParserResponse = {
   status: boolean;
@@ -18,7 +18,7 @@ export type FileArray = FileObject[];
 export async function blobParser(file: File): Promise<BlobParserResponse> {
   try {
     const resp = await axios.post(
-      `${blob_api}/upload`,
+      `${blobEndpoint}/upload`,
       { file },
       {
         headers: { "Content-Type": "multipart/form-data" },
@@ -6,6 +6,7 @@ export type Message = {
   keyword?: string;
   quota?: number;
   end?: boolean;
+  plan?: boolean;
 };

 export type Model = {
@@ -54,7 +54,7 @@

   --model-card-background: rgba(222,214,200,.2);

-  --gold: 45, 100%, 50%;
+  --gold: 45 100% 50%;
 }

 .dark {
@@ -111,6 +111,7 @@
 .quota {
   font-size: 14px;
   color: hsl(var(--text-secondary));
+  transition: .25s;
 }

 .icon {
@@ -47,7 +47,7 @@ function MessageSegment(props: MessageProps) {
   <TooltipProvider>
     <Tooltip>
       <TooltipTrigger asChild>
-        <div className={`message-quota`}>
+        <div className={`message-quota ${message.plan ? "subscription" : ""}`}>
           <Cloud className={`h-4 w-4 icon`} />
           <span className={`quota`}>
             {(message.quota < 0 ? 0 : message.quota).toFixed(2)}
@@ -10,26 +10,31 @@ import { useState } from "react";
 import { useSelector } from "react-redux";
 import { selectInit } from "@/store/auth.ts";
 import { useEffectAsync } from "@/utils/hook.ts";
-import {BroadcastInfo, createBroadcast, getBroadcastList} from "@/api/broadcast.ts";
+import {
+  BroadcastInfo,
+  createBroadcast,
+  getBroadcastList,
+} from "@/api/broadcast.ts";
 import { useTranslation } from "react-i18next";
 import { extractMessage } from "@/utils/processor.ts";
 import { Button } from "@/components/ui/button.tsx";
-import {Plus, RotateCcw} from "lucide-react";
-import {useToast} from "@/components/ui/use-toast.ts";
+import { Plus, RotateCcw } from "lucide-react";
+import { useToast } from "@/components/ui/use-toast.ts";
 import {
   Dialog,
   DialogContent,
-  DialogDescription, DialogFooter,
+  DialogDescription,
+  DialogFooter,
   DialogHeader,
   DialogTitle,
-  DialogTrigger
+  DialogTrigger,
 } from "@/components/ui/dialog.tsx";
-import {Textarea} from "@/components/ui/textarea.tsx";
-import {DialogClose} from "@radix-ui/react-dialog";
+import { Textarea } from "@/components/ui/textarea.tsx";
+import { DialogClose } from "@radix-ui/react-dialog";

 type CreateBroadcastDialogProps = {
   onCreated?: () => void;
-}
+};

 function CreateBroadcastDialog(props: CreateBroadcastDialogProps) {
   const { t } = useTranslation();
@@ -43,21 +48,20 @@ function CreateBroadcastDialog(props: CreateBroadcastDialogProps) {
     const resp = await createBroadcast(broadcast);
     if (resp.status) {
       toast({
-        title: t('admin.post-success'),
-        description: t('admin.post-success-prompt'),
+        title: t("admin.post-success"),
+        description: t("admin.post-success-prompt"),
       });
       setContent("");
       setOpen(false);
       props.onCreated?.();
     } else {
       toast({
-        title: t('admin.post-failed'),
-        description: t('admin.post-failed-prompt', {reason: resp.error}),
+        title: t("admin.post-failed"),
+        description: t("admin.post-failed-prompt", { reason: resp.error }),
       });
     }
   }

-
   return (
     <Dialog open={open} onOpenChange={setOpen}>
       <DialogTrigger asChild>
@@ -90,7 +94,7 @@ function CreateBroadcastDialog(props: CreateBroadcastDialogProps) {
         </DialogFooter>
       </DialogContent>
     </Dialog>
-  )
+  );
 }
 function BroadcastTable() {
   const { t } = useTranslation();
@@ -114,9 +118,9 @@ function BroadcastTable() {
         <RotateCcw className={`w-4 h-4`} />
       </Button>
       <div className={`grow`} />
-      <CreateBroadcastDialog onCreated={async () => (
-        setData(await getBroadcastList())
-      )} />
+      <CreateBroadcastDialog
+        onCreated={async () => setData(await getBroadcastList())}
+      />
     </div>
     <Table>
       <TableHeader>
@@ -27,6 +27,7 @@ import { openDialog as openInvitationDialog } from "@/store/invitation.ts";
 import { openDialog as openSharingDialog } from "@/store/sharing.ts";
 import { openDialog as openApiDialog } from "@/store/api.ts";
 import router from "@/router.tsx";
+import { useDeeptrain } from "@/utils/env.ts";

 type MenuBarProps = {
   children: React.ReactNode;
@@ -58,10 +59,12 @@ function MenuBar({ children, className }: MenuBarProps) {
         <CalendarPlus className={`h-4 w-4 mr-1`} />
         {t("sub.title")}
       </DropdownMenuItem>
-      <DropdownMenuItem onClick={() => dispatch(openPackageDialog())}>
-        <Boxes className={`h-4 w-4 mr-1`} />
-        {t("pkg.title")}
-      </DropdownMenuItem>
+      {useDeeptrain && (
+        <DropdownMenuItem onClick={() => dispatch(openPackageDialog())}>
+          <Boxes className={`h-4 w-4 mr-1`} />
+          {t("pkg.title")}
+        </DropdownMenuItem>
+      )}
       <DropdownMenuItem onClick={() => dispatch(openInvitationDialog())}>
         <Gift className={`h-4 w-4 mr-1`} />
         {t("invitation.title")}
@@ -20,6 +20,7 @@ import {
 } from "@/components/ui/dialog.tsx";
 import { getLanguage } from "@/i18n.ts";
 import { selectAuthenticated } from "@/store/auth.ts";
+import { docsEndpoint, useDeeptrain } from "@/utils/env.ts";

 function ChatSpace() {
   const [open, setOpen] = useState(false);
@@ -31,11 +32,13 @@ function ChatSpace() {

   return (
     <div className={`chat-product`}>
-      <Button variant={`outline`} onClick={() => setOpen(true)}>
-        <Users2 className={`h-4 w-4 mr-1.5`} />
-        {t("contact.title")}
-        <ChevronRight className={`h-4 w-4 ml-2`} />
-      </Button>
+      {useDeeptrain && (
+        <Button variant={`outline`} onClick={() => setOpen(true)}>
+          <Users2 className={`h-4 w-4 mr-1.5`} />
+          {t("contact.title")}
+          <ChevronRight className={`h-4 w-4 ml-2`} />
+        </Button>
+      )}
       {subscription && (
         <Button variant={`outline`} onClick={() => router.navigate("/article")}>
           <Newspaper className={`h-4 w-4 mr-1.5`} />
@@ -58,9 +61,7 @@ function ChatSpace() {
       <Button
         className={`mx-auto`}
         variant={`outline`}
-        onClick={() =>
-          window.open("https://docs.chatnio.net", "_blank")
-        }
+        onClick={() => window.open(docsEndpoint, "_blank")}
       >
         <BookMarked className={`h-4 w-4 mr-1.5`} />
         {t("docs.title")}
@@ -13,7 +13,6 @@ import React, { useMemo, useState } from "react";
 import {
   login,
   modelAvatars,
-  modelPricingLink,
   planModels,
   studentModels,
   supportModels,
@@ -35,6 +34,7 @@ import { teenagerSelector } from "@/store/package.ts";
 import { ToastAction } from "@/components/ui/toast.tsx";
 import { selectAuthenticated } from "@/store/auth.ts";
 import { useToast } from "@/components/ui/use-toast.ts";
+import { docsEndpoint } from "@/utils/env.ts";

 type SearchBarProps = {
   value: string;
@@ -231,7 +231,7 @@ function MarketFooter() {

   return (
     <div className={`market-footer`}>
-      <a href={modelPricingLink} target={`_blank`}>
+      <a href={docsEndpoint} target={`_blank`}>
         <Link size={14} className={`mr-1`} />
         {t("pricing")}
       </a>
@@ -8,14 +8,12 @@ import {
 } from "@/utils/env.ts";
 import { getMemory } from "@/utils/memory.ts";
-
-export const version = "3.6.32";
+export const version = "3.6.33";
 export const dev: boolean = getDev();
 export const deploy: boolean = true;
 export let rest_api: string = getRestApi(deploy);
 export let ws_api: string = getWebsocketApi(deploy);
-export let blob_api: string = "https://blob.chatnio.net";
 export const tokenField = getTokenField(deploy);

 export const supportModels: Model[] = [
   // openai models
   {
@@ -388,11 +386,9 @@ export const modelAvatars: Record<string, string> = {
   hunyuan: "hunyuan.png",
   "360-gpt-v9": "360gpt.png",
   "baichuan-53b": "baichuan.png",
-  "skylark-chat": "skylark.jpg"
+  "skylark-chat": "skylark.jpg",
 };

-export const modelPricingLink = "https://docs.chatnio.net/ai-mo-xing-ji-ji-fei";
-
 export function login() {
   location.href = `https://deeptrain.net/login?app=${dev ? "dev" : "chatnio"}`;
 }
@@ -23,6 +23,7 @@ import { useToast } from "@/components/ui/use-toast.ts";
 import { copyClipboard } from "@/utils/dom.ts";
 import { useEffectAsync } from "@/utils/hook.ts";
 import { selectInit } from "@/store/auth.ts";
+import { docsEndpoint } from "@/utils/env.ts";

 function ApikeyDialog() {
   const { t } = useTranslation();
@@ -58,7 +59,7 @@ function ApikeyDialog() {
         </Button>
       </div>
       <Button variant={`outline`} asChild>
-        <a href={`https://docs.chatnio.net`} target={`_blank`}>
+        <a href={docsEndpoint} target={`_blank`}>
           <ExternalLink className={`h-4 w-4 mr-2`} />
           {t("buy.learn-more")}
         </a>
@@ -43,6 +43,7 @@ import { useToast } from "@/components/ui/use-toast.ts";
 import { useEffectAsync } from "@/utils/hook.ts";
 import { selectAuthenticated } from "@/store/auth.ts";
 import { ToastAction } from "@/components/ui/toast.tsx";
+import { docsEndpoint } from "@/utils/env.ts";

 type AmountComponentProps = {
   amount: number;
@@ -314,7 +315,7 @@ function QuotaDialog() {
       </div>
       <div className={`tip`}>
         <Button variant={`outline`} asChild>
-          <a href={`https://docs.chatnio.net`} target={`_blank`}>
+          <a href={docsEndpoint} target={`_blank`}>
             <ExternalLink className={`h-4 w-4 mr-2`} />
             {t("buy.learn-more")}
           </a>
@@ -1,3 +1,10 @@
+export const useDeeptrain = !!import.meta.env.VITE_USE_DEEPTRAIN;
+export const backendEndpoint = import.meta.env.VITE_BACKEND_ENDPOINT || "/api";
+export const blobEndpoint =
+  import.meta.env.VITE_BLOB_ENDPOINT || "https://blob.chatnio.net";
+export const docsEndpoint =
+  import.meta.env.VITE_DOCS_ENDPOINT || "https://docs.chatnio.net";
+
 export function getDev(): boolean {
   /**
   * return if the current environment is development
@@ -9,14 +16,24 @@ export function getRestApi(deploy: boolean): string {
   /**
   * return the REST API address
   */
-  return !deploy ? "http://localhost:8094" : "https://api.chatnio.net";
+  return !deploy ? "http://localhost:8094" : backendEndpoint;
 }

 export function getWebsocketApi(deploy: boolean): string {
   /**
   * return the WebSocket API address
   */
-  return !deploy ? "ws://localhost:8094" : "wss://api.chatnio.net";
+  if (!deploy) return "ws://localhost:8094";
+
+  if (backendEndpoint.startsWith("http://"))
+    return `ws://${backendEndpoint.slice(7)}`;
+  if (backendEndpoint.startsWith("https://"))
+    return `wss://${backendEndpoint.slice(8)}`;
+  if (backendEndpoint.startsWith("/"))
+    return location.protocol === "https:"
+      ? `wss://${location.host}${backendEndpoint}`
+      : `ws://${location.host}${backendEndpoint}`;
+  return backendEndpoint;
 }

 export function getTokenField(deploy: boolean): string {
@@ -64,5 +64,15 @@ export default defineConfig({
       chunkFileNames: `assets/[name].[hash].js`,
     },
   },
+  },
+  server: {
+    proxy: {
+      "/api": {
+        target: "http://localhost:8094",
+        changeOrigin: true,
+        rewrite: (path) => path.replace(/^\/api/, ""),
+        ws: true,
+      }
+    }
 }
 });
@@ -11,7 +11,7 @@ func CanEnableModel(db *sql.DB, user *User, model string) bool {
     switch model {
     case globals.GPT3Turbo, globals.GPT3TurboInstruct, globals.GPT3Turbo0301, globals.GPT3Turbo0613:
         return true
-    case globals.GPT4, globals.GPT40613, globals.GPT40314, globals.GPT41106Preview,
+    case globals.GPT4, globals.GPT40613, globals.GPT40314, globals.GPT41106Preview, globals.GPT41106VisionPreview,
         globals.GPT4Dalle, globals.GPT4Vision, globals.Dalle3:
         return user != nil && user.GetQuota(db) >= 5
     case globals.GPT432k, globals.GPT432k0613, globals.GPT432k0314:
@@ -13,6 +13,7 @@ type ChatSegmentResponse struct {
     Keyword string `json:"keyword"`
     Message string `json:"message"`
     End bool `json:"end"`
+    Plan bool `json:"plan"`
 }

 type GenerationSegmentResponse struct {
@@ -38,63 +38,64 @@ func OriginIsOpen(c *gin.Context) bool {
 }

 const (
     GPT3Turbo = "gpt-3.5-turbo"
     GPT3TurboInstruct = "gpt-3.5-turbo-instruct"
     GPT3Turbo0613 = "gpt-3.5-turbo-0613"
     GPT3Turbo0301 = "gpt-3.5-turbo-0301"
     GPT3Turbo1106 = "gpt-3.5-turbo-1106"
     GPT3Turbo16k = "gpt-3.5-turbo-16k"
     GPT3Turbo16k0613 = "gpt-3.5-turbo-16k-0613"
     GPT3Turbo16k0301 = "gpt-3.5-turbo-16k-0301"
     GPT4 = "gpt-4"
     GPT4All = "gpt-4-all"
     GPT4Vision = "gpt-4-v"
     GPT4Dalle = "gpt-4-dalle"
     GPT40314 = "gpt-4-0314"
     GPT40613 = "gpt-4-0613"
     GPT41106Preview = "gpt-4-1106-preview"
+    GPT41106VisionPreview = "gpt-4-vision-preview"
     GPT432k = "gpt-4-32k"
     GPT432k0314 = "gpt-4-32k-0314"
     GPT432k0613 = "gpt-4-32k-0613"
     Dalle2 = "dall-e-2"
     Dalle3 = "dall-e-3"
     Claude1 = "claude-1"
     Claude1100k = "claude-1.3"
     Claude2 = "claude-1-100k"
     Claude2100k = "claude-2"
     ClaudeSlack = "claude-slack"
     SparkDesk = "spark-desk-v1.5"
     SparkDeskV2 = "spark-desk-v2"
     SparkDeskV3 = "spark-desk-v3"
     ChatBison001 = "chat-bison-001"
     BingCreative = "bing-creative"
     BingBalanced = "bing-balanced"
     BingPrecise = "bing-precise"
     ZhiPuChatGLMTurbo = "zhipu-chatglm-turbo"
     ZhiPuChatGLMPro = "zhipu-chatglm-pro"
     ZhiPuChatGLMStd = "zhipu-chatglm-std"
     ZhiPuChatGLMLite = "zhipu-chatglm-lite"
     QwenTurbo = "qwen-turbo"
     QwenPlus = "qwen-plus"
     QwenTurboNet = "qwen-turbo-net"
     QwenPlusNet = "qwen-plus-net"
     Midjourney = "midjourney"
     MidjourneyFast = "midjourney-fast"
     MidjourneyTurbo = "midjourney-turbo"
     StableDiffusion = "stable-diffusion"
     LLaMa270B = "llama-2-70b"
     LLaMa213B = "llama-2-13b"
     LLaMa27B = "llama-2-7b"
     CodeLLaMa34B = "code-llama-34b"
     CodeLLaMa13B = "code-llama-13b"
     CodeLLaMa7B = "code-llama-7b"
     Hunyuan = "hunyuan"
     GPT360V9 = "360-gpt-v9"
     Baichuan53B = "baichuan-53b"
     SkylarkLite = "skylark-lite-public"
     SkylarkPlus = "skylark-plus-public"
     SkylarkPro = "skylark-pro-public"
     SkylarkChat = "skylark-chat"
 )

 var GPT3TurboArray = []string{
@@ -112,7 +113,7 @@ var GPT3Turbo16kArray = []string{
 }

 var GPT4Array = []string{
-    GPT4, GPT40314, GPT40613, GPT41106Preview,
+    GPT4, GPT40314, GPT40613, GPT41106Preview, GPT41106VisionPreview,
     GPT4Vision, GPT4Dalle, GPT4All,
 }

@@ -173,7 +174,7 @@ var SkylarkModelArray = []string{

 var LongContextModelArray = []string{
     GPT3Turbo16k, GPT3Turbo16k0613, GPT3Turbo16k0301,
-    GPT41106Preview, GPT432k, GPT432k0314, GPT432k0613,
+    GPT41106Preview, GPT41106VisionPreview, GPT432k, GPT432k0314, GPT432k0613,
     Claude1, Claude1100k,
     CodeLLaMa34B, LLaMa270B,
     Claude2, Claude2100k,
@@ -200,7 +201,7 @@ var FreeModelArray = []string{
 var AllModels = []string{
     GPT3Turbo, GPT3TurboInstruct, GPT3Turbo0613, GPT3Turbo0301, GPT3Turbo1106,
     GPT3Turbo16k, GPT3Turbo16k0613, GPT3Turbo16k0301,
-    GPT4, GPT40314, GPT40613, GPT4Vision, GPT4All, GPT41106Preview, GPT4Dalle,
+    GPT4, GPT40314, GPT40613, GPT4Vision, GPT4All, GPT41106Preview, GPT4Dalle, GPT41106VisionPreview,
     GPT432k, GPT432k0314, GPT432k0613,
     Dalle2, Dalle3,
     Claude1, Claude1100k, Claude2, Claude2100k, ClaudeSlack,
@@ -16,10 +16,6 @@ import (
 const defaultMessage = "Sorry, I don't understand. Please try again."
 const defaultQuotaMessage = "You don't have enough quota to use this model. please [buy](/buy) or [subscribe](/subscribe) to get more. (or try to refresh the page)"

-func GetErrorQuota(model string) float32 {
-    return utils.Multi[float32](globals.IsGPT4Model(model), -0xe, 0) // special value for error
-}
-
 func CollectQuota(c *gin.Context, user *auth.User, buffer *utils.Buffer, uncountable bool) {
     db := utils.GetDBFromContext(c)
     quota := buffer.GetQuota()
@@ -91,6 +87,7 @@ func ChatHandler(conn *Connection, user *auth.User, instance *conversation.Conve
         Model: model,
         Message: segment,
         Plan: plan,
+        Buffer: *buffer,
     }, func(data string) error {
         if signal := conn.PeekWithType(StopType); signal != nil {
             // stop signal from client
@@ -100,6 +97,7 @@ func ChatHandler(conn *Connection, user *auth.User, instance *conversation.Conve
             Message: buffer.Write(data),
             Quota: buffer.GetQuota(),
             End: false,
+            Plan: plan,
         })
     })

@@ -111,7 +109,6 @@ func ChatHandler(conn *Connection, user *auth.User, instance *conversation.Conve
         CollectQuota(conn.GetCtx(), user, buffer, plan)
         conn.Send(globals.ChatSegmentResponse{
             Message: err.Error(),
-            Quota: GetErrorQuota(model),
             End: true,
         })
         return err.Error()
@@ -122,13 +119,16 @@ func ChatHandler(conn *Connection, user *auth.User, instance *conversation.Conve
     if buffer.IsEmpty() {
         conn.Send(globals.ChatSegmentResponse{
             Message: defaultMessage,
-            Quota: GetErrorQuota(model),
             End: true,
         })
         return defaultMessage
     }

-    conn.Send(globals.ChatSegmentResponse{End: true, Quota: buffer.GetQuota()})
+    conn.Send(globals.ChatSegmentResponse{
+        End: true,
+        Quota: buffer.GetQuota(),
+        Plan: plan,
+    })

     result := buffer.ReadWithDefault(defaultMessage)

@@ -43,6 +43,7 @@ func NativeChatHandler(c *gin.Context, user *auth.User, model string, message []
         Model: model,
         Plan: plan,
         Message: segment,
+        Buffer: *buffer,
     }, func(resp string) error {
         buffer.Write(resp)
         return nil
@@ -52,7 +53,7 @@ func NativeChatHandler(c *gin.Context, user *auth.User, model string, message []
     if err != nil {
        auth.RevertSubscriptionUsage(cache, user, model, plan)
        CollectQuota(c, user, buffer, plan)
-       return err.Error(), GetErrorQuota(model)
+       return err.Error(), 0
     }

     CollectQuota(c, user, buffer, plan)
@@ -133,7 +133,7 @@ func TranshipmentAPI(c *gin.Context) {
     }
 }

-func GetProps(form TranshipmentForm, plan bool) *adapter.ChatProps {
+func GetProps(form TranshipmentForm, buffer *utils.Buffer, plan bool) *adapter.ChatProps {
     return &adapter.ChatProps{
         Model: form.Model,
         Message: form.Messages,
@@ -147,12 +147,13 @@ func GetProps(form TranshipmentForm, plan bool) *adapter.ChatProps {
         TopK: form.TopK,
         Tools: form.Tools,
         ToolChoice: form.ToolChoice,
+        Buffer: *buffer,
     }
 }

 func sendTranshipmentResponse(c *gin.Context, form TranshipmentForm, id string, created int64, user *auth.User, plan bool) {
     buffer := utils.NewBuffer(form.Model, form.Messages)
-    err := adapter.NewChatRequest(GetProps(form, plan), func(data string) error {
+    err := adapter.NewChatRequest(GetProps(form, buffer, plan), func(data string) error {
         buffer.Write(data)
         return nil
     })
@@ -215,7 +216,7 @@ func sendStreamTranshipmentResponse(c *gin.Context, form TranshipmentForm, id st

     go func() {
         buffer := utils.NewBuffer(form.Model, form.Messages)
-        err := adapter.NewChatRequest(GetProps(form, plan), func(data string) error {
+        err := adapter.NewChatRequest(GetProps(form, buffer, plan), func(data string) error {
             channel <- getStreamTranshipmentForm(id, created, form, buffer.Write(data), buffer, false)
             return nil
         })
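Note: with this change a single *utils.Buffer is shared between the handler and the adapter layer. A minimal sketch of the resulting call pattern (assumptions: the manager package layout implied by this diff; the helper name sketchTranshipment is hypothetical and not part of the repository):

    package manager

    import (
        "chat/adapter"
        "chat/utils"
    )

    // sketchTranshipment mirrors sendTranshipmentResponse above: the buffer that
    // accumulates streamed text is the same one handed to GetProps, so adapters
    // can attach tool calls and image quota to it while streaming.
    func sketchTranshipment(form TranshipmentForm, plan bool) (*utils.Buffer, error) {
        buffer := utils.NewBuffer(form.Model, form.Messages)
        err := adapter.NewChatRequest(GetProps(form, buffer, plan), func(data string) error {
            buffer.Write(data) // plain text chunks
            return nil
        })

        // After the request, buffer.GetToolCalls() exposes any tool calls the
        // adapter recorded via SetToolCalls, and buffer.GetQuota() already
        // includes any image tokens added through SetImages.
        return buffer, err
    }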
@@ -165,3 +165,17 @@ func GetPtrVal[T any](ptr *T, def T) T {
     }
     return *ptr
 }
+
+func LimitMax[T int | int64 | float32 | float64](value T, max T) T {
+    if value > max {
+        return max
+    }
+    return value
+}
+
+func LimitMin[T int | int64 | float32 | float64](value T, min T) T {
+    if value < min {
+        return min
+    }
+    return value
+}
@@ -6,19 +6,19 @@ import (
 )

 type Buffer struct {
     Model string `json:"model"`
     Quota float32 `json:"quota"`
     Data string `json:"data"`
+    Latest string `json:"latest"`
     Cursor int `json:"cursor"`
     Times int `json:"times"`
     History []globals.Message `json:"history"`
+    Images Images `json:"images"`
+    ToolCalls *globals.ToolCalls `json:"tool_calls"`
 }

 func NewBuffer(model string, history []globals.Message) *Buffer {
     return &Buffer{
-        Data: "",
-        Cursor: 0,
-        Times: 0,
         Model: model,
         Quota: CountInputToken(model, history),
         History: history,
@@ -37,24 +37,49 @@ func (b *Buffer) Write(data string) string {
     b.Data += data
     b.Cursor += len(data)
     b.Times++
+    b.Latest = data
     return data
 }

+func (b *Buffer) GetChunk() string {
+    return b.Latest
+}
+
+func (b *Buffer) SetImages(images Images) {
+    b.Images = images
+
+    b.Quota += Sum(Each(images, func(image Image) float32 {
+        return float32(image.CountTokens(b.Model)) * 0.7
+    }))
+}
+
+func (b *Buffer) GetImages() Images {
+    return b.Images
+}
+
+func (b *Buffer) SetToolCalls(toolCalls *globals.ToolCalls) {
+    if toolCalls == nil {
+        return
+    }
+
+    b.ToolCalls = toolCalls
+}
+
+func (b *Buffer) GetToolCalls() *globals.ToolCalls {
+    return b.ToolCalls
+}
+
+func (b *Buffer) IsFunctionCalling() bool {
+    return b.GetToolCalls() != nil
+}
+
 func (b *Buffer) WriteBytes(data []byte) []byte {
-    b.Data += string(data)
-    b.Cursor += len(data)
-    b.Times++
+    b.Write(string(data))
     return data
 }

 func (b *Buffer) IsEmpty() bool {
-    return b.Cursor == 0
-}
-
-func (b *Buffer) Reset() {
-    b.Data = ""
-    b.Cursor = 0
-    b.Times = 0
+    return b.Cursor == 0 && !b.IsFunctionCalling()
 }

 func (b *Buffer) Read() string {
@@ -66,7 +91,7 @@ func (b *Buffer) ReadBytes() []byte {
 }

 func (b *Buffer) ReadWithDefault(_default string) string {
-    if b.IsEmpty() || len(strings.TrimSpace(b.Data)) == 0 {
+    if b.IsEmpty() || (len(strings.TrimSpace(b.Data)) == 0 && !b.IsFunctionCalling()) {
         return _default
     }
     return b.Data
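Note: IsEmpty() and ReadWithDefault() now treat a captured tool call as real output, so a reply that contains only tool calls no longer triggers the fallback "Sorry, I don't understand..." message in the chat handler. A minimal sketch of that behaviour (assumption: the "chat" module import paths follow the ones used throughout this diff, so this only builds inside the repository):

    package main

    import (
        "fmt"

        "chat/globals"
        "chat/utils"
    )

    func main() {
        // No text has been written yet, so Cursor is still 0 ...
        buf := utils.NewBuffer(globals.GPT3Turbo, nil)

        // ... but once an adapter records tool calls, the buffer is not "empty".
        buf.SetToolCalls(&globals.ToolCalls{})
        fmt.Println(buf.IsFunctionCalling()) // true
        fmt.Println(buf.IsEmpty())           // false
    }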
utils/image.go (new file, 63 lines)
@@ -0,0 +1,63 @@
+package utils
+
+import (
+    "chat/globals"
+    "image"
+    "math"
+    "net/http"
+)
+
+type Image struct {
+    Object image.Image
+}
+type Images []Image
+
+func NewImage(url string) (*Image, error) {
+    res, err := http.Get(url)
+    if err != nil {
+        return nil, err
+    }
+
+    defer res.Body.Close()
+    img, _, err := image.Decode(res.Body)
+    if err != nil {
+        return nil, err
+    }
+
+    return &Image{Object: img}, nil
+}
+
+func (i *Image) GetWidth() int {
+    return i.Object.Bounds().Max.X
+}
+
+func (i *Image) GetHeight() int {
+    return i.Object.Bounds().Max.Y
+}
+
+func (i *Image) GetPixel(x int, y int) (uint32, uint32, uint32, uint32) {
+    return i.Object.At(x, y).RGBA()
+}
+
+func (i *Image) GetPixelColor(x int, y int) (int, int, int) {
+    r, g, b, _ := i.GetPixel(x, y)
+    return int(r), int(g), int(b)
+}
+
+func (i *Image) CountTokens(model string) int {
+    switch model {
+    case globals.GPT41106VisionPreview:
+        // tile size is 512x512
+        // the max size of image is 2048x2048
+        // the image that is larger than 2048x2048 will be resized in 16 tiles
+
+        x := LimitMax(math.Ceil(float64(i.GetWidth())/512), 4)
+        y := LimitMax(math.Ceil(float64(i.GetHeight())/512), 4)
+        tiles := int(x) * int(y)
+
+        return 85 + 170*tiles
+
+    default:
+        return 0
+    }
+}
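Note on the vision token counter above: an image is priced per 512x512 tile, capped at 4 tiles per axis (i.e. images effectively behave as at most 2048x2048), plus a flat 85 tokens. A standalone sanity check of the same arithmetic (assumption: this is illustrative code re-implementing the formula, not code from the repository):

    package main

    import (
        "fmt"
        "math"
    )

    // tiles mirrors the CountTokens arithmetic: ceil(dim/512), capped at 4 per axis.
    func tiles(width, height int) int {
        x := math.Min(math.Ceil(float64(width)/512), 4)
        y := math.Min(math.Ceil(float64(height)/512), 4)
        return int(x) * int(y)
    }

    func main() {
        // 1024x768 -> 2x2 tiles -> 85 + 170*4 = 765 tokens
        fmt.Println(85 + 170*tiles(1024, 768))
        // 4096x1536 -> width capped at 4 tiles, height 3 -> 85 + 170*12 = 2125 tokens
        fmt.Println(85 + 170*tiles(4096, 1536))
    }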
@@ -18,7 +18,7 @@ func GetWeightByModel(model string) int {
         return 2
     case globals.GPT3Turbo, globals.GPT3Turbo0613, globals.GPT3Turbo1106,
         globals.GPT3Turbo16k, globals.GPT3Turbo16k0613,
-        globals.GPT4, globals.GPT4Vision, globals.GPT4Dalle, globals.GPT4All, globals.GPT40314, globals.GPT40613, globals.GPT41106Preview,
+        globals.GPT4, globals.GPT4Vision, globals.GPT4Dalle, globals.GPT4All, globals.GPT40314, globals.GPT40613, globals.GPT41106Preview, globals.GPT41106VisionPreview,
         globals.GPT432k, globals.GPT432k0613, globals.GPT432k0314,
         globals.LLaMa27B, globals.LLaMa213B, globals.LLaMa270B,
         globals.CodeLLaMa34B, globals.CodeLLaMa13B, globals.CodeLLaMa7B,
@@ -81,7 +81,7 @@ func CountInputToken(model string, v []globals.Message) float32 {
     case globals.GPT3Turbo, globals.GPT3Turbo0613, globals.GPT3Turbo0301, globals.GPT3TurboInstruct, globals.GPT3Turbo1106,
         globals.GPT3Turbo16k, globals.GPT3Turbo16k0613, globals.GPT3Turbo16k0301:
         return 0
-    case globals.GPT41106Preview:
+    case globals.GPT41106Preview, globals.GPT41106VisionPreview:
         return float32(CountTokenPrice(v, model)) / 1000 * 0.7 * 0.6
     case globals.GPT4, globals.GPT4Vision, globals.GPT4All, globals.GPT4Dalle, globals.GPT40314, globals.GPT40613:
         return float32(CountTokenPrice(v, model)) / 1000 * 2.1 * 0.6
@@ -129,7 +129,7 @@ func CountOutputToken(model string, t int) float32 {
     case globals.GPT3Turbo, globals.GPT3Turbo0613, globals.GPT3Turbo0301, globals.GPT3TurboInstruct, globals.GPT3Turbo1106,
         globals.GPT3Turbo16k, globals.GPT3Turbo16k0613, globals.GPT3Turbo16k0301:
         return 0
-    case globals.GPT41106Preview:
+    case globals.GPT41106Preview, globals.GPT41106VisionPreview:
         return float32(t*GetWeightByModel(model)) / 1000 * 2.1 * 0.6
     case globals.GPT4, globals.GPT4Vision, globals.GPT4All, globals.GPT4Dalle, globals.GPT40314, globals.GPT40613:
         return float32(t*GetWeightByModel(model)) / 1000 * 4.3 * 0.6