update buffer and vision image counter

This commit is contained in:
Zhang Minghan 2023-11-21 21:58:44 +08:00
parent b7535111e0
commit bb783f04b2
39 changed files with 340 additions and 201 deletions

View File

@ -42,13 +42,13 @@ type ChatProps struct {
TopK *int
Tools *globals.FunctionTools
ToolChoice *interface{}
Buffer utils.Buffer
}
func createChatRequest(props *ChatProps, hook globals.Hook) error {
if oneapi.IsHit(props.Model) {
return oneapi.HandleRequest(&oneapi.AdapterProps{
return oneapi.NewChatInstanceFromConfig().CreateStreamChatRequest(&oneapi.ChatProps{
Model: props.Model,
Plan: props.Plan,
Message: props.Message,
Token: utils.Multi(
props.Token == 0,
@ -61,6 +61,7 @@ func createChatRequest(props *ChatProps, hook globals.Hook) error {
TopP: props.TopP,
Tools: props.Tools,
ToolChoice: props.ToolChoice,
Buffer: props.Buffer,
}, hook)
} else if globals.IsChatGPTModel(props.Model) {
@ -82,6 +83,7 @@ func createChatRequest(props *ChatProps, hook globals.Hook) error {
TopP: props.TopP,
Tools: props.Tools,
ToolChoice: props.ToolChoice,
Buffer: props.Buffer,
}, hook)
} else if globals.IsClaudeModel(props.Model) {
@ -102,6 +104,7 @@ func createChatRequest(props *ChatProps, hook globals.Hook) error {
Temperature: props.Temperature,
TopK: props.TopK,
Tools: props.Tools,
Buffer: props.Buffer,
}, hook)
} else if globals.IsPalm2Model(props.Model) {

View File

@ -17,6 +17,7 @@ type ChatProps struct {
TopP *float32
Tools *globals.FunctionTools
ToolChoice *interface{}
Buffer utils.Buffer
}
func (c *ChatInstance) GetChatEndpoint(props *ChatProps) string {
@ -113,7 +114,7 @@ func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, callback global
c.GetHeader(),
c.GetChatBody(props, true),
func(data string) error {
data, err := c.ProcessLine(instruct, buf, data)
data, err := c.ProcessLine(props.Buffer, instruct, buf, data)
chunk += data
if err != nil {

View File

@ -106,7 +106,15 @@ func getCompletionChoices(form *CompletionResponse) string {
return form.Data.Choices[0].Text
}
func (c *ChatInstance) ProcessLine(instruct bool, buf, data string) (string, error) {
// getToolCalls extracts the streamed tool-call delta from the first
// choice of a chunk, or nil when the chunk carries no choices.
func getToolCalls(form *ChatStreamResponse) *globals.ToolCalls {
	choices := form.Data.Choices
	if len(choices) > 0 {
		return choices[0].Delta.ToolCalls
	}
	return nil
}
func (c *ChatInstance) ProcessLine(obj utils.Buffer, instruct bool, buf, data string) (string, error) {
item := processFormat(buf + data)
if isDone(item) {
return "", nil
@ -122,7 +130,7 @@ func (c *ChatInstance) ProcessLine(instruct bool, buf, data string) (string, err
// recursive call
if len(buf) > 0 {
return c.ProcessLine(instruct, "", buf+item)
return c.ProcessLine(obj, instruct, "", buf+item)
}
if err := processChatErrorResponse(item); err == nil || err.Data.Error.Message == "" {
@ -133,6 +141,7 @@ func (c *ChatInstance) ProcessLine(instruct bool, buf, data string) (string, err
}
} else {
obj.SetToolCalls(getToolCalls(form))
return getChoices(form), nil
}
}

View File

@ -52,7 +52,8 @@ func NewChatInstanceFromModel(props *InstanceProps) *ChatInstance {
globals.GPT432k, globals.GPT432k0613, globals.GPT432k0314:
return NewChatInstanceFromConfig("gpt4")
case globals.GPT3Turbo1106, globals.GPT41106Preview, globals.GPT4Vision, globals.GPT4Dalle, globals.Dalle3, globals.GPT4All:
case globals.GPT3Turbo1106, globals.GPT41106Preview, globals.GPT41106VisionPreview,
globals.GPT4Vision, globals.GPT4Dalle, globals.Dalle3, globals.GPT4All:
return NewChatInstanceFromConfig("reverse")
case globals.GPT3Turbo, globals.GPT3TurboInstruct, globals.GPT3Turbo0613, globals.GPT3Turbo0301,

View File

@ -17,6 +17,7 @@ type ChatProps struct {
TopP *float32 `json:"top_p"`
Tools *globals.FunctionTools `json:"tools"`
ToolChoice *interface{} `json:"tool_choice"` // string or object
Buffer utils.Buffer
}
func (c *ChatInstance) GetChatEndpoint() string {
@ -69,7 +70,7 @@ func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, callback global
c.GetHeader(),
c.GetChatBody(props, true),
func(data string) error {
data, err := c.ProcessLine(buf, data)
data, err := c.ProcessLine(props.Buffer, buf, data)
if err != nil {
if strings.HasPrefix(err.Error(), "oneapi error") {

View File

@ -12,18 +12,6 @@ var HitModels = []string{
globals.CodeLLaMa34B, globals.CodeLLaMa13B, globals.CodeLLaMa7B,
}
func (c *ChatInstance) Process(data string) string {
return data
}
func (c *ChatInstance) FormatMessage(message []globals.Message) []globals.Message {
return message
}
func (c *ChatInstance) FormatModel(model string) string {
return model
}
func (c *ChatInstance) GetToken(model string) int {
switch model {
case globals.Claude1, globals.Claude2:

View File

@ -1,36 +0,0 @@
package oneapi
import (
"chat/globals"
)
type AdapterProps struct {
Model string
Plan bool
Infinity bool
Message []globals.Message
Token *int
PresencePenalty *float32
FrequencyPenalty *float32
Temperature *float32
TopP *float32
Tools *globals.FunctionTools
ToolChoice *interface{}
}
func HandleRequest(props *AdapterProps, hook globals.Hook) error {
instance := NewChatInstanceFromConfig()
return instance.CreateStreamChatRequest(&ChatProps{
Model: instance.FormatModel(props.Model),
Message: instance.FormatMessage(props.Message),
Token: props.Token,
PresencePenalty: props.PresencePenalty,
FrequencyPenalty: props.FrequencyPenalty,
Temperature: props.Temperature,
TopP: props.TopP,
Tools: props.Tools,
ToolChoice: props.ToolChoice,
}, func(data string) error {
return hook(instance.Process(data))
})
}

View File

@ -72,7 +72,15 @@ func getChoices(form *ChatStreamResponse) string {
return form.Data.Choices[0].Delta.Content
}
func (c *ChatInstance) ProcessLine(buf, data string) (string, error) {
// getToolCalls extracts the streamed tool-call delta from the first
// choice of a chunk, or nil when the chunk carries no choices.
func getToolCalls(form *ChatStreamResponse) *globals.ToolCalls {
	choices := form.Data.Choices
	if len(choices) > 0 {
		return choices[0].Delta.ToolCalls
	}
	return nil
}
func (c *ChatInstance) ProcessLine(obj utils.Buffer, buf, data string) (string, error) {
item := processFormat(buf + data)
if isDone(item) {
return "", nil
@ -81,7 +89,7 @@ func (c *ChatInstance) ProcessLine(buf, data string) (string, error) {
if form := processChatResponse(item); form == nil {
// recursive call
if len(buf) > 0 {
return c.ProcessLine("", buf+item)
return c.ProcessLine(obj, "", buf+item)
}
if err := processChatErrorResponse(item); err == nil {
@ -92,6 +100,7 @@ func (c *ChatInstance) ProcessLine(buf, data string) (string, error) {
}
} else {
obj.SetToolCalls(getToolCalls(form))
return getChoices(form), nil
}
}

View File

@ -13,6 +13,7 @@ type ChatProps struct {
Temperature *float32
TopK *int
Tools *globals.FunctionTools
Buffer utils.Buffer
}
func GetToken(props *ChatProps) *int {
@ -65,6 +66,21 @@ func (c *ChatInstance) GetFunctionCalling(props *ChatProps) *FunctionsPayload {
}
}
// getChoice returns the text content of the first choice in a sparkdesk
// response frame, or "" when the frame carries no choices.
// As a side effect it records a pseudo tool call of type "text" carrying
// the sparkdesk session id (Sid) on the buffer.
// NOTE(review): buffer is received by value, so SetToolCalls mutates this
// copy only — if the caller expects to observe the tool calls on its own
// Buffer, a pointer should be passed instead; confirm against callers.
func getChoice(form *ChatResponse, buffer utils.Buffer) string {
	resp := form.Payload.Choices.Text
	if len(resp) == 0 {
		return ""
	}
	// presumably the "text"-typed tool call is used to surface the session
	// id to downstream consumers — TODO confirm its consumer.
	buffer.SetToolCalls(&globals.ToolCalls{
		globals.ToolCall{
			Type: "text",
			Id:   globals.ToolCallId(form.Header.Sid),
		},
	})
	return resp[0].Content
}
func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, hook globals.Hook) error {
var conn *utils.WebSocket
if conn = utils.NewWebsocketClient(c.GenerateUrl()); conn == nil {
@ -102,7 +118,7 @@ func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, hook globals.Ho
return fmt.Errorf("sparkdesk error: %s (sid: %s)", form.Header.Message, form.Header.Sid)
}
if err := hook(form.Payload.Choices.Text[0].Content); err != nil {
if err := hook(getChoice(form, props.Buffer)); err != nil {
return err
}
}

View File

@ -35,7 +35,6 @@ type FunctionCall struct {
type MessagePayload struct {
Text []Message `json:"text"`
Functions FunctionsPayload `json:"functions"`
}
type RequestParameter struct {

View File

@ -21,6 +21,7 @@ func CreateGeneration(model string, prompt string, path string, plan bool, hook
Message: message,
Plan: plan,
Infinity: true,
Buffer: *buffer,
}, func(data string) error {
buffer.Write(data)
hook(buffer, data)

2
app/.env.deeptrain Normal file
View File

@ -0,0 +1,2 @@
VITE_USE_DEEPTRAIN=true
VITE_BACKEND_ENDPOINT=https://api.chatnio.net

View File

@ -9,7 +9,7 @@
<meta name="description" content="👋 Chat Nio, lightweight Web ChatGPT chat site 👋 Chat Nio, 一个轻量级的联网版 AI 聊天网站">
<meta name="author" content="Deeptrain Team">
<meta name="theme-color" content="#000000">
<meta itemprop="image" content="https://chatnio.net/favicon.ico">
<meta itemprop="image" content="/favicon.ico">
<meta name="baidu-site-verification" content="codeva-TJkbi40ZBi" />
<link href="https://open.lightxi.com/fonts/Andika" rel="stylesheet">
<link href="https://open.lightxi.com/fonts/Jetbrains-Mono" rel="stylesheet">

View File

@ -8,6 +8,7 @@ export type StreamMessage = {
quota?: number;
message: string;
end: boolean;
plan?: boolean;
};
export type ChatProps = {

View File

@ -188,11 +188,13 @@ export class Conversation {
keyword?: string,
quota?: number,
end?: boolean,
plan?: boolean,
) {
this.data[idx].content += message;
if (keyword) this.data[idx].keyword = keyword;
if (quota) this.data[idx].quota = quota;
this.data[idx].end = end;
this.data[idx].plan = plan;
this.triggerCallback();
}
@ -209,6 +211,7 @@ export class Conversation {
message.keyword,
message.quota,
message.end,
message.plan,
);
if (message.end) {
this.end = true;

View File

@ -1,5 +1,5 @@
import axios from "axios";
import { blob_api } from "@/conf.ts";
import { blobEndpoint } from "@/utils/env.ts";
export type BlobParserResponse = {
status: boolean;
@ -18,7 +18,7 @@ export type FileArray = FileObject[];
export async function blobParser(file: File): Promise<BlobParserResponse> {
try {
const resp = await axios.post(
`${blob_api}/upload`,
`${blobEndpoint}/upload`,
{ file },
{
headers: { "Content-Type": "multipart/form-data" },

View File

@ -6,6 +6,7 @@ export type Message = {
keyword?: string;
quota?: number;
end?: boolean;
plan?: boolean;
};
export type Model = {

View File

@ -54,7 +54,7 @@
--model-card-background: rgba(222,214,200,.2);
--gold: 45, 100%, 50%;
--gold: 45 100% 50%;
}
.dark {

View File

@ -111,6 +111,7 @@
.quota {
font-size: 14px;
color: hsl(var(--text-secondary));
transition: .25s;
}
.icon {

View File

@ -47,7 +47,7 @@ function MessageSegment(props: MessageProps) {
<TooltipProvider>
<Tooltip>
<TooltipTrigger asChild>
<div className={`message-quota`}>
<div className={`message-quota ${message.plan ? "subscription" : ""}`}>
<Cloud className={`h-4 w-4 icon`} />
<span className={`quota`}>
{(message.quota < 0 ? 0 : message.quota).toFixed(2)}

View File

@ -10,26 +10,31 @@ import { useState } from "react";
import { useSelector } from "react-redux";
import { selectInit } from "@/store/auth.ts";
import { useEffectAsync } from "@/utils/hook.ts";
import {BroadcastInfo, createBroadcast, getBroadcastList} from "@/api/broadcast.ts";
import {
BroadcastInfo,
createBroadcast,
getBroadcastList,
} from "@/api/broadcast.ts";
import { useTranslation } from "react-i18next";
import { extractMessage } from "@/utils/processor.ts";
import { Button } from "@/components/ui/button.tsx";
import {Plus, RotateCcw} from "lucide-react";
import {useToast} from "@/components/ui/use-toast.ts";
import { Plus, RotateCcw } from "lucide-react";
import { useToast } from "@/components/ui/use-toast.ts";
import {
Dialog,
DialogContent,
DialogDescription, DialogFooter,
DialogDescription,
DialogFooter,
DialogHeader,
DialogTitle,
DialogTrigger
DialogTrigger,
} from "@/components/ui/dialog.tsx";
import {Textarea} from "@/components/ui/textarea.tsx";
import {DialogClose} from "@radix-ui/react-dialog";
import { Textarea } from "@/components/ui/textarea.tsx";
import { DialogClose } from "@radix-ui/react-dialog";
type CreateBroadcastDialogProps = {
onCreated?: () => void;
}
};
function CreateBroadcastDialog(props: CreateBroadcastDialogProps) {
const { t } = useTranslation();
@ -43,21 +48,20 @@ function CreateBroadcastDialog(props: CreateBroadcastDialogProps) {
const resp = await createBroadcast(broadcast);
if (resp.status) {
toast({
title: t('admin.post-success'),
description: t('admin.post-success-prompt'),
title: t("admin.post-success"),
description: t("admin.post-success-prompt"),
});
setContent("");
setOpen(false);
props.onCreated?.();
} else {
toast({
title: t('admin.post-failed'),
description: t('admin.post-failed-prompt', {reason: resp.error}),
title: t("admin.post-failed"),
description: t("admin.post-failed-prompt", { reason: resp.error }),
});
}
}
return (
<Dialog open={open} onOpenChange={setOpen}>
<DialogTrigger asChild>
@ -90,7 +94,7 @@ function CreateBroadcastDialog(props: CreateBroadcastDialogProps) {
</DialogFooter>
</DialogContent>
</Dialog>
)
);
}
function BroadcastTable() {
const { t } = useTranslation();
@ -114,9 +118,9 @@ function BroadcastTable() {
<RotateCcw className={`w-4 h-4`} />
</Button>
<div className={`grow`} />
<CreateBroadcastDialog onCreated={async () => (
setData(await getBroadcastList())
)} />
<CreateBroadcastDialog
onCreated={async () => setData(await getBroadcastList())}
/>
</div>
<Table>
<TableHeader>

View File

@ -27,6 +27,7 @@ import { openDialog as openInvitationDialog } from "@/store/invitation.ts";
import { openDialog as openSharingDialog } from "@/store/sharing.ts";
import { openDialog as openApiDialog } from "@/store/api.ts";
import router from "@/router.tsx";
import { useDeeptrain } from "@/utils/env.ts";
type MenuBarProps = {
children: React.ReactNode;
@ -58,10 +59,12 @@ function MenuBar({ children, className }: MenuBarProps) {
<CalendarPlus className={`h-4 w-4 mr-1`} />
{t("sub.title")}
</DropdownMenuItem>
{useDeeptrain && (
<DropdownMenuItem onClick={() => dispatch(openPackageDialog())}>
<Boxes className={`h-4 w-4 mr-1`} />
{t("pkg.title")}
</DropdownMenuItem>
)}
<DropdownMenuItem onClick={() => dispatch(openInvitationDialog())}>
<Gift className={`h-4 w-4 mr-1`} />
{t("invitation.title")}

View File

@ -20,6 +20,7 @@ import {
} from "@/components/ui/dialog.tsx";
import { getLanguage } from "@/i18n.ts";
import { selectAuthenticated } from "@/store/auth.ts";
import { docsEndpoint, useDeeptrain } from "@/utils/env.ts";
function ChatSpace() {
const [open, setOpen] = useState(false);
@ -31,11 +32,13 @@ function ChatSpace() {
return (
<div className={`chat-product`}>
{useDeeptrain && (
<Button variant={`outline`} onClick={() => setOpen(true)}>
<Users2 className={`h-4 w-4 mr-1.5`} />
{t("contact.title")}
<ChevronRight className={`h-4 w-4 ml-2`} />
</Button>
)}
{subscription && (
<Button variant={`outline`} onClick={() => router.navigate("/article")}>
<Newspaper className={`h-4 w-4 mr-1.5`} />
@ -58,9 +61,7 @@ function ChatSpace() {
<Button
className={`mx-auto`}
variant={`outline`}
onClick={() =>
window.open("https://docs.chatnio.net", "_blank")
}
onClick={() => window.open(docsEndpoint, "_blank")}
>
<BookMarked className={`h-4 w-4 mr-1.5`} />
{t("docs.title")}

View File

@ -13,7 +13,6 @@ import React, { useMemo, useState } from "react";
import {
login,
modelAvatars,
modelPricingLink,
planModels,
studentModels,
supportModels,
@ -35,6 +34,7 @@ import { teenagerSelector } from "@/store/package.ts";
import { ToastAction } from "@/components/ui/toast.tsx";
import { selectAuthenticated } from "@/store/auth.ts";
import { useToast } from "@/components/ui/use-toast.ts";
import { docsEndpoint } from "@/utils/env.ts";
type SearchBarProps = {
value: string;
@ -231,7 +231,7 @@ function MarketFooter() {
return (
<div className={`market-footer`}>
<a href={modelPricingLink} target={`_blank`}>
<a href={docsEndpoint} target={`_blank`}>
<Link size={14} className={`mr-1`} />
{t("pricing")}
</a>

View File

@ -8,14 +8,12 @@ import {
} from "@/utils/env.ts";
import { getMemory } from "@/utils/memory.ts";
export const version = "3.6.32";
export const version = "3.6.33";
export const dev: boolean = getDev();
export const deploy: boolean = true;
export let rest_api: string = getRestApi(deploy);
export let ws_api: string = getWebsocketApi(deploy);
export let blob_api: string = "https://blob.chatnio.net";
export const tokenField = getTokenField(deploy);
export const supportModels: Model[] = [
// openai models
{
@ -388,11 +386,9 @@ export const modelAvatars: Record<string, string> = {
hunyuan: "hunyuan.png",
"360-gpt-v9": "360gpt.png",
"baichuan-53b": "baichuan.png",
"skylark-chat": "skylark.jpg"
"skylark-chat": "skylark.jpg",
};
export const modelPricingLink = "https://docs.chatnio.net/ai-mo-xing-ji-ji-fei";
export function login() {
location.href = `https://deeptrain.net/login?app=${dev ? "dev" : "chatnio"}`;
}

View File

@ -23,6 +23,7 @@ import { useToast } from "@/components/ui/use-toast.ts";
import { copyClipboard } from "@/utils/dom.ts";
import { useEffectAsync } from "@/utils/hook.ts";
import { selectInit } from "@/store/auth.ts";
import { docsEndpoint } from "@/utils/env.ts";
function ApikeyDialog() {
const { t } = useTranslation();
@ -58,7 +59,7 @@ function ApikeyDialog() {
</Button>
</div>
<Button variant={`outline`} asChild>
<a href={`https://docs.chatnio.net`} target={`_blank`}>
<a href={docsEndpoint} target={`_blank`}>
<ExternalLink className={`h-4 w-4 mr-2`} />
{t("buy.learn-more")}
</a>

View File

@ -43,6 +43,7 @@ import { useToast } from "@/components/ui/use-toast.ts";
import { useEffectAsync } from "@/utils/hook.ts";
import { selectAuthenticated } from "@/store/auth.ts";
import { ToastAction } from "@/components/ui/toast.tsx";
import { docsEndpoint } from "@/utils/env.ts";
type AmountComponentProps = {
amount: number;
@ -314,7 +315,7 @@ function QuotaDialog() {
</div>
<div className={`tip`}>
<Button variant={`outline`} asChild>
<a href={`https://docs.chatnio.net`} target={`_blank`}>
<a href={docsEndpoint} target={`_blank`}>
<ExternalLink className={`h-4 w-4 mr-2`} />
{t("buy.learn-more")}
</a>

View File

@ -1,3 +1,10 @@
export const useDeeptrain = !!import.meta.env.VITE_USE_DEEPTRAIN;
export const backendEndpoint = import.meta.env.VITE_BACKEND_ENDPOINT || "/api";
export const blobEndpoint =
import.meta.env.VITE_BLOB_ENDPOINT || "https://blob.chatnio.net";
export const docsEndpoint =
import.meta.env.VITE_DOCS_ENDPOINT || "https://docs.chatnio.net";
export function getDev(): boolean {
/**
* return if the current environment is development
@ -9,14 +16,24 @@ export function getRestApi(deploy: boolean): string {
/**
* return the REST API address
*/
return !deploy ? "http://localhost:8094" : "https://api.chatnio.net";
return !deploy ? "http://localhost:8094" : backendEndpoint;
}
export function getWebsocketApi(deploy: boolean): string {
/**
* return the WebSocket API address
*/
return !deploy ? "ws://localhost:8094" : "wss://api.chatnio.net";
if (!deploy) return "ws://localhost:8094";
if (backendEndpoint.startsWith("http://"))
return `ws://${backendEndpoint.slice(7)}`;
if (backendEndpoint.startsWith("https://"))
return `wss://${backendEndpoint.slice(8)}`;
if (backendEndpoint.startsWith("/"))
return location.protocol === "https:"
? `wss://${location.host}${backendEndpoint}`
: `ws://${location.host}${backendEndpoint}`;
return backendEndpoint;
}
export function getTokenField(deploy: boolean): string {

View File

@ -64,5 +64,15 @@ export default defineConfig({
chunkFileNames: `assets/[name].[hash].js`,
},
},
},
server: {
proxy: {
"/api": {
target: "http://localhost:8094",
changeOrigin: true,
rewrite: (path) => path.replace(/^\/api/, ""),
ws: true,
}
}
}
});

View File

@ -11,7 +11,7 @@ func CanEnableModel(db *sql.DB, user *User, model string) bool {
switch model {
case globals.GPT3Turbo, globals.GPT3TurboInstruct, globals.GPT3Turbo0301, globals.GPT3Turbo0613:
return true
case globals.GPT4, globals.GPT40613, globals.GPT40314, globals.GPT41106Preview,
case globals.GPT4, globals.GPT40613, globals.GPT40314, globals.GPT41106Preview, globals.GPT41106VisionPreview,
globals.GPT4Dalle, globals.GPT4Vision, globals.Dalle3:
return user != nil && user.GetQuota(db) >= 5
case globals.GPT432k, globals.GPT432k0613, globals.GPT432k0314:

View File

@ -13,6 +13,7 @@ type ChatSegmentResponse struct {
Keyword string `json:"keyword"`
Message string `json:"message"`
End bool `json:"end"`
Plan bool `json:"plan"`
}
type GenerationSegmentResponse struct {

View File

@ -53,6 +53,7 @@ const (
GPT40314 = "gpt-4-0314"
GPT40613 = "gpt-4-0613"
GPT41106Preview = "gpt-4-1106-preview"
GPT41106VisionPreview = "gpt-4-vision-preview"
GPT432k = "gpt-4-32k"
GPT432k0314 = "gpt-4-32k-0314"
GPT432k0613 = "gpt-4-32k-0613"
@ -112,7 +113,7 @@ var GPT3Turbo16kArray = []string{
}
var GPT4Array = []string{
GPT4, GPT40314, GPT40613, GPT41106Preview,
GPT4, GPT40314, GPT40613, GPT41106Preview, GPT41106VisionPreview,
GPT4Vision, GPT4Dalle, GPT4All,
}
@ -173,7 +174,7 @@ var SkylarkModelArray = []string{
var LongContextModelArray = []string{
GPT3Turbo16k, GPT3Turbo16k0613, GPT3Turbo16k0301,
GPT41106Preview, GPT432k, GPT432k0314, GPT432k0613,
GPT41106Preview, GPT41106VisionPreview, GPT432k, GPT432k0314, GPT432k0613,
Claude1, Claude1100k,
CodeLLaMa34B, LLaMa270B,
Claude2, Claude2100k,
@ -200,7 +201,7 @@ var FreeModelArray = []string{
var AllModels = []string{
GPT3Turbo, GPT3TurboInstruct, GPT3Turbo0613, GPT3Turbo0301, GPT3Turbo1106,
GPT3Turbo16k, GPT3Turbo16k0613, GPT3Turbo16k0301,
GPT4, GPT40314, GPT40613, GPT4Vision, GPT4All, GPT41106Preview, GPT4Dalle,
GPT4, GPT40314, GPT40613, GPT4Vision, GPT4All, GPT41106Preview, GPT4Dalle, GPT41106VisionPreview,
GPT432k, GPT432k0314, GPT432k0613,
Dalle2, Dalle3,
Claude1, Claude1100k, Claude2, Claude2100k, ClaudeSlack,

View File

@ -16,10 +16,6 @@ import (
const defaultMessage = "Sorry, I don't understand. Please try again."
const defaultQuotaMessage = "You don't have enough quota to use this model. please [buy](/buy) or [subscribe](/subscribe) to get more. (or try to refresh the page)"
func GetErrorQuota(model string) float32 {
return utils.Multi[float32](globals.IsGPT4Model(model), -0xe, 0) // special value for error
}
func CollectQuota(c *gin.Context, user *auth.User, buffer *utils.Buffer, uncountable bool) {
db := utils.GetDBFromContext(c)
quota := buffer.GetQuota()
@ -91,6 +87,7 @@ func ChatHandler(conn *Connection, user *auth.User, instance *conversation.Conve
Model: model,
Message: segment,
Plan: plan,
Buffer: *buffer,
}, func(data string) error {
if signal := conn.PeekWithType(StopType); signal != nil {
// stop signal from client
@ -100,6 +97,7 @@ func ChatHandler(conn *Connection, user *auth.User, instance *conversation.Conve
Message: buffer.Write(data),
Quota: buffer.GetQuota(),
End: false,
Plan: plan,
})
})
@ -111,7 +109,6 @@ func ChatHandler(conn *Connection, user *auth.User, instance *conversation.Conve
CollectQuota(conn.GetCtx(), user, buffer, plan)
conn.Send(globals.ChatSegmentResponse{
Message: err.Error(),
Quota: GetErrorQuota(model),
End: true,
})
return err.Error()
@ -122,13 +119,16 @@ func ChatHandler(conn *Connection, user *auth.User, instance *conversation.Conve
if buffer.IsEmpty() {
conn.Send(globals.ChatSegmentResponse{
Message: defaultMessage,
Quota: GetErrorQuota(model),
End: true,
})
return defaultMessage
}
conn.Send(globals.ChatSegmentResponse{End: true, Quota: buffer.GetQuota()})
conn.Send(globals.ChatSegmentResponse{
End: true,
Quota: buffer.GetQuota(),
Plan: plan,
})
result := buffer.ReadWithDefault(defaultMessage)

View File

@ -43,6 +43,7 @@ func NativeChatHandler(c *gin.Context, user *auth.User, model string, message []
Model: model,
Plan: plan,
Message: segment,
Buffer: *buffer,
}, func(resp string) error {
buffer.Write(resp)
return nil
@ -52,7 +53,7 @@ func NativeChatHandler(c *gin.Context, user *auth.User, model string, message []
if err != nil {
auth.RevertSubscriptionUsage(cache, user, model, plan)
CollectQuota(c, user, buffer, plan)
return err.Error(), GetErrorQuota(model)
return err.Error(), 0
}
CollectQuota(c, user, buffer, plan)

View File

@ -133,7 +133,7 @@ func TranshipmentAPI(c *gin.Context) {
}
}
func GetProps(form TranshipmentForm, plan bool) *adapter.ChatProps {
func GetProps(form TranshipmentForm, buffer *utils.Buffer, plan bool) *adapter.ChatProps {
return &adapter.ChatProps{
Model: form.Model,
Message: form.Messages,
@ -147,12 +147,13 @@ func GetProps(form TranshipmentForm, plan bool) *adapter.ChatProps {
TopK: form.TopK,
Tools: form.Tools,
ToolChoice: form.ToolChoice,
Buffer: *buffer,
}
}
func sendTranshipmentResponse(c *gin.Context, form TranshipmentForm, id string, created int64, user *auth.User, plan bool) {
buffer := utils.NewBuffer(form.Model, form.Messages)
err := adapter.NewChatRequest(GetProps(form, plan), func(data string) error {
err := adapter.NewChatRequest(GetProps(form, buffer, plan), func(data string) error {
buffer.Write(data)
return nil
})
@ -215,7 +216,7 @@ func sendStreamTranshipmentResponse(c *gin.Context, form TranshipmentForm, id st
go func() {
buffer := utils.NewBuffer(form.Model, form.Messages)
err := adapter.NewChatRequest(GetProps(form, plan), func(data string) error {
err := adapter.NewChatRequest(GetProps(form, buffer, plan), func(data string) error {
channel <- getStreamTranshipmentForm(id, created, form, buffer.Write(data), buffer, false)
return nil
})

View File

@ -165,3 +165,17 @@ func GetPtrVal[T any](ptr *T, def T) T {
}
return *ptr
}
// LimitMax clamps value from above: the result never exceeds max.
func LimitMax[T int | int64 | float32 | float64](value T, max T) T {
	if value <= max {
		return value
	}
	return max
}
// LimitMin clamps value from below: the result is never less than min.
func LimitMin[T int | int64 | float32 | float64](value T, min T) T {
	if value >= min {
		return value
	}
	return min
}

View File

@ -9,16 +9,16 @@ type Buffer struct {
Model string `json:"model"`
Quota float32 `json:"quota"`
Data string `json:"data"`
Latest string `json:"latest"`
Cursor int `json:"cursor"`
Times int `json:"times"`
History []globals.Message `json:"history"`
Images Images `json:"images"`
ToolCalls *globals.ToolCalls `json:"tool_calls"`
}
func NewBuffer(model string, history []globals.Message) *Buffer {
return &Buffer{
Data: "",
Cursor: 0,
Times: 0,
Model: model,
Quota: CountInputToken(model, history),
History: history,
@ -37,24 +37,49 @@ func (b *Buffer) Write(data string) string {
b.Data += data
b.Cursor += len(data)
b.Times++
b.Latest = data
return data
}
// GetChunk returns the most recently written chunk of data.
func (b *Buffer) GetChunk() string {
	return b.Latest
}
// SetImages attaches the request images to the buffer and adds their
// estimated token cost to the accumulated quota.
// NOTE(review): the 0.7 multiplier looks like a per-token pricing factor
// applied to image tokens — confirm against the billing rules elsewhere
// in the pricing code.
func (b *Buffer) SetImages(images Images) {
	b.Images = images
	b.Quota += Sum(Each(images, func(image Image) float32 {
		return float32(image.CountTokens(b.Model)) * 0.7
	}))
}
// GetImages returns the images attached to this buffer.
func (b *Buffer) GetImages() Images {
	return b.Images
}
// SetToolCalls records the streamed tool calls on the buffer.
// A nil argument is ignored, so chunks without tool calls do not erase a
// previously captured value.
func (b *Buffer) SetToolCalls(toolCalls *globals.ToolCalls) {
	if toolCalls != nil {
		b.ToolCalls = toolCalls
	}
}
// GetToolCalls returns the recorded tool calls, or nil when none were set.
func (b *Buffer) GetToolCalls() *globals.ToolCalls {
	return b.ToolCalls
}
// IsFunctionCalling reports whether the upstream response carried any
// tool calls (used to treat an otherwise-empty buffer as non-empty).
func (b *Buffer) IsFunctionCalling() bool {
	return b.GetToolCalls() != nil
}
func (b *Buffer) WriteBytes(data []byte) []byte {
b.Data += string(data)
b.Cursor += len(data)
b.Times++
b.Write(string(data))
return data
}
func (b *Buffer) IsEmpty() bool {
return b.Cursor == 0
}
func (b *Buffer) Reset() {
b.Data = ""
b.Cursor = 0
b.Times = 0
return b.Cursor == 0 && !b.IsFunctionCalling()
}
func (b *Buffer) Read() string {
@ -66,7 +91,7 @@ func (b *Buffer) ReadBytes() []byte {
}
func (b *Buffer) ReadWithDefault(_default string) string {
if b.IsEmpty() || len(strings.TrimSpace(b.Data)) == 0 {
if b.IsEmpty() || (len(strings.TrimSpace(b.Data)) == 0 && !b.IsFunctionCalling()) {
return _default
}
return b.Data

63
utils/image.go Normal file
View File

@ -0,0 +1,63 @@
package utils
import (
	"image"
	// Blank imports register the stdlib decoders; without them
	// image.Decode cannot recognize any format and always fails.
	_ "image/gif"
	_ "image/jpeg"
	_ "image/png"

	"fmt"
	"math"
	"net/http"

	"chat/globals"
)
type Image struct {
Object image.Image
}
type Images []Image
func NewImage(url string) (*Image, error) {
res, err := http.Get(url)
if err != nil {
return nil, err
}
defer res.Body.Close()
img, _, err := image.Decode(res.Body)
if err != nil {
return nil, err
}
return &Image{Object: img}, nil
}
// GetWidth returns the horizontal extent of the image in pixels.
func (i *Image) GetWidth() int {
	bounds := i.Object.Bounds()
	return bounds.Max.X
}
// GetHeight returns the vertical extent of the image in pixels.
func (i *Image) GetHeight() int {
	bounds := i.Object.Bounds()
	return bounds.Max.Y
}
// GetPixel reports the alpha-premultiplied RGBA channels of the pixel
// at (x, y), as returned by the underlying color.Color.
func (i *Image) GetPixel(x int, y int) (uint32, uint32, uint32, uint32) {
	pixel := i.Object.At(x, y)
	return pixel.RGBA()
}
// GetPixelColor reports the red, green and blue channels of the pixel
// at (x, y) as ints, discarding the alpha channel.
func (i *Image) GetPixelColor(x int, y int) (int, int, int) {
	red, green, blue, _ := i.GetPixel(x, y)
	return int(red), int(green), int(blue)
}
// CountTokens estimates the prompt-token cost of attaching this image to
// a request for the given model. Models without vision billing cost 0.
func (i *Image) CountTokens(model string) int {
	switch model {
	case globals.GPT41106VisionPreview:
		// Vision pricing counts the image in 512x512 tiles, capped at a
		// 4x4 grid (anything larger than 2048x2048 is billed as 16 tiles):
		// 85 base tokens plus 170 tokens per tile.
		// NOTE(review): OpenAI's official formula also rescales the image
		// before tiling; this approximation skips that step — confirm the
		// estimate is close enough for billing purposes.
		x := LimitMax(math.Ceil(float64(i.GetWidth())/512), 4)
		y := LimitMax(math.Ceil(float64(i.GetHeight())/512), 4)
		tiles := int(x) * int(y)
		return 85 + 170*tiles
	default:
		// non-vision models do not bill image tokens here
		return 0
	}
}

View File

@ -18,7 +18,7 @@ func GetWeightByModel(model string) int {
return 2
case globals.GPT3Turbo, globals.GPT3Turbo0613, globals.GPT3Turbo1106,
globals.GPT3Turbo16k, globals.GPT3Turbo16k0613,
globals.GPT4, globals.GPT4Vision, globals.GPT4Dalle, globals.GPT4All, globals.GPT40314, globals.GPT40613, globals.GPT41106Preview,
globals.GPT4, globals.GPT4Vision, globals.GPT4Dalle, globals.GPT4All, globals.GPT40314, globals.GPT40613, globals.GPT41106Preview, globals.GPT41106VisionPreview,
globals.GPT432k, globals.GPT432k0613, globals.GPT432k0314,
globals.LLaMa27B, globals.LLaMa213B, globals.LLaMa270B,
globals.CodeLLaMa34B, globals.CodeLLaMa13B, globals.CodeLLaMa7B,
@ -81,7 +81,7 @@ func CountInputToken(model string, v []globals.Message) float32 {
case globals.GPT3Turbo, globals.GPT3Turbo0613, globals.GPT3Turbo0301, globals.GPT3TurboInstruct, globals.GPT3Turbo1106,
globals.GPT3Turbo16k, globals.GPT3Turbo16k0613, globals.GPT3Turbo16k0301:
return 0
case globals.GPT41106Preview:
case globals.GPT41106Preview, globals.GPT41106VisionPreview:
return float32(CountTokenPrice(v, model)) / 1000 * 0.7 * 0.6
case globals.GPT4, globals.GPT4Vision, globals.GPT4All, globals.GPT4Dalle, globals.GPT40314, globals.GPT40613:
return float32(CountTokenPrice(v, model)) / 1000 * 2.1 * 0.6
@ -129,7 +129,7 @@ func CountOutputToken(model string, t int) float32 {
case globals.GPT3Turbo, globals.GPT3Turbo0613, globals.GPT3Turbo0301, globals.GPT3TurboInstruct, globals.GPT3Turbo1106,
globals.GPT3Turbo16k, globals.GPT3Turbo16k0613, globals.GPT3Turbo16k0301:
return 0
case globals.GPT41106Preview:
case globals.GPT41106Preview, globals.GPT41106VisionPreview:
return float32(t*GetWeightByModel(model)) / 1000 * 2.1 * 0.6
case globals.GPT4, globals.GPT4Vision, globals.GPT4All, globals.GPT4Dalle, globals.GPT40314, globals.GPT40613:
return float32(t*GetWeightByModel(model)) / 1000 * 4.3 * 0.6