feat: support function calling and tools calling in relay chat_completions! (#20) (#30)

This commit is contained in:
Zhang Minghan 2024-02-18 12:08:25 +08:00
parent ccd9f13eaa
commit efc76c3642
34 changed files with 247 additions and 232 deletions

View File

@ -99,33 +99,40 @@ func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, callback global
if url, err := c.CreateImage(props); err != nil { if url, err := c.CreateImage(props); err != nil {
return err return err
} else { } else {
return callback(url) return callback(&globals.Chunk{Content: url})
} }
} }
isCompletionType := props.Model == globals.GPT3TurboInstruct isCompletionType := props.Model == globals.GPT3TurboInstruct
ticks := 0
err := utils.EventScanner(&utils.EventScannerProps{ err := utils.EventScanner(&utils.EventScannerProps{
Method: "POST", Method: "POST",
Uri: c.GetChatEndpoint(props), Uri: c.GetChatEndpoint(props),
Headers: c.GetHeader(), Headers: c.GetHeader(),
Body: c.GetChatBody(props, true), Body: c.GetChatBody(props, true),
Callback: func(data string) error { Callback: func(data string) error {
partial, err := c.ProcessLine(props.Buffer, data, isCompletionType) ticks += 1
partial, err := c.ProcessLine(data, isCompletionType)
if err != nil { if err != nil {
return err return err
} }
return callback(partial) return callback(partial)
}, },
}) })
if err != nil { if err != nil {
if form := processChatErrorResponse(err.Body); form != nil { if form := processChatErrorResponse(err.Body); form != nil {
return errors.New(fmt.Sprintf("%s (type: %s)", form.Error.Message, form.Error.Type)) msg := fmt.Sprintf("%s (type: %s)", form.Error.Message, form.Error.Type)
return errors.New(msg)
} }
return err.Error return err.Error
} }
if ticks == 0 {
return errors.New("no response")
}
return nil return nil
} }

View File

@ -74,16 +74,18 @@ func processChatErrorResponse(data string) *ChatStreamErrorResponse {
return utils.UnmarshalForm[ChatStreamErrorResponse](data) return utils.UnmarshalForm[ChatStreamErrorResponse](data)
} }
func getChoices(buffer utils.Buffer, form *ChatStreamResponse) string { func getChoices(form *ChatStreamResponse) *globals.Chunk {
if len(form.Choices) == 0 { if len(form.Choices) == 0 {
return "" return &globals.Chunk{Content: ""}
} }
choice := form.Choices[0].Delta choice := form.Choices[0].Delta
buffer.AddToolCalls(choice.ToolCalls) return &globals.Chunk{
buffer.SetFunctionCall(choice.FunctionCall) Content: choice.Content,
return choice.Content ToolCall: choice.ToolCalls,
FunctionCall: choice.FunctionCall,
}
} }
func getCompletionChoices(form *CompletionResponse) string { func getCompletionChoices(form *CompletionResponse) string {
@ -109,25 +111,27 @@ func getRobustnessResult(chunk string) string {
} }
} }
func (c *ChatInstance) ProcessLine(obj utils.Buffer, data string, isCompletionType bool) (string, error) { func (c *ChatInstance) ProcessLine(data string, isCompletionType bool) (*globals.Chunk, error) {
if isCompletionType { if isCompletionType {
// legacy support // openai legacy support
if completion := processCompletionResponse(data); completion != nil { if completion := processCompletionResponse(data); completion != nil {
return getCompletionChoices(completion), nil return &globals.Chunk{
Content: getCompletionChoices(completion),
}, nil
} }
globals.Warn(fmt.Sprintf("chatgpt error: cannot parse completion response: %s", data)) globals.Warn(fmt.Sprintf("chatgpt error: cannot parse completion response: %s", data))
return "", errors.New("parser error: cannot parse completion response") return &globals.Chunk{Content: ""}, errors.New("parser error: cannot parse completion response")
} }
if form := processChatResponse(data); form != nil { if form := processChatResponse(data); form != nil {
return getChoices(obj, form), nil return getChoices(form), nil
} }
if form := processChatErrorResponse(data); form != nil { if form := processChatErrorResponse(data); form != nil {
return "", errors.New(fmt.Sprintf("chatgpt error: %s (type: %s)", form.Error.Message, form.Error.Type)) return &globals.Chunk{Content: ""}, errors.New(fmt.Sprintf("chatgpt error: %s (type: %s)", form.Error.Message, form.Error.Type))
} }
globals.Warn(fmt.Sprintf("chatgpt error: cannot parse chat completion response: %s", data)) globals.Warn(fmt.Sprintf("chatgpt error: cannot parse chat completion response: %s", data))
return "", errors.New("parser error: cannot parse chat completion response") return &globals.Chunk{Content: ""}, errors.New("parser error: cannot parse chat completion response")
} }

View File

@ -3,8 +3,8 @@ package baichuan
import ( import (
"chat/globals" "chat/globals"
"chat/utils" "chat/utils"
"errors"
"fmt" "fmt"
"strings"
) )
type ChatProps struct { type ChatProps struct {
@ -72,44 +72,26 @@ func (c *ChatInstance) CreateChatRequest(props *ChatProps) (string, error) {
// CreateStreamChatRequest is the stream response body for baichuan // CreateStreamChatRequest is the stream response body for baichuan
func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, callback globals.Hook) error { func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, callback globals.Hook) error {
buf := "" err := utils.EventScanner(&utils.EventScannerProps{
cursor := 0 Method: "POST",
chunk := "" Uri: c.GetChatEndpoint(),
Headers: c.GetHeader(),
err := utils.EventSource( Body: c.GetChatBody(props, true),
"POST", Callback: func(data string) error {
c.GetChatEndpoint(), partial, err := c.ProcessLine(data)
c.GetHeader(),
c.GetChatBody(props, true),
func(data string) error {
data, err := c.ProcessLine(buf, data)
chunk += data
if err != nil { if err != nil {
if strings.HasPrefix(err.Error(), "baichuan error") {
return err return err
} }
return callback(partial)
// error when break line
buf = buf + data
return nil
}
buf = ""
if data != "" {
cursor += 1
if err := callback(data); err != nil {
return err
}
}
return nil
}, },
) })
if err != nil { if err != nil {
return err if form := processChatErrorResponse(err.Body); form != nil {
} else if len(chunk) == 0 { msg := fmt.Sprintf("%s (type: %s)", form.Error.Message, form.Error.Type)
return fmt.Errorf("empty response") return errors.New(msg)
}
return err.Error
} }
return nil return nil

View File

@ -5,95 +5,37 @@ import (
"chat/utils" "chat/utils"
"errors" "errors"
"fmt" "fmt"
"strings"
) )
func processFormat(data string) string {
rep := strings.NewReplacer(
"data: {",
"\"data\": {",
)
item := rep.Replace(data)
if !strings.HasPrefix(item, "{") {
item = "{" + item
}
if !strings.HasSuffix(item, "}}") {
item = item + "}"
}
return item
}
func processChatResponse(data string) *ChatStreamResponse { func processChatResponse(data string) *ChatStreamResponse {
if strings.HasPrefix(data, "{") { return utils.UnmarshalForm[ChatStreamResponse](data)
var form *ChatStreamResponse
if form = utils.UnmarshalForm[ChatStreamResponse](data); form != nil {
return form
}
if form = utils.UnmarshalForm[ChatStreamResponse](data[:len(data)-1]); form != nil {
return form
}
if form = utils.UnmarshalForm[ChatStreamResponse](data + "}"); form != nil {
return form
}
}
return nil
} }
func processChatErrorResponse(data string) *ChatStreamErrorResponse { func processChatErrorResponse(data string) *ChatStreamErrorResponse {
if strings.HasPrefix(data, "{") { return utils.UnmarshalForm[ChatStreamErrorResponse](data)
var form *ChatStreamErrorResponse
if form = utils.UnmarshalForm[ChatStreamErrorResponse](data); form != nil {
return form
} }
if form = utils.UnmarshalForm[ChatStreamErrorResponse](data + "}"); form != nil {
return form func getChoices(form *ChatStreamResponse) *globals.Chunk {
if len(form.Choices) == 0 {
return &globals.Chunk{Content: ""}
}
choice := form.Choices[0].Delta
return &globals.Chunk{
Content: choice.Content,
} }
} }
return nil func (c *ChatInstance) ProcessLine(data string) (*globals.Chunk, error) {
} if form := processChatResponse(data); form != nil {
func isDone(data string) bool {
return utils.Contains[string](data, []string{
"{data: [DONE]}", "{data: [DONE]}}", "null}}", "{null}",
"{[DONE]}", "{data:}", "{data:}}", "data: [DONE]}}",
})
}
func getChoices(form *ChatStreamResponse) string {
if len(form.Data.Choices) == 0 {
if len(form.Choices) > 0 {
return form.Choices[0].Delta.Content
}
}
return form.Data.Choices[0].Delta.Content
}
func (c *ChatInstance) ProcessLine(buf, data string) (string, error) {
item := processFormat(buf + data)
if isDone(item) {
return "", nil
}
if form := processChatResponse(item); form == nil {
// recursive call
if len(buf) > 0 {
return c.ProcessLine("", buf+item)
}
if err := processChatErrorResponse(item); err == nil || err.Data.Error.Message == "" {
globals.Warn(fmt.Sprintf("baichuan error: cannot parse response: %s", item))
return data, errors.New("parser error: cannot parse response")
} else {
return "", fmt.Errorf("baichuan error: %s (type: %s)", err.Data.Error.Message, err.Data.Error.Type)
}
} else {
return getChoices(form), nil return getChoices(form), nil
} }
if form := processChatErrorResponse(data); form != nil {
return &globals.Chunk{Content: ""}, errors.New(fmt.Sprintf("baichuan error: %s (type: %s)", form.Error.Message, form.Error.Type))
}
globals.Warn(fmt.Sprintf("baichuan error: cannot parse chat completion response: %s", data))
return &globals.Chunk{Content: ""}, errors.New("parser error: cannot parse chat completion response")
} }

View File

@ -32,19 +32,6 @@ type ChatResponse struct {
// ChatStreamResponse is the stream response body for baichuan // ChatStreamResponse is the stream response body for baichuan
type ChatStreamResponse struct { type ChatStreamResponse struct {
Data struct {
ID string `json:"id"`
Object string `json:"object"`
Created int64 `json:"created"`
Model string `json:"model"`
Choices []struct {
Delta struct {
Content string `json:"content"`
}
Index int `json:"index"`
} `json:"choices"`
} `json:"data"`
ID string `json:"id"` ID string `json:"id"`
Object string `json:"object"` Object string `json:"object"`
Created int64 `json:"created"` Created int64 `json:"created"`
@ -58,10 +45,8 @@ type ChatStreamResponse struct {
} }
type ChatStreamErrorResponse struct { type ChatStreamErrorResponse struct {
Data struct {
Error struct { Error struct {
Message string `json:"message"` Message string `json:"message"`
Type string `json:"type"` Type string `json:"type"`
} `json:"error"` } `json:"error"`
} `json:"data"`
} }

View File

@ -39,7 +39,9 @@ func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, hook globals.Ho
return nil return nil
} }
if err := hook(form.Response); err != nil { if err := hook(&globals.Chunk{
Content: form.Response,
}); err != nil {
return err return err
} }
} }

View File

@ -109,7 +109,9 @@ func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, callback global
if url, err := c.CreateImage(props); err != nil { if url, err := c.CreateImage(props); err != nil {
return err return err
} else { } else {
return callback(url) return callback(&globals.Chunk{
Content: url,
})
} }
} }
@ -124,7 +126,7 @@ func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, callback global
Callback: func(data string) error { Callback: func(data string) error {
ticks += 1 ticks += 1
partial, err := c.ProcessLine(props.Buffer, data, isCompletionType) partial, err := c.ProcessLine(data, isCompletionType)
if err != nil { if err != nil {
return err return err
} }

View File

@ -72,16 +72,18 @@ func processChatErrorResponse(data string) *ChatStreamErrorResponse {
return utils.UnmarshalForm[ChatStreamErrorResponse](data) return utils.UnmarshalForm[ChatStreamErrorResponse](data)
} }
func getChoices(buffer utils.Buffer, form *ChatStreamResponse) string { func getChoices(form *ChatStreamResponse) *globals.Chunk {
if len(form.Choices) == 0 { if len(form.Choices) == 0 {
return "" return &globals.Chunk{Content: ""}
} }
choice := form.Choices[0].Delta choice := form.Choices[0].Delta
buffer.AddToolCalls(choice.ToolCalls) return &globals.Chunk{
buffer.SetFunctionCall(choice.FunctionCall) Content: choice.Content,
return choice.Content ToolCall: choice.ToolCalls,
FunctionCall: choice.FunctionCall,
}
} }
func getCompletionChoices(form *CompletionResponse) string { func getCompletionChoices(form *CompletionResponse) string {
@ -107,25 +109,27 @@ func getRobustnessResult(chunk string) string {
} }
} }
func (c *ChatInstance) ProcessLine(obj utils.Buffer, data string, isCompletionType bool) (string, error) { func (c *ChatInstance) ProcessLine(data string, isCompletionType bool) (*globals.Chunk, error) {
if isCompletionType { if isCompletionType {
// legacy support // openai legacy support
if completion := processCompletionResponse(data); completion != nil { if completion := processCompletionResponse(data); completion != nil {
return getCompletionChoices(completion), nil return &globals.Chunk{
Content: getCompletionChoices(completion),
}, nil
} }
globals.Warn(fmt.Sprintf("chatgpt error: cannot parse completion response: %s", data)) globals.Warn(fmt.Sprintf("chatgpt error: cannot parse completion response: %s", data))
return "", errors.New("parser error: cannot parse completion response") return &globals.Chunk{Content: ""}, errors.New("parser error: cannot parse completion response")
} }
if form := processChatResponse(data); form != nil { if form := processChatResponse(data); form != nil {
return getChoices(obj, form), nil return getChoices(form), nil
} }
if form := processChatErrorResponse(data); form != nil { if form := processChatErrorResponse(data); form != nil {
return "", errors.New(fmt.Sprintf("chatgpt error: %s (type: %s)", form.Error.Message, form.Error.Type)) return &globals.Chunk{Content: ""}, errors.New(fmt.Sprintf("chatgpt error: %s (type: %s)", form.Error.Message, form.Error.Type))
} }
globals.Warn(fmt.Sprintf("chatgpt error: cannot parse chat completion response: %s", data)) globals.Warn(fmt.Sprintf("chatgpt error: cannot parse chat completion response: %s", data))
return "", errors.New("parser error: cannot parse chat completion response") return &globals.Chunk{Content: ""}, errors.New("parser error: cannot parse chat completion response")
} }

View File

@ -127,7 +127,7 @@ func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, hook globals.Ho
if resp, err := c.ProcessLine(buf, data); err == nil && len(resp) > 0 { if resp, err := c.ProcessLine(buf, data); err == nil && len(resp) > 0 {
buf = "" buf = ""
if err := hook(resp); err != nil { if err := hook(&globals.Chunk{Content: resp}); err != nil {
return err return err
} }
} else { } else {

View File

@ -126,7 +126,7 @@ func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, callback global
return fmt.Errorf("dashscope error: %s", form.Message) return fmt.Errorf("dashscope error: %s", form.Message)
} }
if err := callback(form.Output.Text); err != nil { if err := callback(&globals.Chunk{Content: form.Output.Text}); err != nil {
return err return err
} }
return nil return nil

View File

@ -50,7 +50,12 @@ func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, callback global
break break
} }
if err := callback(chunk.Choices[0].Delta.Content); err != nil { if len(chunk.Choices) == 0 {
continue
}
choice := chunk.Choices[0].Delta
if err := callback(&globals.Chunk{Content: choice.Content}); err != nil {
return err return err
} }
} }

View File

@ -79,21 +79,21 @@ func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, callback global
form, err := c.CreateStreamTask(action, prompt, func(form *StorageForm, progress int) error { form, err := c.CreateStreamTask(action, prompt, func(form *StorageForm, progress int) error {
if progress == 0 { if progress == 0 {
begin = true begin = true
if err := callback("```progress\n"); err != nil { if err := callback(&globals.Chunk{Content: "```progress\n"}); err != nil {
return err return err
} }
} else if progress == 100 && !begin { } else if progress == 100 && !begin {
if err := callback("```progress\n"); err != nil { if err := callback(&globals.Chunk{Content: "```progress\n"}); err != nil {
return err return err
} }
} }
if err := callback(fmt.Sprintf("%d\n", progress)); err != nil { if err := callback(&globals.Chunk{Content: fmt.Sprintf("%d\n", progress)}); err != nil {
return err return err
} }
if progress == 100 { if progress == 100 {
if err := callback("```\n"); err != nil { if err := callback(&globals.Chunk{Content: "```\n"}); err != nil {
return err return err
} }
} }
@ -105,7 +105,7 @@ func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, callback global
return fmt.Errorf("error from midjourney: %s", err.Error()) return fmt.Errorf("error from midjourney: %s", err.Error())
} }
if err := callback(utils.GetImageMarkdown(form.Url)); err != nil { if err := callback(&globals.Chunk{Content: utils.GetImageMarkdown(form.Url)}); err != nil {
return err return err
} }
@ -133,5 +133,7 @@ func (c *ChatInstance) CallbackActions(form *StorageForm, callback globals.Hook)
reroll := fmt.Sprintf("[REROLL](%s)", toVirtualMessage(fmt.Sprintf("/REROLL %s", form.Task))) reroll := fmt.Sprintf("[REROLL](%s)", toVirtualMessage(fmt.Sprintf("/REROLL %s", form.Task)))
return callback(fmt.Sprintf("\n\n%s\n\n%s\n\n%s\n", upscale, variation, reroll)) return callback(&globals.Chunk{
Content: fmt.Sprintf("\n\n%s\n\n%s\n\n%s\n", upscale, variation, reroll),
})
} }

View File

@ -129,7 +129,7 @@ func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, callback global
} }
for _, item := range utils.SplitItem(response, " ") { for _, item := range utils.SplitItem(response, " ") {
if err := callback(item); err != nil { if err := callback(&globals.Chunk{Content: item}); err != nil {
return err return err
} }
} }

View File

@ -66,14 +66,17 @@ func (c *ChatInstance) CreateRequest(props *ChatProps) *api.ChatReq {
} }
} }
func getChoice(choice *api.ChatResp, buffer utils.Buffer) string { func getChoice(choice *api.ChatResp) *globals.Chunk {
if choice == nil { if choice == nil {
return "" return &globals.Chunk{Content: ""}
} }
calls := choice.Choice.Message.FunctionCall message := choice.Choice.Message
if calls != nil {
buffer.AddToolCalls(&globals.ToolCalls{ calls := message.FunctionCall
return &globals.Chunk{
Content: message.Content,
ToolCall: utils.Multi(calls != nil, &globals.ToolCalls{
globals.ToolCall{ globals.ToolCall{
Type: "function", Type: "function",
Id: globals.ToolCallId(fmt.Sprintf("%s-%s", calls.Name, choice.ReqId)), Id: globals.ToolCallId(fmt.Sprintf("%s-%s", calls.Name, choice.ReqId)),
@ -82,9 +85,8 @@ func getChoice(choice *api.ChatResp, buffer utils.Buffer) string {
Arguments: calls.Arguments, Arguments: calls.Arguments,
}, },
}, },
}) }, nil),
} }
return choice.Choice.Message.Content
} }
func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, callback globals.Hook) error { func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, callback globals.Hook) error {
@ -99,7 +101,7 @@ func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, callback global
return partial.Error return partial.Error
} }
if err := callback(getChoice(partial, props.Buffer)); err != nil { if err := callback(getChoice(partial)); err != nil {
return err return err
} }
} }

View File

@ -77,7 +77,7 @@ func (c *ChatInstance) ProcessPartialResponse(res chan types.PartialResponse, ho
if data.Error != nil { if data.Error != nil {
return data.Error return data.Error
} else if data.Text != "" { } else if data.Text != "" {
if err := hook(data.Text); err != nil { if err := hook(&globals.Chunk{Content: data.Text}); err != nil {
return err return err
} }
} }

View File

@ -67,26 +67,26 @@ func (c *ChatInstance) GetFunctionCalling(props *ChatProps) *FunctionsPayload {
} }
} }
func getChoice(form *ChatResponse, buffer utils.Buffer) string { func getChoice(form *ChatResponse) *globals.Chunk {
resp := form.Payload.Choices.Text if len(form.Payload.Choices.Text) == 0 {
if len(resp) == 0 { return &globals.Chunk{Content: ""}
return ""
} }
if resp[0].FunctionCall != nil { choice := form.Payload.Choices.Text[0]
buffer.AddToolCalls(&globals.ToolCalls{
return &globals.Chunk{
Content: choice.Content,
ToolCall: utils.Multi(choice.FunctionCall != nil, &globals.ToolCalls{
globals.ToolCall{ globals.ToolCall{
Type: "function", Type: "function",
Id: globals.ToolCallId(fmt.Sprintf("%s-%s", resp[0].FunctionCall.Name, resp[0].FunctionCall.Arguments)), Id: globals.ToolCallId(fmt.Sprintf("%s-%s", choice.FunctionCall.Name, choice.FunctionCall.Arguments)),
Function: globals.ToolCallFunction{ Function: globals.ToolCallFunction{
Name: resp[0].FunctionCall.Name, Name: choice.FunctionCall.Name,
Arguments: resp[0].FunctionCall.Arguments, Arguments: choice.FunctionCall.Arguments,
}, },
}, },
}) }, nil),
} }
return resp[0].Content
} }
func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, hook globals.Hook) error { func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, hook globals.Hook) error {
@ -130,7 +130,7 @@ func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, hook globals.Ho
return fmt.Errorf("sparkdesk error: %s (sid: %s)", form.Header.Message, form.Header.Sid) return fmt.Errorf("sparkdesk error: %s (sid: %s)", form.Header.Message, form.Header.Sid)
} }
if err := hook(getChoice(form, props.Buffer)); err != nil { if err := hook(getChoice(form)); err != nil {
return err return err
} }
} }

View File

@ -103,7 +103,7 @@ func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, callback global
buf = "" buf = ""
if data != "" { if data != "" {
cursor += 1 cursor += 1
if err := callback(data); err != nil { if err := callback(&globals.Chunk{Content: data}); err != nil {
return err return err
} }
} }

View File

@ -73,7 +73,7 @@ func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, hook globals.Ho
} }
data = strings.TrimPrefix(data, "data:") data = strings.TrimPrefix(data, "data:")
return hook(data) return hook(&globals.Chunk{Content: data})
}, },
) )
} }

View File

@ -21,9 +21,9 @@ func CreateGeneration(group, model, prompt, path string, hook func(buffer *utils
Model: model, Model: model,
Message: message, Message: message,
Buffer: *buffer, Buffer: *buffer,
}, func(data string) error { }, func(data *globals.Chunk) error {
buffer.Write(data) buffer.WriteChunk(data)
hook(buffer, data) hook(buffer, data.Content)
return nil return nil
}) })

View File

@ -24,7 +24,9 @@ export function parseFile(data: string, acceptDownload?: boolean) {
const b64image = useMemo(() => { const b64image = useMemo(() => {
// get base64 image from content (like: data:image/png;base64,xxxxx) // get base64 image from content (like: data:image/png;base64,xxxxx)
const match = content.match(/data:image\/([^;]+);base64,([a-zA-Z0-9+/=]+)/g); const match = content.match(
/data:image\/([^;]+);base64,([a-zA-Z0-9+/=]+)/g,
);
return match ? match[0] : ""; return match ? match[0] : "";
}, [filename, content]); }, [filename, content]);

View File

@ -356,7 +356,7 @@
"align": "聊天框居中", "align": "聊天框居中",
"memory": "内存占用", "memory": "内存占用",
"max-tokens": "最大回复 Token 数", "max-tokens": "最大回复 Token 数",
"max-tokens-tip": "最大回复 Token 数,超过此数值将会被截断", "max-tokens-tip": "最大回复 Token 数,超过此数值将会被截断(过高的数值可能会导致超过模型的最大 Token 导致请求失败)",
"temperature": "温度", "temperature": "温度",
"temperature-tip": "随机采样的比例,高温度会产生更多的随机性,低温度会产生较集中和确定性的文本", "temperature-tip": "随机采样的比例,高温度会产生更多的随机性,低温度会产生较集中和确定性的文本",
"top-p": "核采样概率阈值", "top-p": "核采样概率阈值",

View File

@ -306,7 +306,7 @@
"temperature": "temperature", "temperature": "temperature",
"temperature-tip": "Random sampling ratio, high temperature produces more randomness, low temperature produces more concentrated and deterministic text", "temperature-tip": "Random sampling ratio, high temperature produces more randomness, low temperature produces more concentrated and deterministic text",
"max-tokens": "Maximum number of response tokens", "max-tokens": "Maximum number of response tokens",
"max-tokens-tip": "Maximum number of reply tokens, exceeding this value will be truncated", "max-tokens-tip": "Maximum number of reply tokens, exceeding this value will be truncated (too high value may cause the request to fail due to exceeding the model's maximum token)",
"top-p": "Kernel Sampling Probability Threshold", "top-p": "Kernel Sampling Probability Threshold",
"top-p-tip": "(TopP) The higher the probability value, the higher the randomness generated; the lower the value, the higher the certainty generated", "top-p-tip": "(TopP) The higher the probability value, the higher the randomness generated; the lower the value, the higher the certainty generated",
"top-k": "Sample Candidate Set Size", "top-k": "Sample Candidate Set Size",

View File

@ -306,7 +306,7 @@
"temperature": "温度", "temperature": "温度",
"temperature-tip": "ランダムサンプリング比、高温はよりランダム性を生み、低温はより集中的で決定論的なテキストを生成します", "temperature-tip": "ランダムサンプリング比、高温はよりランダム性を生み、低温はより集中的で決定論的なテキストを生成します",
"max-tokens": "レスポンストークンの最大数", "max-tokens": "レスポンストークンの最大数",
"max-tokens-tip": "この値を超える返信トークンの最大数は切り捨てられます", "max-tokens-tip": "この値を超えるリプライトークンの最大数は切り捨てられます(値が高すぎると、モデルの最大トークンを超えるために要求が失敗する可能性があります)",
"top-p": "カーネルサンプリング確率閾値", "top-p": "カーネルサンプリング確率閾値",
"top-p-tip": " TopP )確率値が高いほど生成されるランダム性が高く、値が低いほど生成される確実性が高くなります", "top-p-tip": " TopP )確率値が高いほど生成されるランダム性が高く、値が低いほど生成される確実性が高くなります",
"top-k": "サンプル候補セットサイズ", "top-k": "サンプル候補セットサイズ",

View File

@ -306,7 +306,7 @@
"temperature": "Температура", "temperature": "Температура",
"temperature-tip": "Коэффициент случайной выборки, высокая температура создает больше случайности, низкая температура создает более концентрированный и детерминированный текст", "temperature-tip": "Коэффициент случайной выборки, высокая температура создает больше случайности, низкая температура создает более концентрированный и детерминированный текст",
"max-tokens": "Максимальное количество маркеров ответа", "max-tokens": "Максимальное количество маркеров ответа",
"max-tokens-tip": "Максимальное количество маркеров ответа, превышающее это значение, будет усечено", "max-tokens-tip": "Максимальное количество маркеров ответа, превышающее это значение, будет усечено (слишком высокое значение может привести к сбою запроса из-за превышения максимального маркера модели)",
"top-p": "Порог вероятности отбора проб ядра", "top-p": "Порог вероятности отбора проб ядра",
"top-p-tip": "(TopP) Чем выше значение вероятности, тем выше генерируемая случайность; чем ниже значение, тем выше генерируемая определенность", "top-p-tip": "(TopP) Чем выше значение вероятности, тем выше генерируемая случайность; чем ниже значение, тем выше генерируемая определенность",
"top-k": "Размер набора образцов-кандидатов", "top-k": "Размер набора образцов-кандидатов",

View File

@ -46,5 +46,7 @@ export function doTranslate(
from = getFormattedLanguage(from); from = getFormattedLanguage(from);
to = getFormattedLanguage(to); to = getFormattedLanguage(to);
if (content.startsWith("!!")) content = content.substring(2);
return translate(content, from, to); return translate(content, from, to);
} }

View File

@ -23,6 +23,7 @@ export function getMigration(
switch (typeof template) { switch (typeof template) {
case "string": case "string":
if (typeof translation !== "string") return val; if (typeof translation !== "string") return val;
else if (template.startsWith("!!")) return val;
break; break;
case "object": case "object":
return getMigration(template, translation, val[0]); return getMigration(template, translation, val[0]);

View File

@ -53,7 +53,12 @@ func PreflightCache(cache *redis.Client, hash string, buffer *utils.Buffer, hook
buffer.SetInputTokens(buf.CountInputToken()) buffer.SetInputTokens(buf.CountInputToken())
buffer.SetToolCalls(buf.GetToolCalls()) buffer.SetToolCalls(buf.GetToolCalls())
buffer.SetFunctionCall(buf.GetFunctionCall()) buffer.SetFunctionCall(buf.GetFunctionCall())
return idx, true, hook(data)
return idx, true, hook(&globals.Chunk{
Content: data,
FunctionCall: buf.GetFunctionCall(),
ToolCall: buf.GetToolCalls(),
})
} }
func StoreCache(cache *redis.Client, hash string, index int64, buffer *utils.Buffer) { func StoreCache(cache *redis.Client, hash string, index int64, buffer *utils.Buffer) {

View File

@ -1,6 +1,7 @@
package globals package globals
type Hook func(data string) error type Hook func(data *Chunk) error
type Message struct { type Message struct {
Role string `json:"role"` Role string `json:"role"`
Content string `json:"content"` Content string `json:"content"`
@ -10,6 +11,12 @@ type Message struct {
ToolCalls *ToolCalls `json:"tool_calls,omitempty"` // only `assistant` role ToolCalls *ToolCalls `json:"tool_calls,omitempty"` // only `assistant` role
} }
type Chunk struct {
Content string `json:"content"`
ToolCall *ToolCalls `json:"tool_call,omitempty"`
FunctionCall *FunctionCall `json:"function_call,omitempty"`
}
type ChatSegmentResponse struct { type ChatSegmentResponse struct {
Conversation int64 `json:"conversation"` Conversation int64 `json:"conversation"`
Quota float32 `json:"quota"` Quota float32 `json:"quota"`

View File

@ -100,13 +100,13 @@ func ChatHandler(conn *Connection, user *auth.User, instance *conversation.Conve
FrequencyPenalty: instance.GetFrequencyPenalty(), FrequencyPenalty: instance.GetFrequencyPenalty(),
RepetitionPenalty: instance.GetRepetitionPenalty(), RepetitionPenalty: instance.GetRepetitionPenalty(),
}, },
func(data string) error { func(data *globals.Chunk) error {
if signal := conn.PeekWithType(StopType); signal != nil { if signal := conn.PeekWithType(StopType); signal != nil {
// stop signal from client // stop signal from client
return fmt.Errorf("signal") return fmt.Errorf("signal")
} }
return conn.SendClient(globals.ChatSegmentResponse{ return conn.SendClient(globals.ChatSegmentResponse{
Message: buffer.Write(data), Message: buffer.WriteChunk(data),
Quota: buffer.GetQuota(), Quota: buffer.GetQuota(),
End: false, End: false,
Plan: plan, Plan: plan,

View File

@ -93,8 +93,8 @@ func sendTranshipmentResponse(c *gin.Context, form RelayForm, messages []globals
cache := utils.GetCacheFromContext(c) cache := utils.GetCacheFromContext(c)
buffer := utils.NewBuffer(form.Model, messages, channel.ChargeInstance.GetCharge(form.Model)) buffer := utils.NewBuffer(form.Model, messages, channel.ChargeInstance.GetCharge(form.Model))
hit, err := channel.NewChatRequestWithCache(cache, buffer, auth.GetGroup(db, user), getChatProps(form, messages, buffer, plan), func(data string) error { hit, err := channel.NewChatRequestWithCache(cache, buffer, auth.GetGroup(db, user), getChatProps(form, messages, buffer, plan), func(data *globals.Chunk) error {
buffer.Write(data) buffer.WriteChunk(data)
return nil return nil
}) })
@ -137,14 +137,7 @@ func sendTranshipmentResponse(c *gin.Context, form RelayForm, messages []globals
}) })
} }
func getStreamTranshipmentForm(id string, created int64, form RelayForm, data string, buffer *utils.Buffer, end bool, err error) RelayStreamResponse { func getStreamTranshipmentForm(id string, created int64, form RelayForm, data *globals.Chunk, buffer *utils.Buffer, end bool, err error) RelayStreamResponse {
toolCalling := buffer.GetToolCalls()
var functionCalling *globals.FunctionCall
if end {
functionCalling = buffer.GetFunctionCall()
}
return RelayStreamResponse{ return RelayStreamResponse{
Id: fmt.Sprintf("chatcmpl-%s", id), Id: fmt.Sprintf("chatcmpl-%s", id),
Object: "chat.completion.chunk", Object: "chat.completion.chunk",
@ -155,9 +148,9 @@ func getStreamTranshipmentForm(id string, created int64, form RelayForm, data st
Index: 0, Index: 0,
Delta: globals.Message{ Delta: globals.Message{
Role: globals.Assistant, Role: globals.Assistant,
Content: data, Content: data.Content,
ToolCalls: toolCalling, ToolCalls: data.ToolCall,
FunctionCall: functionCalling, FunctionCall: data.FunctionCall,
}, },
FinishReason: utils.Multi[interface{}](end, "stop", nil), FinishReason: utils.Multi[interface{}](end, "stop", nil),
}, },
@ -177,23 +170,30 @@ func sendStreamTranshipmentResponse(c *gin.Context, form RelayForm, messages []g
db := utils.GetDBFromContext(c) db := utils.GetDBFromContext(c)
cache := utils.GetCacheFromContext(c) cache := utils.GetCacheFromContext(c)
group := auth.GetGroup(db, user)
charge := channel.ChargeInstance.GetCharge(form.Model)
go func() { go func() {
buffer := utils.NewBuffer(form.Model, messages, channel.ChargeInstance.GetCharge(form.Model)) buffer := utils.NewBuffer(form.Model, messages, charge)
hit, err := channel.NewChatRequestWithCache(cache, buffer, auth.GetGroup(db, user), getChatProps(form, messages, buffer, plan), func(data string) error { hit, err := channel.NewChatRequestWithCache(
partial <- getStreamTranshipmentForm(id, created, form, buffer.Write(data), buffer, false, nil) cache, buffer, group, getChatProps(form, messages, buffer, plan),
func(data *globals.Chunk) error {
buffer.WriteChunk(data)
partial <- getStreamTranshipmentForm(id, created, form, data, buffer, false, nil)
return nil return nil
}) },
)
admin.AnalysisRequest(form.Model, buffer, err) admin.AnalysisRequest(form.Model, buffer, err)
if err != nil { if err != nil {
auth.RevertSubscriptionUsage(db, cache, user, form.Model) auth.RevertSubscriptionUsage(db, cache, user, form.Model)
globals.Warn(fmt.Sprintf("error from chat request api: %s (instance: %s, client: %s)", err.Error(), form.Model, c.ClientIP())) globals.Warn(fmt.Sprintf("error from chat request api: %s (instance: %s, client: %s)", err.Error(), form.Model, c.ClientIP()))
partial <- getStreamTranshipmentForm(id, created, form, err.Error(), buffer, true, err) partial <- getStreamTranshipmentForm(id, created, form, &globals.Chunk{Content: err.Error()}, buffer, true, err)
close(partial) close(partial)
return return
} }
partial <- getStreamTranshipmentForm(id, created, form, "", buffer, true, nil) partial <- getStreamTranshipmentForm(id, created, form, &globals.Chunk{Content: ""}, buffer, true, nil)
if !hit { if !hit {
CollectQuota(c, user, buffer, plan, err) CollectQuota(c, user, buffer, plan, err)

View File

@ -40,8 +40,8 @@ func NativeChatHandler(c *gin.Context, user *auth.User, model string, message []
Message: segment, Message: segment,
Buffer: *buffer, Buffer: *buffer,
}, },
func(resp string) error { func(resp *globals.Chunk) error {
buffer.Write(resp) buffer.WriteChunk(resp)
return nil return nil
}, },
) )

View File

@ -89,8 +89,8 @@ func createRelayImageObject(c *gin.Context, form RelayImageForm, prompt string,
} }
buffer := utils.NewBuffer(form.Model, messages, channel.ChargeInstance.GetCharge(form.Model)) buffer := utils.NewBuffer(form.Model, messages, channel.ChargeInstance.GetCharge(form.Model))
hit, err := channel.NewChatRequestWithCache(cache, buffer, auth.GetGroup(db, user), getImageProps(form, messages, buffer), func(data string) error { hit, err := channel.NewChatRequestWithCache(cache, buffer, auth.GetGroup(db, user), getImageProps(form, messages, buffer), func(data *globals.Chunk) error {
buffer.Write(data) buffer.WriteChunk(data)
return nil return nil
}) })

View File

@ -81,6 +81,15 @@ func InsertSlice[T any](arr []T, index int, value []T) []T {
return arr return arr
} }
// Collect concatenates any number of slices into a single new slice,
// preserving element order. The result is always non-nil (even with no
// arguments) and the input slices are never modified.
func Collect[T any](arr ...[]T) []T {
	// Pre-size to the total length to avoid repeated growth copies.
	total := 0
	for _, v := range arr {
		total += len(v)
	}
	res := make([]T, 0, total)
	for _, v := range arr {
		res = append(res, v...)
	}
	return res
}
func Append[T any](arr []T, value T) []T { func Append[T any](arr []T, value T) []T {
return append(arr, value) return append(arr, value)
} }

View File

@ -90,6 +90,18 @@ func (b *Buffer) Write(data string) string {
return data return data
} }
// WriteChunk folds a streamed chunk into the buffer: its text content is
// written, and any tool-call / function-call deltas are merged in via the
// buffer's accumulators. The chunk's content is returned so callers can
// forward it downstream; a nil chunk is a no-op yielding the empty string.
func (b *Buffer) WriteChunk(data *globals.Chunk) string {
	if data == nil {
		return ""
	}
	content := data.Content
	b.Write(content)
	b.AddToolCalls(data.ToolCall)
	b.SetFunctionCall(data.FunctionCall)
	return content
}
func (b *Buffer) GetChunk() string { func (b *Buffer) GetChunk() string {
return b.Latest return b.Latest
} }
@ -114,12 +126,52 @@ func (b *Buffer) SetToolCalls(toolCalls *globals.ToolCalls) {
b.ToolCalls = toolCalls b.ToolCalls = toolCalls
} }
// hitTool locates the entry in tools that the streamed fragment `tool`
// belongs to. It matches by Id first; if the fragment carries neither an
// Id nor a Type (a pure arguments continuation), it falls back to the most
// recently appended tool call. Returns (index, pointer) on a hit, or
// (0, nil) when nothing matches.
func hitTool(tool globals.ToolCall, tools globals.ToolCalls) (int, *globals.ToolCall) {
	for i := range tools {
		if tools[i].Id == tool.Id {
			// Point into the slice itself rather than at a range-loop
			// copy, so mutations through the pointer reach the entry
			// (consistent with the fallback branch below).
			return i, &tools[i]
		}
	}
	// Anonymous fragment: attribute it to the last tool call seen, if any.
	if len(tool.Type) == 0 && len(tool.Id) == 0 {
		if n := len(tools); n > 0 {
			return n - 1, &tools[n-1]
		}
	}
	return 0, nil
}
// mixTools merges incremental tool-call chunks from target into source.
// Streamed tool calls arrive in fragments: the first fragment carries the
// Id/Type, later fragments carry only argument text. Fragments that hit an
// existing entry (matched by Id, or anonymous continuations) have their
// Function.Arguments concatenated; new entries are appended in order.
// A nil source returns target unchanged, and vice versa.
func mixTools(source *globals.ToolCalls, target *globals.ToolCalls) *globals.ToolCalls {
	if source == nil {
		return target
	}
	if target == nil {
		// Guard the dereference below; nothing new to merge.
		return source
	}
	// Pre-size to the combined length; merging can only make it shorter.
	tools := make(globals.ToolCalls, 0, len(*source)+len(*target))
	for _, tool := range Collect(*source, *target) {
		if idx, hit := hitTool(tool, tools); hit != nil {
			tools[idx].Function.Arguments += tool.Function.Arguments
		} else {
			tools = append(tools, tool)
		}
	}
	return &tools
}
func (b *Buffer) AddToolCalls(toolCalls *globals.ToolCalls) { func (b *Buffer) AddToolCalls(toolCalls *globals.ToolCalls) {
if toolCalls == nil { if toolCalls == nil {
return return
} }
b.ToolCalls = toolCalls b.ToolCalls = mixTools(b.ToolCalls, toolCalls)
} }
func (b *Buffer) SetFunctionCall(functionCall *globals.FunctionCall) { func (b *Buffer) SetFunctionCall(functionCall *globals.FunctionCall) {