Mirror of https://github.com/coaidev/coai.git, synced 2025-05-21 05:50:14 +09:00
fix markdown table overflow bug and add chatgpt adapter function calling feature
commit eedc8db4b9 (parent 71b3b79809)
@@ -8,9 +8,14 @@ import (
 )
 
 type ChatProps struct {
-	Model   string
-	Message []globals.Message
-	Token   int
+	Model            string
+	Message          []globals.Message
+	Token            *int
+	PresencePenalty  *float32     `json:"presence_penalty"`
+	FrequencyPenalty *float32     `json:"frequency_penalty"`
+	Temperature      *float32     `json:"temperature"`
+	TopP             *float32     `json:"top_p"`
+	ToolChoice       *interface{} `json:"tool_choice"` // string or object
 }
 
 func (c *ChatInstance) GetChatEndpoint(props *ChatProps) string {
@@ -39,31 +44,24 @@ func (c *ChatInstance) GetLatestPrompt(props *ChatProps) string {
 func (c *ChatInstance) GetChatBody(props *ChatProps, stream bool) interface{} {
 	if props.Model == globals.GPT3TurboInstruct {
 		// for completions
-		return utils.Multi[interface{}](props.Token != -1, CompletionRequest{
+		return CompletionRequest{
 			Model:    props.Model,
 			Prompt:   c.GetCompletionPrompt(props.Message),
 			MaxToken: props.Token,
 			Stream:   stream,
-		}, CompletionWithInfinity{
-			Model:  props.Model,
-			Prompt: c.GetCompletionPrompt(props.Message),
-			Stream: stream,
-		})
+		}
 	}
 
-	if props.Token != -1 {
-		return ChatRequest{
-			Model:    props.Model,
-			Messages: formatMessages(props),
-			MaxToken: props.Token,
-			Stream:   stream,
-		}
-	}
-
-	return ChatRequestWithInfinity{
-		Model:    props.Model,
-		Messages: formatMessages(props),
-		Stream:   stream,
+	return ChatRequest{
+		Model:            props.Model,
+		Messages:         formatMessages(props),
+		MaxToken:         props.Token,
+		Stream:           stream,
+		PresencePenalty:  props.PresencePenalty,
+		FrequencyPenalty: props.FrequencyPenalty,
+		Temperature:      props.Temperature,
+		TopP:             props.TopP,
+		ToolChoice:       props.ToolChoice,
 	}
 }
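Note on the Token int → *int migration above: the old code used -1 as an "unlimited" sentinel and branched into separate *WithInfinity request types; with a pointer, nil expresses "no limit" directly, because encoding/json marshals a nil pointer field as null (or omits it entirely when the tag adds omitempty). A minimal, self-contained sketch of the mechanism — the request struct here is a stand-in for illustration, not the repo's type:

package main

import (
	"encoding/json"
	"fmt"
)

// Stand-in for the patched ChatRequest: optional knobs are pointers,
// so an unset field serializes as null rather than a zero value the
// upstream API would take literally (max_tokens: 0, temperature: 0).
type request struct {
	Model       string   `json:"model"`
	MaxToken    *int     `json:"max_tokens"`
	Temperature *float32 `json:"temperature"`
}

func toPtr[T any](v T) *T { return &v }

func main() {
	limited, _ := json.Marshal(request{Model: "gpt-3.5-turbo", MaxToken: toPtr(2500)})
	unlimited, _ := json.Marshal(request{Model: "gpt-4"})
	fmt.Println(string(limited))   // {"model":"gpt-3.5-turbo","max_tokens":2500,"temperature":null}
	fmt.Println(string(unlimited)) // {"model":"gpt-4","max_tokens":null,"temperature":null}
}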
@@ -12,7 +12,7 @@ func (c *ChatInstance) Test() bool {
 	result, err := c.CreateChatRequest(&ChatProps{
 		Model:   globals.GPT3Turbo,
 		Message: []globals.Message{{Role: globals.User, Content: "hi"}},
-		Token:   1,
+		Token:   utils.ToPtr(1),
 	})
 	if err != nil {
 		fmt.Println(fmt.Sprintf("%s: test failed (%s)", c.GetApiKey(), err.Error()))
@@ -6,30 +6,24 @@ import "chat/globals"
 type ChatRequest struct {
 	Model    string            `json:"model"`
 	Messages []globals.Message `json:"messages"`
-	MaxToken int               `json:"max_tokens"`
+	MaxToken *int              `json:"max_tokens"`
 	Stream   bool              `json:"stream"`
-}
-
-type ChatRequestWithInfinity struct {
-	Model    string            `json:"model"`
-	Messages []globals.Message `json:"messages"`
-	Stream   bool              `json:"stream"`
+	PresencePenalty  *float32     `json:"presence_penalty"`
+	FrequencyPenalty *float32     `json:"frequency_penalty"`
+	Temperature      *float32     `json:"temperature"`
+	TopP             *float32     `json:"top_p"`
+	ToolChoice       *interface{} `json:"tool_choice"` // string or object
 }
 
 // CompletionRequest ChatRequest is the request body for chatgpt completion
 type CompletionRequest struct {
 	Model    string `json:"model"`
 	Prompt   string `json:"prompt"`
-	MaxToken int    `json:"max_tokens"`
+	MaxToken *int   `json:"max_tokens"`
 	Stream   bool   `json:"stream"`
-}
-
-type CompletionWithInfinity struct {
-	Model  string `json:"model"`
-	Prompt string `json:"prompt"`
-	Stream bool   `json:"stream"`
 }
 
 // ChatResponse is the native http request body for chatgpt
 type ChatResponse struct {
 	ID string `json:"id"`
@@ -37,9 +31,9 @@ type ChatResponse struct {
 	Created int64  `json:"created"`
 	Model   string `json:"model"`
 	Choices []struct {
-		Message struct {
-			Content string `json:"content"`
-		}
 		Index int `json:"index"`
+		Message      globals.Message `json:"message"`
+		FinishReason string          `json:"finish_reason"`
 	} `json:"choices"`
 	Error struct {
 		Message string `json:"message"`
@@ -54,10 +48,9 @@ type ChatStreamResponse struct {
 	Model string `json:"model"`
 	Data  struct {
 		Choices []struct {
-			Delta struct {
-				Content string `json:"content"`
-			}
-			Index int `json:"index"`
+			Delta        globals.Message `json:"delta"`
+			Index        int             `json:"index"`
+			FinishReason string          `json:"finish_reason"`
 		} `json:"choices"`
 	} `json:"data"`
 }
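Replacing the anonymous Message and Delta structs with globals.Message in the two response types above means a choice or stream delta is no longer limited to a content string: once globals.Message gains a tool_calls field (see the globals changes further down), function-call fragments in responses decode through the same type, and finish_reason exposes why generation stopped (e.g. "stop", "length", or "tool_calls").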
@@ -28,8 +28,8 @@ func retryChatGPTPool(props *ChatProps, hook globals.Hook, retry int) error {
 		Message: props.Message,
 		Token: utils.Multi(
 			props.Token == 0,
-			utils.Multi(globals.IsGPT4Model(props.Model) || props.Plan || props.Infinity, -1, 2500),
-			props.Token,
+			utils.Multi(globals.IsGPT4Model(props.Model) || props.Plan || props.Infinity, nil, utils.ToPtr(2500)),
+			&props.Token,
 		),
 	}, hook)
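For context, utils.Multi — used above and in GetChatBody — acts as a generic ternary; judging from its call sites, its shape is presumably something like:

// Hypothetical sketch of the existing utils.Multi helper, inferred
// from its call sites: a generic ternary over two same-typed values.
func Multi[T any](condition bool, ifValue T, elseValue T) T {
	if condition {
		return ifValue
	}
	return elseValue
}

So when the caller passes no Token (zero value), GPT-4, plan, and infinity requests now get nil (no max_tokens cap) instead of the old -1 sentinel, everything else defaults to a 2500-token cap via utils.ToPtr(2500), and an explicit Token passes through as &props.Token.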
@@ -14,7 +14,7 @@ func UsingWebSegment(instance *conversation.Conversation) []globals.Message {
 		return chatgpt.NewChatInstanceFromConfig("gpt3").CreateChatRequest(&chatgpt.ChatProps{
 			Model:   globals.GPT3TurboInstruct,
 			Message: message,
-			Token:   token,
+			Token:   &token,
 		})
 	}, segment, globals.IsLongContextModel(instance.GetModel()))
 }
@@ -28,7 +28,7 @@ func UsingWebNativeSegment(enable bool, message []globals.Message) []globals.Message {
 		return chatgpt.NewChatInstanceFromConfig("gpt3").CreateChatRequest(&chatgpt.ChatProps{
 			Model:   globals.GPT3TurboInstruct,
 			Message: message,
-			Token:   token,
+			Token:   &token,
 		})
 	}, message, false)
 	} else {
@@ -64,6 +64,7 @@
 .content-wrapper {
 	display: flex;
 	flex-direction: row;
+	max-width: 100%;
 
 	.message-toolbar {
 		display: flex;
@@ -124,6 +125,8 @@
 .message-content {
 	display: flex;
+	flex: 1 1 auto;
+	min-width: 0;
 	flex-direction: column;
 	max-width: 100%;
 	padding: 8px 16px;
@@ -505,7 +505,8 @@
 }
 
 .chat-container {
-	flex-grow: 1;
+	flex: 1 1 auto;
+	min-width: 0;
 	height: 100%;
 	background: hsl(var(--background-container));
 	transition: width 0.2s ease-in-out;
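Taken together, the three style hunks above are the markdown-table-overflow fix named in the commit title: a flex item's min-width defaults to auto, so a wide rendered <table> inside the message column could force the whole flex row wider than the viewport; flex: 1 1 auto combined with min-width: 0 (plus max-width: 100%) lets the containers shrink again, so an oversized table scrolls inside its box instead of stretching the layout.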
@@ -8,7 +8,7 @@ import {
 } from "@/utils/env.ts";
 import { getMemory } from "@/utils/memory.ts";
 
-export const version = "3.6.31";
+export const version = "3.6.32";
 export const dev: boolean = getDev();
 export const deploy: boolean = true;
 export let rest_api: string = getRestApi(deploy);
@@ -4,4 +4,5 @@ const (
 	System    = "system"
 	User      = "user"
 	Assistant = "assistant"
+	Tool      = "tool"
 )
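The new Tool role marks messages that return a function's output to the model; it pairs with the tool_call_id field added to Message further down.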
globals/tools.go (new file, 40 lines)
@@ -0,0 +1,40 @@
+package globals
+
+type ToolCallId string
+type FunctionTools []FunctionTool
+type FunctionTool struct {
+	Type     string       `json:"type"`
+	Function ToolFunction `json:"function"`
+}
+
+type ToolFunction struct {
+	Name        string         `json:"name"`
+	Description string         `json:"description"`
+	Parameters  ToolParameters `json:"parameters"`
+}
+
+type ToolParameters struct {
+	Type       string         `json:"type"`
+	Properties ToolProperties `json:"properties"`
+	Required   []string       `json:"required"`
+}
+
+type ToolProperties map[string]ToolProperty
+
+type ToolProperty struct {
+	Type        string   `json:"type"`
+	Description string   `json:"description"`
+	Enum        []string `json:"enum"`
+}
+
+type ToolCallFunction struct {
+	Name      string `json:"name"`
+	Arguments string `json:"arguments"`
+}
+
+type ToolCall struct {
+	Type     string           `json:"type"`
+	Id       ToolCallId       `json:"id"`
+	Function ToolCallFunction `json:"function"`
+}
+type ToolCalls []ToolCall
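As an illustration of how these types nest (the weather tool and all values here are made up, not part of the commit), a function definition marshals into the OpenAI-style tools payload; this snippet assumes the globals package from this commit plus encoding/json and fmt:

// Illustrative tool definition built from the new globals types.
tool := globals.FunctionTool{
	Type: "function",
	Function: globals.ToolFunction{
		Name:        "get_weather",
		Description: "Get the current weather for a city",
		Parameters: globals.ToolParameters{
			Type: "object",
			Properties: globals.ToolProperties{
				"city": {Type: "string", Description: "city name"},
				"unit": {Type: "string", Enum: []string{"celsius", "fahrenheit"}},
			},
			Required: []string{"city"},
		},
	},
}
data, _ := json.MarshalIndent(tool, "", "  ")
fmt.Println(string(data)) // {"type":"function","function":{"name":"get_weather",...}}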
@@ -2,8 +2,10 @@ package globals
 
 type Hook func(data string) error
 type Message struct {
-	Role    string `json:"role"`
-	Content string `json:"content"`
+	Role       string     `json:"role"`
+	Content    string     `json:"content"`
+	ToolCallId *string    `json:"tool_call_id"` // only `tool` role
+	ToolCalls  *ToolCalls `json:"tool_calls"`   // only `assistant` role
 }
 
 type ChatSegmentResponse struct {
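An illustrative sketch (ids and payloads invented) of how the two new fields pair up across a tool round trip — the assistant's tool_calls carries an id, and the tool-role reply echoes it back via tool_call_id:

callId := "call_abc123" // hypothetical id produced by the model
history := []globals.Message{
	{Role: globals.User, Content: "What's the weather in Tokyo?"},
	{Role: globals.Assistant, ToolCalls: &globals.ToolCalls{{
		Type:     "function",
		Id:       globals.ToolCallId(callId),
		Function: globals.ToolCallFunction{Name: "get_weather", Arguments: `{"city":"Tokyo"}`},
	}}},
	{Role: globals.Tool, Content: `{"temperature":"18C"}`, ToolCallId: &callId},
}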
@@ -23,6 +23,10 @@ func Contains[T comparable](value T, slice []T) bool {
 	return false
 }
 
+func ToPtr[T any](value T) *T {
+	return &value
+}
+
 func TryGet[T any](arr []T, index int) T {
 	if index >= len(arr) {
 		return arr[0]
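The ToPtr helper added above exists because Go cannot take the address of a literal — &1 or &2500 will not compile — which the pointer-typed Token and MaxToken fields now require at call sites:

token := utils.ToPtr(1)           // *int, as in the adapter's Test() above
temp := utils.ToPtr[float32](0.7) // explicit instantiation also works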