mirror of https://github.com/coaidev/coai.git
synced 2025-05-19 04:50:14 +09:00

Commit 422da58b24 (parent 3335e81c1e)
restruct: restruct channel factories
@@ -4,11 +4,12 @@ import (
 	"chat/adapter/azure"
 	"chat/adapter/baichuan"
 	"chat/adapter/bing"
-	"chat/adapter/chatgpt"
 	"chat/adapter/claude"
+	"chat/adapter/common"
 	"chat/adapter/dashscope"
 	"chat/adapter/hunyuan"
 	"chat/adapter/midjourney"
+	"chat/adapter/openai"
 	"chat/adapter/palm2"
 	"chat/adapter/skylark"
 	"chat/adapter/slack"
@@ -16,171 +17,33 @@ import (
 	"chat/adapter/zhinao"
 	"chat/adapter/zhipuai"
 	"chat/globals"
-	"chat/utils"
 	"fmt"
 )
 
-type RequestProps struct {
-	MaxRetries *int
-	Current    int
-	Group      string
+var channelFactories = map[string]adaptercommon.FactoryCreator{
+	globals.OpenAIChannelType:      openai.NewChatInstanceFromConfig,
+	globals.AzureOpenAIChannelType: azure.NewChatInstanceFromConfig,
+	globals.ClaudeChannelType:      claude.NewChatInstanceFromConfig,
+	globals.SlackChannelType:       slack.NewChatInstanceFromConfig,
+	globals.BingChannelType:        bing.NewChatInstanceFromConfig,
+	globals.PalmChannelType:        palm2.NewChatInstanceFromConfig,
+	globals.SparkdeskChannelType:   sparkdesk.NewChatInstanceFromConfig,
+	globals.ChatGLMChannelType:     zhipuai.NewChatInstanceFromConfig,
+	globals.QwenChannelType:        dashscope.NewChatInstanceFromConfig,
+	globals.HunyuanChannelType:     hunyuan.NewChatInstanceFromConfig,
+	globals.BaichuanChannelType:    baichuan.NewChatInstanceFromConfig,
+	globals.SkylarkChannelType:     skylark.NewChatInstanceFromConfig,
+	globals.ZhinaoChannelType:      zhinao.NewChatInstanceFromConfig,
+	globals.MidjourneyChannelType:  midjourney.NewChatInstanceFromConfig,
 }
 
-type ChatProps struct {
-	RequestProps
-
-	Model             string
-	Message           []globals.Message
-	MaxTokens         *int
-	PresencePenalty   *float32
-	FrequencyPenalty  *float32
-	RepetitionPenalty *float32
-	Temperature       *float32
-	TopP              *float32
-	TopK              *int
-	Tools             *globals.FunctionTools
-	ToolChoice        *interface{}
-	Buffer            utils.Buffer
-}
-
-func createChatRequest(conf globals.ChannelConfig, props *ChatProps, hook globals.Hook) error {
-	model := conf.GetModelReflect(props.Model)
-
-	switch conf.GetType() {
-	case globals.OpenAIChannelType:
-		return chatgpt.NewChatInstanceFromConfig(conf).CreateStreamChatRequest(&chatgpt.ChatProps{
-			Model:            model,
-			Message:          props.Message,
-			Token:            props.MaxTokens,
-			PresencePenalty:  props.PresencePenalty,
-			FrequencyPenalty: props.FrequencyPenalty,
-			Temperature:      props.Temperature,
-			TopP:             props.TopP,
-			Tools:            props.Tools,
-			ToolChoice:       props.ToolChoice,
-			Buffer:           props.Buffer,
-		}, hook)
-
-	case globals.AzureOpenAIChannelType:
-		return azure.NewChatInstanceFromConfig(conf).CreateStreamChatRequest(&azure.ChatProps{
-			Model:            model,
-			Message:          props.Message,
-			Token:            props.MaxTokens,
-			PresencePenalty:  props.PresencePenalty,
-			FrequencyPenalty: props.FrequencyPenalty,
-			Temperature:      props.Temperature,
-			TopP:             props.TopP,
-			Tools:            props.Tools,
-			ToolChoice:       props.ToolChoice,
-			Buffer:           props.Buffer,
-		}, hook)
-
-	case globals.ClaudeChannelType:
-		return claude.NewChatInstanceFromConfig(conf).CreateStreamChatRequest(&claude.ChatProps{
-			Model:       model,
-			Message:     props.Message,
-			Token:       props.MaxTokens,
-			TopP:        props.TopP,
-			TopK:        props.TopK,
-			Temperature: props.Temperature,
-		}, hook)
-
-	case globals.SlackChannelType:
-		return slack.NewChatInstanceFromConfig(conf).CreateStreamChatRequest(&slack.ChatProps{
-			Message: props.Message,
-		}, hook)
-
-	case globals.BingChannelType:
-		return bing.NewChatInstanceFromConfig(conf).CreateStreamChatRequest(&bing.ChatProps{
-			Model:   model,
-			Message: props.Message,
-		}, hook)
-
-	case globals.PalmChannelType:
-		return palm2.NewChatInstanceFromConfig(conf).CreateStreamChatRequest(&palm2.ChatProps{
-			Model:   model,
-			Message: props.Message,
-		}, hook)
-
-	case globals.SparkdeskChannelType:
-		return sparkdesk.NewChatInstance(conf, model).CreateStreamChatRequest(&sparkdesk.ChatProps{
-			Model:       model,
-			Message:     props.Message,
-			Token:       props.MaxTokens,
-			Temperature: props.Temperature,
-			TopK:        props.TopK,
-			Tools:       props.Tools,
-			Buffer:      props.Buffer,
-		}, hook)
-
-	case globals.ChatGLMChannelType:
-		return zhipuai.NewChatInstanceFromConfig(conf).CreateStreamChatRequest(&zhipuai.ChatProps{
-			Model:       model,
-			Message:     props.Message,
-			Temperature: props.Temperature,
-			TopP:        props.TopP,
-		}, hook)
-
-	case globals.QwenChannelType:
-		return dashscope.NewChatInstanceFromConfig(conf).CreateStreamChatRequest(&dashscope.ChatProps{
-			Model:             model,
-			Message:           props.Message,
-			Token:             props.MaxTokens,
-			Temperature:       props.Temperature,
-			TopP:              props.TopP,
-			TopK:              props.TopK,
-			RepetitionPenalty: props.RepetitionPenalty,
-		}, hook)
-
-	case globals.HunyuanChannelType:
-		return hunyuan.NewChatInstanceFromConfig(conf).CreateStreamChatRequest(&hunyuan.ChatProps{
-			Model:       model,
-			Message:     props.Message,
-			Temperature: props.Temperature,
-			TopP:        props.TopP,
-		}, hook)
-
-	case globals.BaichuanChannelType:
-		return baichuan.NewChatInstanceFromConfig(conf).CreateStreamChatRequest(&baichuan.ChatProps{
-			Model:       model,
-			Message:     props.Message,
-			TopP:        props.TopP,
-			TopK:        props.TopK,
-			Temperature: props.Temperature,
-		}, hook)
-
-	case globals.SkylarkChannelType:
-		return skylark.NewChatInstanceFromConfig(conf).CreateStreamChatRequest(&skylark.ChatProps{
-			Model:            model,
-			Message:          props.Message,
-			Token:            props.MaxTokens,
-			TopP:             props.TopP,
-			TopK:             props.TopK,
-			Temperature:      props.Temperature,
-			FrequencyPenalty: props.FrequencyPenalty,
-			PresencePenalty:  props.PresencePenalty,
-			RepeatPenalty:    props.RepetitionPenalty,
-			Tools:            props.Tools,
-		}, hook)
-
-	case globals.ZhinaoChannelType:
-		return zhinao.NewChatInstanceFromConfig(conf).CreateStreamChatRequest(&zhinao.ChatProps{
-			Model:             model,
-			Message:           props.Message,
-			Token:             props.MaxTokens,
-			TopP:              props.TopP,
-			TopK:              props.TopK,
-			Temperature:       props.Temperature,
-			RepetitionPenalty: props.RepetitionPenalty,
-		}, hook)
-
-	case globals.MidjourneyChannelType:
-		return midjourney.NewChatInstanceFromConfig(conf).CreateStreamChatRequest(&midjourney.ChatProps{
-			Model:    model,
-			Messages: props.Message,
-		}, hook)
-
-	default:
-		return fmt.Errorf("unknown channel type %s (model: %s)", conf.GetType(), props.Model)
-	}
+func createChatRequest(conf globals.ChannelConfig, props *adaptercommon.ChatProps, hook globals.Hook) error {
+	props.Model = conf.GetModelReflect(props.OriginalModel)
+
+	factoryType := conf.GetType()
+	if factory, ok := channelFactories[factoryType]; ok {
+		return factory(conf).CreateStreamChatRequest(props, hook)
+	}
+
+	return fmt.Errorf("unknown channel type %s (channel #%d)", conf.GetType(), conf.GetId())
 }

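The two hunks above are the heart of the commit: the per-channel switch (with its hand-copied props for every adapter) collapses into a channelFactories lookup table plus one createChatRequest that resolves the model via conf.GetModelReflect(props.OriginalModel). Below is a minimal, self-contained sketch of the same table-driven dispatch pattern; the names echoAdapter and newEchoAdapter are illustrative stand-ins, not code from the repository.

package main

import (
	"errors"
	"fmt"
)

// Factory mirrors the shape of adaptercommon.Factory: a channel adapter
// only has to expose one streaming entry point.
type Factory interface {
	CreateStreamChatRequest(prompt string) error
}

// FactoryCreator builds a Factory from channel configuration, mirroring
// adaptercommon.FactoryCreator (here the config is just an endpoint string).
type FactoryCreator func(endpoint string) Factory

// echoAdapter is a hypothetical stand-in for a concrete adapter package
// such as openai or claude.
type echoAdapter struct{ endpoint string }

func (e *echoAdapter) CreateStreamChatRequest(prompt string) error {
	fmt.Printf("streaming %q via %s\n", prompt, e.endpoint)
	return nil
}

func newEchoAdapter(endpoint string) Factory { return &echoAdapter{endpoint: endpoint} }

// channelFactories replaces the per-channel switch: dispatch becomes a map
// lookup, and registering a new channel is a one-line entry.
var channelFactories = map[string]FactoryCreator{
	"echo": newEchoAdapter,
}

func createChatRequest(channelType, endpoint, prompt string) error {
	if creator, ok := channelFactories[channelType]; ok {
		return creator(endpoint).CreateStreamChatRequest(prompt)
	}
	return errors.New("unknown channel type " + channelType)
}

func main() {
	_ = createChatRequest("echo", "https://example.com", "hi")
}

Adding a channel now means one map entry instead of a new case block, and the map itself documents the supported channel types.
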
@@ -1,6 +1,7 @@
 package azure
 
 import (
+	adaptercommon "chat/adapter/common"
 	"chat/globals"
 	"chat/utils"
 	"errors"
@@ -8,20 +9,7 @@ import (
 	"strings"
 )
 
-type ChatProps struct {
-	Model            string
-	Message          []globals.Message
-	Token            *int
-	PresencePenalty  *float32
-	FrequencyPenalty *float32
-	Temperature      *float32
-	TopP             *float32
-	Tools            *globals.FunctionTools
-	ToolChoice       *interface{}
-	Buffer           utils.Buffer
-}
-
-func (c *ChatInstance) GetChatEndpoint(props *ChatProps) string {
+func (c *ChatInstance) GetChatEndpoint(props *adaptercommon.ChatProps) string {
 	model := strings.ReplaceAll(props.Model, ".", "")
 	if props.Model == globals.GPT3TurboInstruct {
 		return fmt.Sprintf("%s/openai/deployments/%s/completions?api-version=%s", c.GetResource(), model, c.GetEndpoint())
@@ -37,7 +25,7 @@ func (c *ChatInstance) GetCompletionPrompt(messages []globals.Message) string {
 	return result
 }
 
-func (c *ChatInstance) GetLatestPrompt(props *ChatProps) string {
+func (c *ChatInstance) GetLatestPrompt(props *adaptercommon.ChatProps) string {
 	if len(props.Message) == 0 {
 		return ""
 	}
@@ -45,19 +33,19 @@ func (c *ChatInstance) GetLatestPrompt(props *ChatProps) string {
 	return props.Message[len(props.Message)-1].Content
 }
 
-func (c *ChatInstance) GetChatBody(props *ChatProps, stream bool) interface{} {
+func (c *ChatInstance) GetChatBody(props *adaptercommon.ChatProps, stream bool) interface{} {
 	if props.Model == globals.GPT3TurboInstruct {
 		// for completions
 		return CompletionRequest{
 			Prompt:   c.GetCompletionPrompt(props.Message),
-			MaxToken: props.Token,
+			MaxToken: props.MaxTokens,
 			Stream:   stream,
 		}
 	}
 
 	return ChatRequest{
 		Messages:         formatMessages(props),
-		MaxToken:         props.Token,
+		MaxToken:         props.MaxTokens,
 		Stream:           stream,
 		PresencePenalty:  props.PresencePenalty,
 		FrequencyPenalty: props.FrequencyPenalty,
@@ -68,8 +56,8 @@ func (c *ChatInstance) GetChatBody(props *ChatProps, stream bool) interface{} {
 	}
 }
 
-// CreateChatRequest is the native http request body for chatgpt
-func (c *ChatInstance) CreateChatRequest(props *ChatProps) (string, error) {
+// CreateChatRequest is the native http request body for openai
+func (c *ChatInstance) CreateChatRequest(props *adaptercommon.ChatProps) (string, error) {
 	if globals.IsOpenAIDalleModel(props.Model) {
 		return c.CreateImage(props)
 	}
@@ -81,20 +69,20 @@ func (c *ChatInstance) CreateChatRequest(props *ChatProps) (string, error) {
 	)
 
 	if err != nil || res == nil {
-		return "", fmt.Errorf("chatgpt error: %s", err.Error())
+		return "", fmt.Errorf("openai error: %s", err.Error())
 	}
 
 	data := utils.MapToStruct[ChatResponse](res)
 	if data == nil {
-		return "", fmt.Errorf("chatgpt error: cannot parse response")
+		return "", fmt.Errorf("openai error: cannot parse response")
 	} else if data.Error.Message != "" {
-		return "", fmt.Errorf("chatgpt error: %s", data.Error.Message)
+		return "", fmt.Errorf("openai error: %s", data.Error.Message)
 	}
 	return data.Choices[0].Message.Content, nil
 }
 
-// CreateStreamChatRequest is the stream response body for chatgpt
-func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, callback globals.Hook) error {
+// CreateStreamChatRequest is the stream response body for openai
+func (c *ChatInstance) CreateStreamChatRequest(props *adaptercommon.ChatProps, callback globals.Hook) error {
 	if globals.IsOpenAIDalleModel(props.Model) {
 		if url, err := c.CreateImage(props); err != nil {
 			return err

@@ -1,6 +1,7 @@
 package azure
 
 import (
+	adaptercommon "chat/adapter/common"
 	"chat/globals"
 	"chat/utils"
 	"fmt"
@@ -32,21 +33,21 @@ func (c *ChatInstance) CreateImageRequest(props ImageProps) (string, error) {
 		N:      1,
 	})
 	if err != nil || res == nil {
-		return "", fmt.Errorf("chatgpt error: %s", err.Error())
+		return "", fmt.Errorf("openai error: %s", err.Error())
 	}
 
 	data := utils.MapToStruct[ImageResponse](res)
 	if data == nil {
-		return "", fmt.Errorf("chatgpt error: cannot parse response")
+		return "", fmt.Errorf("openai error: cannot parse response")
 	} else if data.Error.Message != "" {
-		return "", fmt.Errorf("chatgpt error: %s", data.Error.Message)
+		return "", fmt.Errorf("openai error: %s", data.Error.Message)
 	}
 
 	return data.Data[0].Url, nil
 }
 
 // CreateImage will create a dalle image from prompt, return markdown of image
-func (c *ChatInstance) CreateImage(props *ChatProps) (string, error) {
+func (c *ChatInstance) CreateImage(props *adaptercommon.ChatProps) (string, error) {
 	url, err := c.CreateImageRequest(ImageProps{
 		Model:  props.Model,
 		Prompt: c.GetLatestPrompt(props),

@@ -1,6 +1,7 @@
 package azure
 
 import (
+	adaptercommon "chat/adapter/common"
 	"chat/globals"
 	"chat/utils"
 	"errors"
@@ -8,7 +9,7 @@ import (
 	"regexp"
 )
 
-func formatMessages(props *ChatProps) interface{} {
+func formatMessages(props *adaptercommon.ChatProps) interface{} {
 	if globals.IsVisionModel(props.Model) {
 		return utils.Each[globals.Message, Message](props.Message, func(message globals.Message) Message {
 			if message.Role == globals.User {
@@ -120,7 +121,7 @@ func (c *ChatInstance) ProcessLine(data string, isCompletionType bool) (*globals
 		}, nil
 	}
 
-	globals.Warn(fmt.Sprintf("chatgpt error: cannot parse completion response: %s", data))
+	globals.Warn(fmt.Sprintf("openai error: cannot parse completion response: %s", data))
 	return &globals.Chunk{Content: ""}, errors.New("parser error: cannot parse completion response")
 }
 
@@ -129,9 +130,9 @@ func (c *ChatInstance) ProcessLine(data string, isCompletionType bool) (*globals
 	}
 
 	if form := processChatErrorResponse(data); form != nil {
-		return &globals.Chunk{Content: ""}, errors.New(fmt.Sprintf("chatgpt error: %s (type: %s)", form.Error.Message, form.Error.Type))
+		return &globals.Chunk{Content: ""}, errors.New(fmt.Sprintf("openai error: %s (type: %s)", form.Error.Message, form.Error.Type))
 	}
 
-	globals.Warn(fmt.Sprintf("chatgpt error: cannot parse chat completion response: %s", data))
+	globals.Warn(fmt.Sprintf("openai error: cannot parse chat completion response: %s", data))
 	return &globals.Chunk{Content: ""}, errors.New("parser error: cannot parse chat completion response")
 }

@@ -1,6 +1,7 @@
 package azure
 
 import (
+	factory "chat/adapter/common"
 	"chat/globals"
 )
 
@@ -42,7 +43,7 @@ func NewChatInstance(endpoint, apiKey string, resource string) *ChatInstance {
 	}
 }
 
-func NewChatInstanceFromConfig(conf globals.ChannelConfig) *ChatInstance {
+func NewChatInstanceFromConfig(conf globals.ChannelConfig) factory.Factory {
 	param := conf.SplitRandomSecret(2)
 	return NewChatInstance(
 		conf.GetEndpoint(),

@@ -24,7 +24,7 @@ type Message struct {
 	ToolCalls *globals.ToolCalls `json:"tool_calls,omitempty"` // only `assistant` role
 }
 
-// ChatRequest is the request body for chatgpt
+// ChatRequest is the request body for openai
 type ChatRequest struct {
 	Model    string      `json:"model"`
 	Messages interface{} `json:"messages"`
@@ -38,7 +38,7 @@ type ChatRequest struct {
 	ToolChoice *interface{} `json:"tool_choice,omitempty"` // string or object
 }
 
-// CompletionRequest is the request body for chatgpt completion
+// CompletionRequest is the request body for openai completion
 type CompletionRequest struct {
 	Model  string `json:"model"`
 	Prompt string `json:"prompt"`
@@ -46,7 +46,7 @@ type CompletionRequest struct {
 	Stream bool `json:"stream"`
 }
 
-// ChatResponse is the native http request body for chatgpt
+// ChatResponse is the native http request body for openai
 type ChatResponse struct {
 	ID     string `json:"id"`
 	Object string `json:"object"`
@@ -62,7 +62,7 @@ type ChatResponse struct {
 	} `json:"error"`
 }
 
-// ChatStreamResponse is the stream response body for chatgpt
+// ChatStreamResponse is the stream response body for openai
 type ChatStreamResponse struct {
 	ID     string `json:"id"`
 	Object string `json:"object"`
@@ -75,7 +75,7 @@ type ChatStreamResponse struct {
 	} `json:"choices"`
 }
 
-// CompletionResponse is the native http request body / stream response body for chatgpt completion
+// CompletionResponse is the native http request body / stream response body for openai completion
 type CompletionResponse struct {
 	ID     string `json:"id"`
 	Object string `json:"object"`
@@ -96,7 +96,7 @@ type ChatStreamErrorResponse struct {
 
 type ImageSize string
 
-// ImageRequest is the request body for chatgpt dalle image generation
+// ImageRequest is the request body for openai dalle image generation
 type ImageRequest struct {
 	Model  string `json:"model"`
 	Prompt string `json:"prompt"`

@@ -1,20 +1,13 @@
 package baichuan
 
 import (
+	adaptercommon "chat/adapter/common"
 	"chat/globals"
 	"chat/utils"
 	"errors"
 	"fmt"
 )
 
-type ChatProps struct {
-	Model       string
-	Message     []globals.Message
-	TopP        *float32
-	TopK        *int
-	Temperature *float32
-}
-
 func (c *ChatInstance) GetChatEndpoint() string {
 	return fmt.Sprintf("%s/v1/chat/completions", c.GetEndpoint())
 }
@@ -38,7 +31,7 @@ func (c *ChatInstance) GetMessages(messages []globals.Message) []globals.Message
 	return messages
 }
 
-func (c *ChatInstance) GetChatBody(props *ChatProps, stream bool) ChatRequest {
+func (c *ChatInstance) GetChatBody(props *adaptercommon.ChatProps, stream bool) ChatRequest {
 	return ChatRequest{
 		Model:    c.GetModel(props.Model),
 		Messages: c.GetMessages(props.Message),
@@ -50,7 +43,7 @@ func (c *ChatInstance) GetChatBody(props *ChatProps, stream bool) ChatRequest {
 }
 
 // CreateChatRequest is the native http request body for baichuan
-func (c *ChatInstance) CreateChatRequest(props *ChatProps) (string, error) {
+func (c *ChatInstance) CreateChatRequest(props *adaptercommon.ChatProps) (string, error) {
 	res, err := utils.Post(
 		c.GetChatEndpoint(),
 		c.GetHeader(),
@@ -71,7 +64,7 @@ func (c *ChatInstance) CreateChatRequest(props *ChatProps) (string, error) {
 }
 
 // CreateStreamChatRequest is the stream response body for baichuan
-func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, callback globals.Hook) error {
+func (c *ChatInstance) CreateStreamChatRequest(props *adaptercommon.ChatProps, callback globals.Hook) error {
 	err := utils.EventScanner(&utils.EventScannerProps{
 		Method: "POST",
 		Uri:    c.GetChatEndpoint(),

@@ -1,6 +1,7 @@
 package baichuan
 
 import (
+	factory "chat/adapter/common"
 	"chat/globals"
 	"fmt"
 )
@@ -32,7 +33,7 @@ func NewChatInstance(endpoint, apiKey string) *ChatInstance {
 	}
 }
 
-func NewChatInstanceFromConfig(conf globals.ChannelConfig) *ChatInstance {
+func NewChatInstanceFromConfig(conf globals.ChannelConfig) factory.Factory {
 	return NewChatInstance(
 		conf.GetEndpoint(),
 		conf.GetRandomSecret(),

@@ -1,18 +1,14 @@
 package bing
 
 import (
+	adaptercommon "chat/adapter/common"
 	"chat/globals"
 	"chat/utils"
 	"fmt"
 	"strings"
 )
 
-type ChatProps struct {
-	Message []globals.Message
-	Model   string
-}
-
-func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, hook globals.Hook) error {
+func (c *ChatInstance) CreateStreamChatRequest(props *adaptercommon.ChatProps, hook globals.Hook) error {
 	var conn *utils.WebSocket
 	if conn = utils.NewWebsocketClient(c.GetEndpoint()); conn == nil {
 		return fmt.Errorf("bing error: websocket connection failed")

@@ -1,6 +1,7 @@
 package bing
 
 import (
+	factory "chat/adapter/common"
 	"chat/globals"
 	"fmt"
 )
@@ -21,7 +22,7 @@ func NewChatInstance(endpoint, secret string) *ChatInstance {
 	}
 }
 
-func NewChatInstanceFromConfig(conf globals.ChannelConfig) *ChatInstance {
+func NewChatInstanceFromConfig(conf globals.ChannelConfig) factory.Factory {
 	return NewChatInstance(
 		conf.GetEndpoint(),
 		conf.GetRandomSecret(),

@@ -1,47 +0,0 @@
-package chatgpt
-
-import (
-	"chat/globals"
-	"chat/utils"
-	"fmt"
-	"github.com/spf13/viper"
-	"strings"
-)
-
-func (c *ChatInstance) Test() bool {
-	result, err := c.CreateChatRequest(&ChatProps{
-		Model:   globals.GPT3Turbo,
-		Message: []globals.Message{{Role: globals.User, Content: "hi"}},
-		Token:   utils.ToPtr(1),
-	})
-	if err != nil {
-		fmt.Println(fmt.Sprintf("%s: test failed (%s)", c.GetApiKey(), err.Error()))
-	}
-
-	return err == nil && len(result) > 0
-}
-
-func FilterKeys(v string) []string {
-	endpoint := viper.GetString(fmt.Sprintf("openai.%s.endpoint", v))
-	keys := strings.Split(viper.GetString(fmt.Sprintf("openai.%s.apikey", v)), "|")
-
-	return FilterKeysNative(endpoint, keys)
-}
-
-func FilterKeysNative(endpoint string, keys []string) []string {
-	stack := make(chan string, len(keys))
-	for _, key := range keys {
-		go func(key string) {
-			instance := NewChatInstance(endpoint, key)
-			stack <- utils.Multi[string](instance.Test(), key, "")
-		}(key)
-	}
-
-	var result []string
-	for i := 0; i < len(keys); i++ {
-		if res := <-stack; res != "" {
-			result = append(result, res)
-		}
-	}
-	return result
-}

@@ -1,6 +1,7 @@
 package claude
 
 import (
+	adaptercommon "chat/adapter/common"
 	"chat/globals"
 	"chat/utils"
 	"fmt"
@@ -9,15 +10,6 @@ import (
 
 const defaultTokens = 2500
 
-type ChatProps struct {
-	Model       string
-	Message     []globals.Message
-	Token       *int
-	Temperature *float32
-	TopP        *float32
-	TopK        *int
-}
-
 func (c *ChatInstance) GetChatEndpoint() string {
 	return fmt.Sprintf("%s/v1/complete", c.GetEndpoint())
 }
@@ -52,15 +44,15 @@ func (c *ChatInstance) ConvertMessage(message []globals.Message) string {
 	return fmt.Sprintf("%s\n\nAssistant:", result)
 }
 
-func (c *ChatInstance) GetTokens(props *ChatProps) int {
-	if props.Token == nil || *props.Token <= 0 {
+func (c *ChatInstance) GetTokens(props *adaptercommon.ChatProps) int {
+	if props.MaxTokens == nil || *props.MaxTokens <= 0 {
 		return defaultTokens
 	}
 
-	return *props.Token
+	return *props.MaxTokens
 }
 
-func (c *ChatInstance) GetChatBody(props *ChatProps, stream bool) *ChatBody {
+func (c *ChatInstance) GetChatBody(props *adaptercommon.ChatProps, stream bool) *ChatBody {
 	return &ChatBody{
 		Prompt:            c.ConvertMessage(props.Message),
 		MaxTokensToSample: c.GetTokens(props),
@@ -73,7 +65,7 @@ func (c *ChatInstance) GetChatBody(props *ChatProps, stream bool) *ChatBody {
 }
 
 // CreateChatRequest is the request for anthropic claude
-func (c *ChatInstance) CreateChatRequest(props *ChatProps) (string, error) {
+func (c *ChatInstance) CreateChatRequest(props *adaptercommon.ChatProps) (string, error) {
 	data, err := utils.Post(c.GetChatEndpoint(), c.GetChatHeaders(), c.GetChatBody(props, false))
 	if err != nil {
 		return "", fmt.Errorf("claude error: %s", err.Error())
@@ -115,7 +107,7 @@ func (c *ChatInstance) ProcessLine(buf, data string) (string, error) {
 }
 
 // CreateStreamChatRequest is the stream request for anthropic claude
-func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, hook globals.Hook) error {
+func (c *ChatInstance) CreateStreamChatRequest(props *adaptercommon.ChatProps, hook globals.Hook) error {
 	buf := ""
 
 	return utils.EventSource(
@@ -124,7 +116,6 @@ func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, hook globals.Ho
 		c.GetChatHeaders(),
 		c.GetChatBody(props, true),
 		func(data string) error {
-
 			if resp, err := c.ProcessLine(buf, data); err == nil && len(resp) > 0 {
 				buf = ""
 				if err := hook(&globals.Chunk{Content: resp}); err != nil {

@@ -1,6 +1,7 @@
 package claude
 
 import (
+	factory "chat/adapter/common"
 	"chat/globals"
 )
 
@@ -16,7 +17,7 @@ func NewChatInstance(endpoint, apiKey string) *ChatInstance {
 	}
 }
 
-func NewChatInstanceFromConfig(conf globals.ChannelConfig) *ChatInstance {
+func NewChatInstanceFromConfig(conf globals.ChannelConfig) factory.Factory {
 	return NewChatInstance(
 		conf.GetEndpoint(),
 		conf.GetRandomSecret(),

adapter/common/interface.go (new file, 11 lines)
@@ -0,0 +1,11 @@
+package adaptercommon
+
+import (
+	"chat/globals"
+)
+
+type Factory interface {
+	CreateStreamChatRequest(props *ChatProps, hook globals.Hook) error
+}
+
+type FactoryCreator func(globals.ChannelConfig) Factory

adapter/common/types.go (new file, 31 lines)
@@ -0,0 +1,31 @@
+package adaptercommon
+
+import (
+	"chat/globals"
+	"chat/utils"
+)
+
+type RequestProps struct {
+	MaxRetries *int
+	Current    int
+	Group      string
+}
+
+type ChatProps struct {
+	RequestProps
+
+	Model         string
+	OriginalModel string
+
+	Message           []globals.Message
+	MaxTokens         *int
+	PresencePenalty   *float32
+	FrequencyPenalty  *float32
+	RepetitionPenalty *float32
+	Temperature       *float32
+	TopP              *float32
+	TopK              *int
+	Tools             *globals.FunctionTools
+	ToolChoice        *interface{}
+	Buffer            utils.Buffer
+}

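The two new files define the contract every adapter now satisfies: a FactoryCreator builds a Factory from a channel config, and the per-adapter ChatProps structs are replaced by the single shared ChatProps above. A minimal sketch of why each NewChatInstanceFromConfig now declares the interface (factory.Factory) as its return type follows; mockInstance and the string-based Hook are hypothetical simplifications, not code from the repository.

package main

import "fmt"

// Mirrors adapter/common/types.go: shared request metadata is embedded
// into the per-request chat properties.
type RequestProps struct {
	MaxRetries *int
	Current    int
	Group      string
}

type ChatProps struct {
	RequestProps

	Model         string
	OriginalModel string
	MaxTokens     *int
}

// Hook and Factory mirror the shapes in adapter/common/interface.go.
type Hook func(content string) error

type Factory interface {
	CreateStreamChatRequest(props *ChatProps, hook Hook) error
}

// mockInstance stands in for a concrete *ChatInstance.
type mockInstance struct{}

func (m *mockInstance) CreateStreamChatRequest(props *ChatProps, hook Hook) error {
	// Group is promoted from the embedded RequestProps.
	return hook(fmt.Sprintf("model=%s group=%s", props.Model, props.Group))
}

// Returning the interface (not *mockInstance) is what lets every adapter's
// constructor share one FactoryCreator signature and live in the same map.
func NewChatInstanceFromConfig() Factory { return &mockInstance{} }

func main() {
	props := &ChatProps{Model: "gpt-3.5-turbo", RequestProps: RequestProps{Group: "default"}}
	_ = NewChatInstanceFromConfig().CreateStreamChatRequest(props, func(s string) error {
		fmt.Println(s)
		return nil
	})
}
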
@@ -1,6 +1,7 @@
 package dashscope
 
 import (
+	adaptercommon "chat/adapter/common"
 	"chat/globals"
 	"chat/utils"
 	"fmt"
@@ -9,16 +10,6 @@ import (
 
 const defaultMaxTokens = 1500
 
-type ChatProps struct {
-	Model             string
-	Token             *int
-	Temperature       *float32
-	TopP              *float32
-	TopK              *int
-	RepetitionPenalty *float32
-	Message           []globals.Message
-}
-
 func (c *ChatInstance) GetHeader() map[string]string {
 	return map[string]string{
 		"Content-Type": "application/json",
@@ -43,16 +34,16 @@ func (c *ChatInstance) FormatMessages(message []globals.Message) []Message {
 	return messages
 }
 
-func (c *ChatInstance) GetMaxTokens(props *ChatProps) int {
+func (c *ChatInstance) GetMaxTokens(props *adaptercommon.ChatProps) int {
 	// dashscope has a restriction of 1500 tokens in completion
-	if props.Token == nil || *props.Token <= 0 || *props.Token > 1500 {
+	if props.MaxTokens == nil || *props.MaxTokens <= 0 || *props.MaxTokens > 1500 {
 		return defaultMaxTokens
 	}
 
-	return *props.Token
+	return *props.MaxTokens
 }
 
-func (c *ChatInstance) GetTopP(props *ChatProps) *float32 {
+func (c *ChatInstance) GetTopP(props *adaptercommon.ChatProps) *float32 {
 	// range of top_p should be (0.0, 1.0)
 	if props.TopP == nil {
 		return nil
@@ -67,7 +58,7 @@ func (c *ChatInstance) GetTopP(props *ChatProps) *float32 {
 	return props.TopP
 }
 
-func (c *ChatInstance) GetRepeatPenalty(props *ChatProps) *float32 {
+func (c *ChatInstance) GetRepeatPenalty(props *adaptercommon.ChatProps) *float32 {
 	// range of repetition_penalty should greater than 0.0
 	if props.RepetitionPenalty == nil {
 		return nil
@@ -80,7 +71,7 @@ func (c *ChatInstance) GetRepeatPenalty(props *ChatProps) *float32 {
 	return props.RepetitionPenalty
 }
 
-func (c *ChatInstance) GetChatBody(props *ChatProps) ChatRequest {
+func (c *ChatInstance) GetChatBody(props *adaptercommon.ChatProps) ChatRequest {
 	return ChatRequest{
 		Model: strings.TrimSuffix(props.Model, "-net"),
 		Input: ChatInput{
@@ -102,7 +93,7 @@ func (c *ChatInstance) GetChatEndpoint() string {
 	return fmt.Sprintf("%s/api/v1/services/aigc/text-generation/generation", c.Endpoint)
 }
 
-func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, callback globals.Hook) error {
+func (c *ChatInstance) CreateStreamChatRequest(props *adaptercommon.ChatProps, callback globals.Hook) error {
 	return utils.EventSource(
 		"POST",
 		c.GetChatEndpoint(),

@@ -1,6 +1,7 @@
 package dashscope
 
 import (
+	factory "chat/adapter/common"
 	"chat/globals"
 )
 
@@ -24,7 +25,7 @@ func NewChatInstance(endpoint string, apiKey string) *ChatInstance {
 	}
 }
 
-func NewChatInstanceFromConfig(conf globals.ChannelConfig) *ChatInstance {
+func NewChatInstanceFromConfig(conf globals.ChannelConfig) factory.Factory {
 	return NewChatInstance(
 		conf.GetEndpoint(),
 		conf.GetRandomSecret(),

|
@ -1,18 +1,12 @@
|
|||||||
package hunyuan
|
package hunyuan
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
adaptercommon "chat/adapter/common"
|
||||||
"chat/globals"
|
"chat/globals"
|
||||||
"context"
|
"context"
|
||||||
"fmt"
|
"fmt"
|
||||||
)
|
)
|
||||||
|
|
||||||
type ChatProps struct {
|
|
||||||
Model string
|
|
||||||
Message []globals.Message
|
|
||||||
Temperature *float32
|
|
||||||
TopP *float32
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *ChatInstance) FormatMessages(messages []globals.Message) []globals.Message {
|
func (c *ChatInstance) FormatMessages(messages []globals.Message) []globals.Message {
|
||||||
var result []globals.Message
|
var result []globals.Message
|
||||||
for _, message := range messages {
|
for _, message := range messages {
|
||||||
@ -36,7 +30,7 @@ func (c *ChatInstance) FormatMessages(messages []globals.Message) []globals.Mess
|
|||||||
return result
|
return result
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, callback globals.Hook) error {
|
func (c *ChatInstance) CreateStreamChatRequest(props *adaptercommon.ChatProps, callback globals.Hook) error {
|
||||||
credential := NewCredential(c.GetSecretId(), c.GetSecretKey())
|
credential := NewCredential(c.GetSecretId(), c.GetSecretKey())
|
||||||
client := NewInstance(c.GetAppId(), c.GetEndpoint(), credential)
|
client := NewInstance(c.GetAppId(), c.GetEndpoint(), credential)
|
||||||
channel, err := client.Chat(context.Background(), NewRequest(Stream, c.FormatMessages(props.Message), props.Temperature, props.TopP))
|
channel, err := client.Chat(context.Background(), NewRequest(Stream, c.FormatMessages(props.Message), props.Temperature, props.TopP))
|
||||||
|
@@ -1,6 +1,7 @@
 package hunyuan
 
 import (
+	factory "chat/adapter/common"
 	"chat/globals"
 	"chat/utils"
 )
@@ -37,7 +38,7 @@ func NewChatInstance(endpoint, appId, secretId, secretKey string) *ChatInstance
 	}
 }
 
-func NewChatInstanceFromConfig(conf globals.ChannelConfig) *ChatInstance {
+func NewChatInstanceFromConfig(conf globals.ChannelConfig) factory.Factory {
 	params := conf.SplitRandomSecret(3)
 	return NewChatInstance(
 		conf.GetEndpoint(),

@@ -1,6 +1,7 @@
 package midjourney
 
 import (
+	adaptercommon "chat/adapter/common"
 	"chat/globals"
 	"chat/utils"
 	"fmt"
@@ -66,16 +67,16 @@ func (c *ChatInstance) GetCleanPrompt(model string, prompt string) string {
 	return target
 }
 
-func (c *ChatInstance) GetPrompt(props *ChatProps) string {
-	if len(props.Messages) == 0 {
+func (c *ChatInstance) GetPrompt(props *adaptercommon.ChatProps) string {
+	if len(props.Message) == 0 {
 		return ""
 	}
 
-	content := props.Messages[len(props.Messages)-1].Content
+	content := props.Message[len(props.Message)-1].Content
 	return c.GetCleanPrompt(props.Model, content)
 }
 
-func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, callback globals.Hook) error {
+func (c *ChatInstance) CreateStreamChatRequest(props *adaptercommon.ChatProps, callback globals.Hook) error {
 	// partial response like:
 	// ```progress
 	// 0
@@ -95,6 +96,11 @@ func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, callback global
 	var begin bool
 
 	form, err := c.CreateStreamTask(action, prompt, func(form *StorageForm, progress int) error {
+		if progress == -1 {
+			// ping event
+			return callback(&globals.Chunk{Content: ""})
+		}
+
 		if progress == 0 {
 			begin = true
 			if err := callback(&globals.Chunk{Content: "```progress\n"}); err != nil {
@@ -96,6 +96,11 @@ func (c *ChatInstance) CreateStreamTask(action string, prompt string, hook func(
 		utils.Sleep(50)
 		form := getStorage(task)
 		if form == nil {
+			// hook for ping
+			if err := hook(nil, -1); err != nil {
+				return nil, err
+			}
+
 			continue
 		}

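Beyond the ChatProps migration, the two midjourney hunks above add a keep-alive path: while the task poller finds no stored update, it reports progress -1 instead of staying silent, and the chat layer answers that ping with an empty chunk so the client connection is not dropped. A standalone sketch of that contract follows; pollTask, getProgress, and the timings are illustrative, not code from the repository.

package main

import (
	"fmt"
	"time"
)

// progressHook mirrors the hook contract in the hunks above:
// progress -1 is a ping (no task update yet), 0..100 is real progress.
type progressHook func(progress int) error

// pollTask sketches the polling loop: when storage has no update, it emits
// a ping instead of staying silent, keeping the downstream stream alive.
func pollTask(getProgress func() (int, bool), hook progressHook) error {
	for {
		time.Sleep(50 * time.Millisecond)
		progress, ok := getProgress()
		if !ok {
			// hook for ping
			if err := hook(-1); err != nil {
				return err
			}
			continue
		}
		if err := hook(progress); err != nil {
			return err
		}
		if progress >= 100 {
			return nil
		}
	}
}

func main() {
	ticks := 0
	_ = pollTask(func() (int, bool) {
		ticks++
		if ticks%2 == 0 {
			return ticks * 10, true // fake progress update
		}
		return 0, false // no update yet -> ping
	}, func(p int) error {
		if p == -1 {
			fmt.Println("ping (keep-alive)")
			return nil
		}
		fmt.Printf("progress: %d%%\n", p)
		return nil
	})
}
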
@@ -1,6 +1,7 @@
 package midjourney
 
 import (
+	factory "chat/adapter/common"
 	"chat/globals"
 	"fmt"
 )
@@ -47,7 +48,7 @@ func NewChatInstance(endpoint, apiSecret, whiteList string) *ChatInstance {
 	}
 }
 
-func NewChatInstanceFromConfig(conf globals.ChannelConfig) *ChatInstance {
+func NewChatInstanceFromConfig(conf globals.ChannelConfig) factory.Factory {
 	params := conf.SplitRandomSecret(2)
 
 	return NewChatInstance(

@@ -1,6 +1,7 @@
-package chatgpt
+package openai
 
 import (
+	adaptercommon "chat/adapter/common"
 	"chat/globals"
 	"chat/utils"
 	"errors"
@@ -8,20 +9,7 @@ import (
 	"regexp"
 )
 
-type ChatProps struct {
-	Model            string
-	Message          []globals.Message
-	Token            *int
-	PresencePenalty  *float32
-	FrequencyPenalty *float32
-	Temperature      *float32
-	TopP             *float32
-	Tools            *globals.FunctionTools
-	ToolChoice       *interface{}
-	Buffer           utils.Buffer
-}
-
-func (c *ChatInstance) GetChatEndpoint(props *ChatProps) string {
+func (c *ChatInstance) GetChatEndpoint(props *adaptercommon.ChatProps) string {
 	if props.Model == globals.GPT3TurboInstruct {
 		return fmt.Sprintf("%s/v1/completions", c.GetEndpoint())
 	}
@@ -36,7 +24,7 @@ func (c *ChatInstance) GetCompletionPrompt(messages []globals.Message) string {
 	return result
 }
 
-func (c *ChatInstance) GetLatestPrompt(props *ChatProps) string {
+func (c *ChatInstance) GetLatestPrompt(props *adaptercommon.ChatProps) string {
 	if len(props.Message) == 0 {
 		return ""
 	}
@@ -44,13 +32,13 @@ func (c *ChatInstance) GetLatestPrompt(props *ChatProps) string {
 	return props.Message[len(props.Message)-1].Content
 }
 
-func (c *ChatInstance) GetChatBody(props *ChatProps, stream bool) interface{} {
+func (c *ChatInstance) GetChatBody(props *adaptercommon.ChatProps, stream bool) interface{} {
 	if props.Model == globals.GPT3TurboInstruct {
 		// for completions
 		return CompletionRequest{
 			Model:    props.Model,
 			Prompt:   c.GetCompletionPrompt(props.Message),
-			MaxToken: props.Token,
+			MaxToken: props.MaxTokens,
 			Stream:   stream,
 		}
 	}
@@ -60,7 +48,7 @@ func (c *ChatInstance) GetChatBody(props *ChatProps, stream bool) interface{} {
 	return ChatRequest{
 		Model:            props.Model,
 		Messages:         messages,
-		MaxToken:         props.Token,
+		MaxToken:         props.MaxTokens,
 		Stream:           stream,
 		PresencePenalty:  props.PresencePenalty,
 		FrequencyPenalty: props.FrequencyPenalty,
@@ -71,8 +59,8 @@ func (c *ChatInstance) GetChatBody(props *ChatProps, stream bool) interface{} {
 	}
 }
 
-// CreateChatRequest is the native http request body for chatgpt
-func (c *ChatInstance) CreateChatRequest(props *ChatProps) (string, error) {
+// CreateChatRequest is the native http request body for openai
+func (c *ChatInstance) CreateChatRequest(props *adaptercommon.ChatProps) (string, error) {
 	if globals.IsOpenAIDalleModel(props.Model) {
 		return c.CreateImage(props)
 	}
@@ -84,14 +72,14 @@ func (c *ChatInstance) CreateChatRequest(props *ChatProps) (string, error) {
 	)
 
 	if err != nil || res == nil {
-		return "", fmt.Errorf("chatgpt error: %s", err.Error())
+		return "", fmt.Errorf("openai error: %s", err.Error())
 	}
 
 	data := utils.MapToStruct[ChatResponse](res)
 	if data == nil {
-		return "", fmt.Errorf("chatgpt error: cannot parse response")
+		return "", fmt.Errorf("openai error: cannot parse response")
 	} else if data.Error.Message != "" {
-		return "", fmt.Errorf("chatgpt error: %s", data.Error.Message)
+		return "", fmt.Errorf("openai error: %s", data.Error.Message)
 	}
 	return data.Choices[0].Message.Content, nil
 }
@@ -103,8 +91,8 @@ func hideRequestId(message string) string {
 	return exp.ReplaceAllString(message, "")
 }
 
-// CreateStreamChatRequest is the stream response body for chatgpt
-func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, callback globals.Hook) error {
+// CreateStreamChatRequest is the stream response body for openai
+func (c *ChatInstance) CreateStreamChatRequest(props *adaptercommon.ChatProps, callback globals.Hook) error {
 	if globals.IsOpenAIDalleModel(props.Model) {
 		if url, err := c.CreateImage(props); err != nil {
 			return err

@@ -1,6 +1,7 @@
-package chatgpt
+package openai
 
 import (
+	adaptercommon "chat/adapter/common"
 	"chat/globals"
 	"chat/utils"
 	"fmt"
@@ -37,7 +38,7 @@ func (c *ChatInstance) CreateImageRequest(props ImageProps) (string, error) {
 
 	data := utils.MapToStruct[ImageResponse](res)
 	if data == nil {
-		return "", fmt.Errorf("chatgpt error: cannot parse response")
+		return "", fmt.Errorf("openai error: cannot parse response")
 	} else if data.Error.Message != "" {
 		return "", fmt.Errorf(data.Error.Message)
 	}
@@ -46,7 +47,7 @@ func (c *ChatInstance) CreateImageRequest(props ImageProps) (string, error) {
 }
 
 // CreateImage will create a dalle image from prompt, return markdown of image
-func (c *ChatInstance) CreateImage(props *ChatProps) (string, error) {
+func (c *ChatInstance) CreateImage(props *adaptercommon.ChatProps) (string, error) {
 	url, err := c.CreateImageRequest(ImageProps{
 		Model:  props.Model,
 		Prompt: c.GetLatestPrompt(props),

@@ -1,6 +1,7 @@
-package chatgpt
+package openai
 
 import (
+	adaptercommon "chat/adapter/common"
 	"chat/globals"
 	"chat/utils"
 	"errors"
@@ -8,7 +9,7 @@ import (
 	"regexp"
 )
 
-func formatMessages(props *ChatProps) interface{} {
+func formatMessages(props *adaptercommon.ChatProps) interface{} {
 	if globals.IsVisionModel(props.Model) {
 		return utils.Each[globals.Message, Message](props.Message, func(message globals.Message) Message {
 			if message.Role == globals.User {
@@ -118,7 +119,7 @@ func (c *ChatInstance) ProcessLine(data string, isCompletionType bool) (*globals
 		}, nil
 	}
 
-	globals.Warn(fmt.Sprintf("chatgpt error: cannot parse completion response: %s", data))
+	globals.Warn(fmt.Sprintf("openai error: cannot parse completion response: %s", data))
 	return &globals.Chunk{Content: ""}, errors.New("parser error: cannot parse completion response")
 }
 
@@ -127,9 +128,9 @@ func (c *ChatInstance) ProcessLine(data string, isCompletionType bool) (*globals
 	}
 
 	if form := processChatErrorResponse(data); form != nil {
-		return &globals.Chunk{Content: ""}, errors.New(fmt.Sprintf("chatgpt error: %s (type: %s)", form.Error.Message, form.Error.Type))
+		return &globals.Chunk{Content: ""}, errors.New(fmt.Sprintf("openai error: %s (type: %s)", form.Error.Message, form.Error.Type))
	}
 
-	globals.Warn(fmt.Sprintf("chatgpt error: cannot parse chat completion response: %s", data))
+	globals.Warn(fmt.Sprintf("openai error: cannot parse chat completion response: %s", data))
 	return &globals.Chunk{Content: ""}, errors.New("parser error: cannot parse chat completion response")
 }

@@ -1,6 +1,7 @@
-package chatgpt
+package openai
 
 import (
+	factory "chat/adapter/common"
 	"chat/globals"
 	"fmt"
 )
@@ -37,7 +38,7 @@ func NewChatInstance(endpoint, apiKey string) *ChatInstance {
 	}
 }
 
-func NewChatInstanceFromConfig(conf globals.ChannelConfig) *ChatInstance {
+func NewChatInstanceFromConfig(conf globals.ChannelConfig) factory.Factory {
 	return NewChatInstance(
 		conf.GetEndpoint(),
 		conf.GetRandomSecret(),

@@ -1,4 +1,4 @@
-package chatgpt
+package openai
 
 import "chat/globals"
 
@@ -24,7 +24,7 @@ type Message struct {
 	ToolCalls *globals.ToolCalls `json:"tool_calls,omitempty"` // only `assistant` role
 }
 
-// ChatRequest is the request body for chatgpt
+// ChatRequest is the request body for openai
 type ChatRequest struct {
 	Model    string      `json:"model"`
 	Messages interface{} `json:"messages"`
@@ -38,7 +38,7 @@ type ChatRequest struct {
 	ToolChoice *interface{} `json:"tool_choice,omitempty"` // string or object
 }
 
-// CompletionRequest is the request body for chatgpt completion
+// CompletionRequest is the request body for openai completion
 type CompletionRequest struct {
 	Model  string `json:"model"`
 	Prompt string `json:"prompt"`
@@ -46,7 +46,7 @@ type CompletionRequest struct {
 	Stream bool `json:"stream"`
 }
 
-// ChatResponse is the native http request body for chatgpt
+// ChatResponse is the native http request body for openai
 type ChatResponse struct {
 	ID     string `json:"id"`
 	Object string `json:"object"`
@@ -62,7 +62,7 @@ type ChatResponse struct {
 	} `json:"error"`
 }
 
-// ChatStreamResponse is the stream response body for chatgpt
+// ChatStreamResponse is the stream response body for openai
 type ChatStreamResponse struct {
 	ID     string `json:"id"`
 	Object string `json:"object"`
@@ -75,7 +75,7 @@ type ChatStreamResponse struct {
 	} `json:"choices"`
 }
 
-// CompletionResponse is the native http request body / stream response body for chatgpt completion
+// CompletionResponse is the native http request body / stream response body for openai completion
 type CompletionResponse struct {
 	ID     string `json:"id"`
 	Object string `json:"object"`
@@ -96,7 +96,7 @@ type ChatStreamErrorResponse struct {
 
 type ImageSize string
 
-// ImageRequest is the request body for chatgpt dalle image generation
+// ImageRequest is the request body for openai dalle image generation
 type ImageRequest struct {
 	Model  string `json:"model"`
 	Prompt string `json:"prompt"`

@@ -1,6 +1,7 @@
 package palm2
 
 import (
+    adaptercommon "chat/adapter/common"
     "chat/globals"
     "chat/utils"
     "fmt"
@@ -8,15 +9,6 @@ import (
 
 var geminiMaxImages = 16
 
-type ChatProps struct {
-    Model           string
-    Message         []globals.Message
-    Temperature     *float64
-    TopP            *float64
-    TopK            *int
-    MaxOutputTokens *int
-}
-
 func (c *ChatInstance) GetChatEndpoint(model string) string {
     if model == globals.ChatBison001 {
         return fmt.Sprintf("%s/v1beta2/models/%s:generateMessage?key=%s", c.Endpoint, model, c.ApiKey)
@@ -51,7 +43,7 @@ func (c *ChatInstance) ConvertMessage(message []globals.Message) []PalmMessage {
     return result
 }
 
-func (c *ChatInstance) GetPalm2ChatBody(props *ChatProps) *PalmChatBody {
+func (c *ChatInstance) GetPalm2ChatBody(props *adaptercommon.ChatProps) *PalmChatBody {
     return &PalmChatBody{
         Prompt: PalmPrompt{
             Messages: c.ConvertMessage(props.Message),
@@ -59,12 +51,12 @@ func (c *ChatInstance) GetPalm2ChatBody(props *ChatProps) *PalmChatBody {
     }
 }
 
-func (c *ChatInstance) GetGeminiChatBody(props *ChatProps) *GeminiChatBody {
+func (c *ChatInstance) GetGeminiChatBody(props *adaptercommon.ChatProps) *GeminiChatBody {
     return &GeminiChatBody{
         Contents: c.GetGeminiContents(props.Model, props.Message),
         GenerationConfig: GeminiConfig{
             Temperature: props.Temperature,
-            MaxOutputTokens: props.MaxOutputTokens,
+            MaxOutputTokens: props.MaxTokens,
             TopP: props.TopP,
             TopK: props.TopK,
         },
@@ -95,7 +87,7 @@ func (c *ChatInstance) GetGeminiChatResponse(data interface{}) (string, error) {
     return "", fmt.Errorf("gemini: cannot parse response")
 }
 
-func (c *ChatInstance) CreateChatRequest(props *ChatProps) (string, error) {
+func (c *ChatInstance) CreateChatRequest(props *adaptercommon.ChatProps) (string, error) {
     uri := c.GetChatEndpoint(props.Model)
 
     if props.Model == globals.ChatBison001 {
@@ -122,7 +114,7 @@ func (c *ChatInstance) CreateChatRequest(props *ChatProps) (string, error) {
 
 // CreateStreamChatRequest is the mock stream request for palm2
 // tips: palm2 does not support stream request
-func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, callback globals.Hook) error {
+func (c *ChatInstance) CreateStreamChatRequest(props *adaptercommon.ChatProps, callback globals.Hook) error {
     response, err := c.CreateChatRequest(props)
     if err != nil {
         return err
@@ -1,6 +1,7 @@
 package palm2
 
 import (
+    factory "chat/adapter/common"
     "chat/globals"
 )
 
@@ -24,7 +25,7 @@ func NewChatInstance(endpoint string, apiKey string) *ChatInstance {
     }
 }
 
-func NewChatInstanceFromConfig(conf globals.ChannelConfig) *ChatInstance {
+func NewChatInstanceFromConfig(conf globals.ChannelConfig) factory.Factory {
    return NewChatInstance(
         conf.GetEndpoint(),
         conf.GetRandomSecret(),
@@ -31,9 +31,9 @@ type GeminiChatBody struct {
 }
 
 type GeminiConfig struct {
-    Temperature     *float64 `json:"temperature,omitempty"`
+    Temperature     *float32 `json:"temperature,omitempty"`
     MaxOutputTokens *int     `json:"maxOutputTokens,omitempty"`
-    TopP            *float64 `json:"topP,omitempty"`
+    TopP            *float32 `json:"topP,omitempty"`
     TopK            *int     `json:"topK,omitempty"`
 }
 
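Note: the per-adapter ChatProps structs deleted throughout this commit (palm2 above; skylark, sparkdesk, slack, zhinao and zhipuai below) collapse into a single adaptercommon.ChatProps. Its definition is not part of the diff; the sketch below reassembles only the fields these hunks actually reference, so treat it as a reconstruction rather than the verbatim struct:

// adaptercommon.ChatProps -- reconstructed sketch of the unified request
// properties that replace the deleted per-adapter structs.
type ChatProps struct {
    OriginalModel string // model name as requested by the caller
    Model         string // model name after channel mapping

    Message []globals.Message
    Current int // retry attempt counter used by adapter.NewChatRequest

    MaxTokens         *int
    Temperature       *float32
    TopP              *float32
    TopK              *int
    PresencePenalty   *float32
    FrequencyPenalty  *float32
    RepetitionPenalty *float32
    Tools             *globals.FunctionTools

    Buffer utils.Buffer
}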
@@ -1,6 +1,7 @@
 package adapter
 
 import (
+    "chat/adapter/common"
     "chat/globals"
     "chat/utils"
     "fmt"
@@ -21,24 +22,24 @@ func isQPSOverLimit(model string, err error) bool {
     }
 }
 
-func NewChatRequest(conf globals.ChannelConfig, props *ChatProps, hook globals.Hook) error {
+func NewChatRequest(conf globals.ChannelConfig, props *adaptercommon.ChatProps, hook globals.Hook) error {
     err := createChatRequest(conf, props, hook)
 
     retries := conf.GetRetry()
     props.Current++
 
     if IsAvailableError(err) {
-        if isQPSOverLimit(props.Model, err) {
+        if isQPSOverLimit(props.OriginalModel, err) {
             // sleep for 0.5s to avoid qps limit
 
-            globals.Info(fmt.Sprintf("qps limit for %s, sleep and retry (times: %d)", props.Model, props.Current))
+            globals.Info(fmt.Sprintf("qps limit for %s, sleep and retry (times: %d)", props.OriginalModel, props.Current))
             time.Sleep(500 * time.Millisecond)
             return NewChatRequest(conf, props, hook)
         }
 
         if props.Current < retries {
             content := strings.Replace(err.Error(), "\n", "", -1)
-            globals.Warn(fmt.Sprintf("retrying chat request for %s (attempt %d/%d, error: %s)", props.Model, props.Current+1, retries, content))
+            globals.Warn(fmt.Sprintf("retrying chat request for %s (attempt %d/%d, error: %s)", props.OriginalModel, props.Current+1, retries, content))
             return NewChatRequest(conf, props, hook)
         }
     }
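Note: with the retry loop above, a caller builds one adaptercommon.ChatProps and leaves channel selection to the adapter layer. A minimal usage sketch modeled on the call sites later in this diff; the model name, role constant, and hook body are illustrative only:

// Hypothetical call site: stream a completion through whichever adapter
// the channel config resolves to, printing chunks as they arrive.
props := &adaptercommon.ChatProps{
    OriginalModel: "gpt-3.5-turbo", // example value
    Message: []globals.Message{
        {Role: globals.User, Content: "hello"}, // role constant assumed
    },
}

err := adapter.NewChatRequest(conf, props, func(chunk *globals.Chunk) error {
    fmt.Print(chunk.Content)
    return nil
})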
@@ -1,6 +1,7 @@
 package skylark
 
 import (
+    adaptercommon "chat/adapter/common"
     "chat/globals"
     "chat/utils"
     "fmt"
@@ -10,21 +11,6 @@ import (
 
 const defaultMaxTokens int64 = 1500
 
-type ChatProps struct {
-    Model   string
-    Message []globals.Message
-    Token   *int
-
-    PresencePenalty  *float32
-    FrequencyPenalty *float32
-    RepeatPenalty    *float32
-    Temperature      *float32
-    TopP             *float32
-    TopK             *int
-    Tools            *globals.FunctionTools
-    Buffer           utils.Buffer
-}
-
 func getMessages(messages []globals.Message) []*api.Message {
     return utils.Each[globals.Message, *api.Message](messages, func(message globals.Message) *api.Message {
         if message.Role == globals.Tool {
@@ -47,7 +33,7 @@ func (c *ChatInstance) GetMaxTokens(token *int) int64 {
     return int64(*token)
 }
 
-func (c *ChatInstance) CreateRequest(props *ChatProps) *api.ChatReq {
+func (c *ChatInstance) CreateRequest(props *adaptercommon.ChatProps) *api.ChatReq {
     return &api.ChatReq{
         Model: &api.Model{
             Name: props.Model,
@@ -59,8 +45,8 @@ func (c *ChatInstance) CreateRequest(props *ChatProps) *api.ChatReq {
             Temperature:       utils.GetPtrVal(props.Temperature, 0.),
             PresencePenalty:   utils.GetPtrVal(props.PresencePenalty, 0.),
             FrequencyPenalty:  utils.GetPtrVal(props.FrequencyPenalty, 0.),
-            RepetitionPenalty: utils.GetPtrVal(props.RepeatPenalty, 0.),
-            MaxTokens:         c.GetMaxTokens(props.Token),
+            RepetitionPenalty: utils.GetPtrVal(props.RepetitionPenalty, 0.),
+            MaxTokens:         c.GetMaxTokens(props.MaxTokens),
         },
         Functions: getFunctions(props.Tools),
     }
@@ -96,7 +82,7 @@ func getChoice(choice *api.ChatResp) *globals.Chunk {
     }
 }
 
-func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, callback globals.Hook) error {
+func (c *ChatInstance) CreateStreamChatRequest(props *adaptercommon.ChatProps, callback globals.Hook) error {
     req := c.CreateRequest(props)
     channel, err := c.Instance.StreamChat(req)
     if err != nil {
@@ -1,6 +1,7 @@
 package skylark
 
 import (
+    factory "chat/adapter/common"
     "chat/globals"
     "github.com/volcengine/volc-sdk-golang/service/maas"
     "strings"
@@ -43,7 +44,7 @@ func NewChatInstance(endpoint, accessKey, secretKey string) *ChatInstance {
     }
 }
 
-func NewChatInstanceFromConfig(conf globals.ChannelConfig) *ChatInstance {
+func NewChatInstanceFromConfig(conf globals.ChannelConfig) factory.Factory {
     params := conf.SplitRandomSecret(2)
 
     return NewChatInstance(
@@ -1,15 +1,12 @@
 package slack
 
 import (
+    adaptercommon "chat/adapter/common"
     "chat/globals"
     "context"
 )
 
-type ChatProps struct {
-    Message []globals.Message
-}
-
-func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, hook globals.Hook) error {
+func (c *ChatInstance) CreateStreamChatRequest(props *adaptercommon.ChatProps, hook globals.Hook) error {
     if err := c.Instance.NewChannel(c.GetChannel()); err != nil {
         return err
     }
@@ -1,6 +1,7 @@
 package slack
 
 import (
+    factory "chat/adapter/common"
     "chat/globals"
     "fmt"
     "github.com/bincooo/claude-api"
@@ -46,7 +47,7 @@ func NewChatInstance(botId, token, channel string) *ChatInstance {
     }
 }
 
-func NewChatInstanceFromConfig(conf globals.ChannelConfig) *ChatInstance {
+func NewChatInstanceFromConfig(conf globals.ChannelConfig) factory.Factory {
     params := conf.SplitRandomSecret(2)
     return NewChatInstance(
         params[0], params[1],
@@ -1,42 +1,33 @@
 package sparkdesk
 
 import (
+    adaptercommon "chat/adapter/common"
     "chat/globals"
     "chat/utils"
     "fmt"
     "strings"
 )
 
-type ChatProps struct {
-    Model       string
-    Message     []globals.Message
-    Token       *int
-    Temperature *float32
-    TopK        *int
-    Tools       *globals.FunctionTools
-    Buffer      utils.Buffer
-}
-
-func GetToken(props *ChatProps) *int {
-    if props.Token == nil {
+func GetToken(props *adaptercommon.ChatProps) *int {
+    if props.MaxTokens == nil {
         return nil
     }
 
     switch props.Model {
     case globals.SparkDeskV2, globals.SparkDeskV3:
-        if *props.Token > 8192 {
+        if *props.MaxTokens > 8192 {
             return utils.ToPtr(8192)
         }
     case globals.SparkDesk:
-        if *props.Token > 4096 {
+        if *props.MaxTokens > 4096 {
             return utils.ToPtr(4096)
         }
     }
 
-    return props.Token
+    return props.MaxTokens
 }
 
-func (c *ChatInstance) GetMessages(props *ChatProps) []Message {
+func (c *ChatInstance) GetMessages(props *adaptercommon.ChatProps) []Message {
     var messages []Message
     for _, message := range props.Message {
         if message.Role == globals.Tool {
@@ -54,7 +45,7 @@ func (c *ChatInstance) GetMessages(props *ChatProps) []Message {
     return messages
 }
 
-func (c *ChatInstance) GetFunctionCalling(props *ChatProps) *FunctionsPayload {
+func (c *ChatInstance) GetFunctionCalling(props *adaptercommon.ChatProps) *FunctionsPayload {
     if props.Model != globals.SparkDeskV3 || props.Tools == nil {
         return nil
     }
@@ -102,9 +93,11 @@ func getChoice(form *ChatResponse) *globals.Chunk {
     }
 }
 
-func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, hook globals.Hook) error {
+func (c *ChatInstance) CreateStreamChatRequest(props *adaptercommon.ChatProps, hook globals.Hook) error {
+    endpoint := fmt.Sprintf("%s/%s/chat", c.Endpoint, TransformAddr(props.Model))
+
     var conn *utils.WebSocket
-    if conn = utils.NewWebsocketClient(c.GenerateUrl()); conn == nil {
+    if conn = utils.NewWebsocketClient(c.GenerateUrl(endpoint)); conn == nil {
         return fmt.Errorf("sparkdesk error: websocket connection failed")
     }
     defer conn.DeferClose()
@@ -121,7 +114,7 @@ func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, hook globals.Ho
         },
         Parameter: RequestParameter{
             Chat: ChatParameter{
-                Domain:   c.Model,
+                Domain:   TransformModel(props.Model),
                 MaxToken: GetToken(props),
             },
         },
@@ -1,6 +1,7 @@
 package sparkdesk
 
 import (
+    factory "chat/adapter/common"
     "chat/globals"
     "crypto/hmac"
     "crypto/sha256"
@@ -15,7 +16,6 @@ type ChatInstance struct {
     AppId     string
     ApiSecret string
     ApiKey    string
-    Model     string
     Endpoint  string
 }
 
@@ -45,24 +45,23 @@ func TransformModel(model string) string {
     }
 }
 
-func NewChatInstance(conf globals.ChannelConfig, model string) *ChatInstance {
+func NewChatInstanceFromConfig(conf globals.ChannelConfig) factory.Factory {
     params := conf.SplitRandomSecret(3)
 
     return &ChatInstance{
         AppId:     params[0],
         ApiSecret: params[1],
         ApiKey:    params[2],
-        Model:     TransformModel(model),
-        Endpoint:  fmt.Sprintf("%s/%s/chat", conf.GetEndpoint(), TransformAddr(model)),
+        Endpoint:  conf.GetEndpoint(),
     }
 }
 
-func (c *ChatInstance) CreateUrl(host, date, auth string) string {
+func (c *ChatInstance) CreateUrl(endpoint, host, date, auth string) string {
     v := make(url.Values)
     v.Add("host", host)
     v.Add("date", date)
     v.Add("authorization", auth)
-    return fmt.Sprintf("%s?%s", c.Endpoint, v.Encode())
+    return fmt.Sprintf("%s?%s", endpoint, v.Encode())
 }
 
 func (c *ChatInstance) Sign(data, key string) string {
@@ -72,8 +71,8 @@ func (c *ChatInstance) Sign(data, key string) string {
 }
 
 // GenerateUrl will generate the signed url for sparkdesk api
-func (c *ChatInstance) GenerateUrl() string {
-    uri, err := url.Parse(c.Endpoint)
+func (c *ChatInstance) GenerateUrl(endpoint string) string {
+    uri, err := url.Parse(endpoint)
     if err != nil {
         return ""
     }
@@ -96,5 +95,5 @@ func (c *ChatInstance) GenerateUrl() string {
         ),
     ))
 
-    return c.CreateUrl(uri.Host, date, authorization)
+    return c.CreateUrl(endpoint, uri.Host, date, authorization)
 }
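Note: with Model removed from ChatInstance, one sparkdesk instance per channel config now serves every model; the model-specific websocket path is derived per request and threaded through the URL signing. A condensed view of the flow, assembled from the hunks above (simplified, not a verbatim excerpt):

// Inside CreateStreamChatRequest: derive the per-request endpoint, then let
// GenerateUrl parse it, sign host+date with the API secret, and append the
// auth parameters as a query string via CreateUrl.
endpoint := fmt.Sprintf("%s/%s/chat", c.Endpoint, TransformAddr(props.Model))

conn := utils.NewWebsocketClient(c.GenerateUrl(endpoint))
if conn == nil {
    return fmt.Errorf("sparkdesk error: websocket connection failed")
}
defer conn.DeferClose()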
@@ -1,22 +1,13 @@
 package zhinao
 
 import (
+    adaptercommon "chat/adapter/common"
     "chat/globals"
     "chat/utils"
     "fmt"
     "strings"
 )
 
-type ChatProps struct {
-    Model             string
-    Message           []globals.Message
-    Token             *int
-    TopP              *float32
-    TopK              *int
-    Temperature       *float32
-    RepetitionPenalty *float32
-}
-
 func (c *ChatInstance) GetChatEndpoint() string {
     return fmt.Sprintf("%s/v1/chat/completions", c.GetEndpoint())
 }
@@ -30,10 +21,10 @@ func (c *ChatInstance) GetModel(model string) string {
     }
 }
 
-func (c *ChatInstance) GetChatBody(props *ChatProps, stream bool) interface{} {
+func (c *ChatInstance) GetChatBody(props *adaptercommon.ChatProps, stream bool) interface{} {
     // 2048 is the max token for 360GPT
-    if props.Token != nil && *props.Token > 2048 {
-        props.Token = utils.ToPtr(2048)
+    if props.MaxTokens != nil && *props.MaxTokens > 2048 {
+        props.MaxTokens = utils.ToPtr(2048)
     }
 
     return ChatRequest{
@@ -45,7 +36,7 @@ func (c *ChatInstance) GetChatBody(props *ChatProps, stream bool) interface{} {
 
             return &message
         }),
-        MaxToken:    props.Token,
+        MaxToken:    props.MaxTokens,
         Stream:      stream,
         Temperature: props.Temperature,
         TopP:        props.TopP,
@@ -55,7 +46,7 @@ func (c *ChatInstance) GetChatBody(props *ChatProps, stream bool) interface{} {
     }
 }
 
 // CreateChatRequest is the native http request body for zhinao
-func (c *ChatInstance) CreateChatRequest(props *ChatProps) (string, error) {
+func (c *ChatInstance) CreateChatRequest(props *adaptercommon.ChatProps) (string, error) {
     res, err := utils.Post(
         c.GetChatEndpoint(),
         c.GetHeader(),
@@ -76,7 +67,7 @@ func (c *ChatInstance) CreateChatRequest(props *ChatProps) (string, error) {
 }
 
 // CreateStreamChatRequest is the stream response body for zhinao
-func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, callback globals.Hook) error {
+func (c *ChatInstance) CreateStreamChatRequest(props *adaptercommon.ChatProps, callback globals.Hook) error {
     buf := ""
     cursor := 0
     chunk := ""
@@ -1,6 +1,7 @@
 package zhinao
 
 import (
+    factory "chat/adapter/common"
     "chat/globals"
     "fmt"
 )
@@ -32,7 +33,7 @@ func NewChatInstance(endpoint, apiKey string) *ChatInstance {
     }
 }
 
-func NewChatInstanceFromConfig(conf globals.ChannelConfig) *ChatInstance {
+func NewChatInstanceFromConfig(conf globals.ChannelConfig) factory.Factory {
     return NewChatInstance(
         conf.GetEndpoint(),
         conf.GetRandomSecret(),
@@ -1,19 +1,13 @@
 package zhipuai
 
 import (
+    adaptercommon "chat/adapter/common"
     "chat/globals"
     "chat/utils"
     "fmt"
     "strings"
 )
 
-type ChatProps struct {
-    Model       string
-    Message     []globals.Message
-    Temperature *float32 `json:"temperature,omitempty"`
-    TopP        *float32 `json:"top_p,omitempty"`
-}
-
 func (c *ChatInstance) GetChatEndpoint(model string) string {
     return fmt.Sprintf("%s/api/paas/v3/model-api/%s/sse-invoke", c.GetEndpoint(), c.GetModel(model))
 }
@@ -47,7 +41,7 @@ func (c *ChatInstance) FormatMessages(messages []globals.Message) []globals.Mess
     return messages
 }
 
-func (c *ChatInstance) GetBody(props *ChatProps) ChatRequest {
+func (c *ChatInstance) GetBody(props *adaptercommon.ChatProps) ChatRequest {
     return ChatRequest{
         Prompt: c.FormatMessages(props.Message),
         TopP:   props.TopP,
@@ -55,7 +49,7 @@ func (c *ChatInstance) GetBody(props *ChatProps) ChatRequest {
     }
 }
 
-func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, hook globals.Hook) error {
+func (c *ChatInstance) CreateStreamChatRequest(props *adaptercommon.ChatProps, hook globals.Hook) error {
     return utils.EventSource(
         "POST",
         c.GetChatEndpoint(props.Model),
@@ -1,6 +1,7 @@
 package zhipuai
 
 import (
+    factory "chat/adapter/common"
     "chat/globals"
     "chat/utils"
     "github.com/dgrijalva/jwt-go"
@@ -47,6 +48,6 @@ func NewChatInstance(endpoint, apikey string) *ChatInstance {
     }
 }
 
-func NewChatInstanceFromConfig(conf globals.ChannelConfig) *ChatInstance {
+func NewChatInstanceFromConfig(conf globals.ChannelConfig) factory.Factory {
     return NewChatInstance(conf.GetEndpoint(), conf.GetRandomSecret())
 }
@@ -1,7 +1,7 @@
 package generation
 
 import (
-    "chat/adapter"
+    "chat/adapter/common"
     "chat/admin"
     "chat/channel"
     "chat/globals"
@@ -17,10 +17,10 @@ func CreateGeneration(group, model, prompt, path string, hook func(buffer *utils
     message := GenerateMessage(prompt)
     buffer := utils.NewBuffer(model, message, channel.ChargeInstance.GetCharge(model))
 
-    err := channel.NewChatRequest(group, &adapter.ChatProps{
-        Model: model,
+    err := channel.NewChatRequest(group, &adaptercommon.ChatProps{
+        OriginalModel: model,
         Message: message,
         Buffer: *buffer,
     }, func(data *globals.Chunk) error {
         buffer.WriteChunk(data)
         hook(buffer, data.Content)
@@ -32,18 +32,18 @@ export type ChannelInfo = {
 export const ChannelTypes: Record<string, string> = {
   openai: "OpenAI",
   azure: "Azure OpenAI",
-  claude: "Claude",
-  slack: "Slack",
-  sparkdesk: "讯飞星火",
-  chatglm: "智谱 ChatGLM",
-  qwen: "通义千问",
-  hunyuan: "腾讯混元",
-  zhinao: "360 智脑",
-  baichuan: "百川 AI",
-  skylark: "火山方舟",
-  bing: "New Bing",
+  claude: "Anthropic Claude",
   palm: "Google Gemini",
-  midjourney: "Midjourney",
+  midjourney: "Midjourney Proxy",
+  sparkdesk: "讯飞星火 SparkDesk",
+  chatglm: "智谱 ChatGLM",
+  qwen: "通义千问 TongYi",
+  hunyuan: "腾讯混元 Hunyuan",
+  zhinao: "360智脑 360GLM",
+  baichuan: "百川大模型 BaichuanAI",
+  skylark: "云雀大模型 SkylarkLLM",
+  bing: "New Bing",
+  slack: "Slack Claude",
 };
 
 export const ChannelInfos: Record<string, ChannelInfo> = {
@@ -169,7 +169,7 @@ const chatSlice = createSlice({
       });
 
       const instance = conversation.messages[conversation.messages.length - 1];
-      instance.content += message.message;
+      if (message.message.length > 0) instance.content += message.message;
       if (message.keyword) instance.keyword = message.keyword;
       if (message.quota) instance.quota = message.quota;
       if (message.end) instance.end = message.end;
@@ -2,6 +2,7 @@ package channel
 
 import (
     "chat/adapter"
+    "chat/adapter/common"
     "chat/globals"
     "chat/utils"
     "fmt"
@@ -9,10 +10,10 @@ import (
     "time"
 )
 
-func NewChatRequest(group string, props *adapter.ChatProps, hook globals.Hook) error {
-    ticker := ConduitInstance.GetTicker(props.Model, group)
+func NewChatRequest(group string, props *adaptercommon.ChatProps, hook globals.Hook) error {
+    ticker := ConduitInstance.GetTicker(props.OriginalModel, group)
     if ticker == nil || ticker.IsEmpty() {
-        return fmt.Errorf("cannot find channel for model %s", props.Model)
+        return fmt.Errorf("cannot find channel for model %s", props.OriginalModel)
     }
 
     var err error
@@ -23,14 +24,14 @@ func NewChatRequest(group string, props *adapter.ChatProps, hook globals.Hook) e
                 return nil
             }
 
-            globals.Warn(fmt.Sprintf("[channel] caught error %s for model %s at channel %s", err.Error(), props.Model, channel.GetName()))
+            globals.Warn(fmt.Sprintf("[channel] caught error %s for model %s at channel %s", err.Error(), props.OriginalModel, channel.GetName()))
         }
     }
 
-    globals.Info(fmt.Sprintf("[channel] channels are exhausted for model %s", props.Model))
+    globals.Info(fmt.Sprintf("[channel] channels are exhausted for model %s", props.OriginalModel))
 
     if err == nil {
-        err = fmt.Errorf("channels are exhausted for model %s", props.Model)
+        err = fmt.Errorf("channels are exhausted for model %s", props.OriginalModel)
     }
 
     return err
@@ -78,9 +79,14 @@ func StoreCache(cache *redis.Client, hash string, index int64, buffer *utils.Buf
     cache.Set(cache.Context(), key, raw, expire)
 }
 
-func NewChatRequestWithCache(cache *redis.Client, buffer *utils.Buffer, group string, props *adapter.ChatProps, hook globals.Hook) (bool, error) {
+func NewChatRequestWithCache(cache *redis.Client, buffer *utils.Buffer, group string, props *adaptercommon.ChatProps, hook globals.Hook) (bool, error) {
     hash := utils.Md5Encrypt(utils.Marshal(props))
-    idx, hit, err := PreflightCache(cache, props.Model, hash, buffer, hook)
+
+    if len(props.OriginalModel) == 0 {
+        props.OriginalModel = props.Model
+    }
+
+    idx, hit, err := PreflightCache(cache, props.OriginalModel, hash, buffer, hook)
     if hit {
         return true, err
     }
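Note: NewChatRequestWithCache backfills OriginalModel before anything reads it, since the cache key is hashed from the whole props while channel code later rewrites Model to the channel-mapped name. A small illustration of the invariant (values are hypothetical):

// Hypothetical example of the invariant established above.
props := &adaptercommon.ChatProps{Model: "gpt-4"} // caller sets Model only

if len(props.OriginalModel) == 0 {
    props.OriginalModel = props.Model // OriginalModel is now "gpt-4"
}

// From here on, channel code may overwrite props.Model with a mapped name,
// while props.OriginalModel keeps the user-facing name for tickers, retries
// and log messages.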
@@ -12,8 +12,6 @@ func Run() bool {
         Help()
     case "invite":
         CreateInvitationCommand(param)
-    case "filter":
-        FilterApiKeyCommand(param)
     case "token":
         CreateTokenCommand(param)
     case "root":
@@ -1,18 +0,0 @@
-package cli
-
-import (
-    "chat/adapter/chatgpt"
-    "fmt"
-    "strings"
-)
-
-func FilterApiKeyCommand(args []string) {
-    data := strings.Trim(strings.TrimSpace(GetArgString(args, 0)), "\"")
-    endpoint := "https://api.openai.com"
-    keys := strings.Split(data, "|")
-
-    available := chatgpt.FilterKeysNative(endpoint, keys)
-
-    outputInfo("filter", fmt.Sprintf("filtered %d keys, %d available, %d unavailable", len(keys), len(available), len(keys)-len(available)))
-    fmt.Println(strings.Join(available, "|"))
-}
@@ -10,6 +10,7 @@ type ChannelConfig interface {
     SplitRandomSecret(num int) []string
     GetEndpoint() string
     ProcessError(err error) error
+    GetId() int
 }
 
 type AuthLike interface {
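Note: ChannelConfig gains GetId, giving callers a stable numeric identity for a channel. None of the hunks shown here consume it yet; a hypothetical example of the kind of call it enables, using only methods visible in this diff:

// Hypothetical consumer: tag a log line with the channel's numeric id.
func logChannelError(conf globals.ChannelConfig, err error) {
    globals.Warn(fmt.Sprintf("[channel] error from channel #%d (%s): %s",
        conf.GetId(), conf.GetEndpoint(), err.Error()))
}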
@@ -2,6 +2,7 @@ package manager
 
 import (
     "chat/adapter"
+    "chat/adapter/common"
     "chat/addition/web"
     "chat/admin"
     "chat/auth"
@@ -90,7 +91,7 @@ func ChatHandler(conn *Connection, user *auth.User, instance *conversation.Conve
     hit, err := channel.NewChatRequestWithCache(
         cache, buffer,
         auth.GetGroup(db, user),
-        &adapter.ChatProps{
+        &adaptercommon.ChatProps{
             Model: model,
             Message: segment,
             Buffer: *buffer,
@@ -1,7 +1,7 @@
 package manager
 
 import (
-    "chat/adapter"
+    "chat/adapter/common"
     "chat/addition/web"
     "chat/admin"
     "chat/auth"
@@ -76,8 +76,8 @@ func ChatRelayAPI(c *gin.Context) {
     }
 }
 
-func getChatProps(form RelayForm, messages []globals.Message, buffer *utils.Buffer, plan bool) *adapter.ChatProps {
-    return &adapter.ChatProps{
+func getChatProps(form RelayForm, messages []globals.Message, buffer *utils.Buffer, plan bool) *adaptercommon.ChatProps {
+    return &adaptercommon.ChatProps{
         Model: form.Model,
         Message: messages,
         MaxTokens: form.MaxTokens,
@@ -1,7 +1,7 @@
 package manager
 
 import (
-    "chat/adapter"
+    "chat/adapter/common"
     "chat/addition/web"
     "chat/admin"
     "chat/auth"
@@ -37,7 +37,7 @@ func NativeChatHandler(c *gin.Context, user *auth.User, model string, message []
     hit, err := channel.NewChatRequestWithCache(
         cache, buffer,
         auth.GetGroup(db, user),
-        &adapter.ChatProps{
+        &adaptercommon.ChatProps{
             Model: model,
             Message: segment,
             Buffer: *buffer,
@@ -1,7 +1,7 @@
 package manager
 
 import (
-    "chat/adapter"
+    "chat/adapter/common"
     "chat/admin"
     "chat/auth"
     "chat/channel"
@@ -57,8 +57,8 @@ func ImagesRelayAPI(c *gin.Context) {
     createRelayImageObject(c, form, prompt, created, user, supportRelayPlan())
 }
 
-func getImageProps(form RelayImageForm, messages []globals.Message, buffer *utils.Buffer) *adapter.ChatProps {
-    return &adapter.ChatProps{
+func getImageProps(form RelayImageForm, messages []globals.Message, buffer *utils.Buffer) *adaptercommon.ChatProps {
+    return &adaptercommon.ChatProps{
         Model: form.Model,
         Message: messages,
         MaxTokens: utils.ToPtr(-1),