Sh1n3zZ 2025-02-11 14:00:54 +08:00
commit 934f78b90c
4 changed files with 45 additions and 14 deletions

View File

@@ -13,6 +13,7 @@ type ChatInstance struct {
     Endpoint         string
     ApiKey           string
     isFirstReasoning bool
+    isReasonOver     bool
 }

 func (c *ChatInstance) GetEndpoint() string {
@@ -90,7 +91,18 @@ func (c *ChatInstance) ProcessLine(data string) (string, error) {
     }

     delta := form.Choices[0].Delta
     if delta.ReasoningContent != nil {
+        if *delta.ReasoningContent == "" && delta.Content != "" {
+            if !c.isReasonOver {
+                c.isReasonOver = true
+                return fmt.Sprintf("\n\n%s", delta.Content), nil
+            }
+        }
+    }
+
+    if delta.ReasoningContent != nil && delta.Content == "" {
         content := *delta.ReasoningContent
         // replace double newlines with single newlines for markdown
         if strings.Contains(content, "\n\n") {
@@ -146,6 +158,7 @@ func (c *ChatInstance) CreateChatRequest(props *adaptercommon.ChatProps) (string
 func (c *ChatInstance) CreateStreamChatRequest(props *adaptercommon.ChatProps, callback globals.Hook) error {
     c.isFirstReasoning = true
+    c.isReasonOver = false

     err := utils.EventScanner(&utils.EventScannerProps{
         Method: "POST",
         Uri:    c.GetChatEndpoint(),
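
Taken together, these hunks add a small per-stream state machine: once the model stops emitting reasoning_content and starts emitting ordinary content, the adapter prefixes the first answer chunk with a blank line, exactly once, and the flag is re-armed at the start of every stream. Below is a minimal, self-contained sketch of that logic; the Delta shape and the process helper are illustrative assumptions distilled from the hunks, not the adapter's real types.

package main

import (
    "fmt"
    "strings"
)

// Delta approximates the streamed choice delta used above; this field
// set is an assumption inferred from the diff, not the adapter's real type.
type Delta struct {
    ReasoningContent *string
    Content          string
}

type processor struct {
    isReasonOver bool
}

// process mirrors the new ProcessLine branches: emit a blank-line separator
// exactly once when reasoning ends and the answer begins, and collapse
// double newlines while reasoning tokens are still streaming.
func (p *processor) process(d Delta) string {
    if d.ReasoningContent != nil {
        if *d.ReasoningContent == "" && d.Content != "" && !p.isReasonOver {
            p.isReasonOver = true
            return fmt.Sprintf("\n\n%s", d.Content)
        }
    }
    if d.ReasoningContent != nil && d.Content == "" {
        return strings.ReplaceAll(*d.ReasoningContent, "\n\n", "\n")
    }
    return d.Content
}

func main() {
    p := &processor{}
    reasoning, done := "weighing the options...", ""
    fmt.Print(p.process(Delta{ReasoningContent: &reasoning}))           // reasoning tokens
    fmt.Print(p.process(Delta{ReasoningContent: &done, Content: "42"})) // "\n\n42", flag flips once
    fmt.Print(p.process(Delta{Content: "."}))                           // plain answer tokens
}

The reset of c.isReasonOver in CreateStreamChatRequest scopes the separator to each request rather than to the lifetime of the ChatInstance.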

View File

@@ -45,18 +45,35 @@ func (c *ChatInstance) GetChatBody(props *adaptercommon.ChatProps, stream bool)
     messages := formatMessages(props)

-    return ChatRequest{
+    // o1, o3 compatibility
+    isNewModel := len(props.Model) >= 2 && (props.Model[:2] == "o1" || props.Model[:2] == "o3")
+
+    var temperature *float32
+    if isNewModel {
+        temp := float32(1.0)
+        temperature = &temp
+    } else {
+        temperature = props.Temperature
+    }
+
+    request := ChatRequest{
         Model:            props.Model,
         Messages:         messages,
-        MaxToken:         props.MaxTokens,
         Stream:           stream,
         PresencePenalty:  props.PresencePenalty,
         FrequencyPenalty: props.FrequencyPenalty,
-        Temperature:      props.Temperature,
+        Temperature:      temperature,
         TopP:             props.TopP,
         Tools:            props.Tools,
         ToolChoice:       props.ToolChoice,
     }
+
+    if isNewModel {
+        request.MaxCompletionTokens = props.MaxTokens
+    } else {
+        request.MaxToken = props.MaxTokens
+    }
+
+    return request
 }

 // CreateChatRequest is the native http request body for openai
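
For context on the branch above: OpenAI's o1/o3 reasoning models only accept the default temperature of 1 and take max_completion_tokens in place of the older max_tokens parameter, which is why GetChatBody now builds the request in two steps. A standalone sketch of the resulting payloads; chatRequest and buildBody are simplified stand-ins for the real types in this file.

package main

import (
    "encoding/json"
    "os"
)

// chatRequest is a trimmed-down stand-in for the ChatRequest type in this
// repo; only the fields relevant to the o1/o3 branch are kept.
type chatRequest struct {
    Model               string   `json:"model"`
    MaxTokens           *int     `json:"max_tokens,omitempty"`
    MaxCompletionTokens *int     `json:"max_completion_tokens,omitempty"`
    Temperature         *float32 `json:"temperature,omitempty"`
}

// buildBody reproduces the branching added above: o1*/o3* models get a
// pinned temperature of 1 and max_completion_tokens instead of max_tokens.
func buildBody(model string, maxTokens *int, temperature *float32) chatRequest {
    isNewModel := len(model) >= 2 && (model[:2] == "o1" || model[:2] == "o3")

    req := chatRequest{Model: model}
    if isNewModel {
        temp := float32(1.0)
        req.Temperature = &temp
        req.MaxCompletionTokens = maxTokens
    } else {
        req.Temperature = temperature
        req.MaxTokens = maxTokens
    }
    return req
}

func main() {
    limit, temp := 1024, float32(0.7)
    enc := json.NewEncoder(os.Stdout)
    // {"model":"gpt-4o","max_tokens":1024,"temperature":0.7}
    _ = enc.Encode(buildBody("gpt-4o", &limit, &temp))
    // {"model":"o1-mini","max_completion_tokens":1024,"temperature":1}
    _ = enc.Encode(buildBody("o1-mini", &limit, &temp))
}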

View File

@@ -29,6 +29,7 @@ type ChatRequest struct {
     Model               string      `json:"model"`
     Messages            interface{} `json:"messages"`
     MaxToken            *int        `json:"max_tokens,omitempty"`
+    MaxCompletionTokens *int        `json:"max_completion_tokens,omitempty"`
     Stream              bool        `json:"stream"`
     PresencePenalty     *float32    `json:"presence_penalty,omitempty"`
     FrequencyPenalty    *float32    `json:"frequency_penalty,omitempty"`
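
A note on the field's shape: like MaxToken above it, the new field is a pointer with omitempty, so a request that never sets it omits the key entirely, whereas a plain int would still serialize as max_completion_tokens: 0. A quick illustration:

package main

import (
    "encoding/json"
    "fmt"
)

// body shows why the new field is a pointer with omitempty: a nil *int
// drops the key from the payload, while a zero-valued plain int would
// still be sent as "max_completion_tokens": 0.
type body struct {
    MaxCompletionTokens *int `json:"max_completion_tokens,omitempty"`
}

func main() {
    n := 256
    with, _ := json.Marshal(body{MaxCompletionTokens: &n})
    without, _ := json.Marshal(body{})
    fmt.Println(string(with))    // {"max_completion_tokens":256}
    fmt.Println(string(without)) // {}
}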

View File

@@ -7,7 +7,7 @@ import {
 import { syncSiteInfo } from "@/admin/api/info.ts";
 import { setAxiosConfig } from "@/conf/api.ts";

-export const version = "3.11.1"; // version of the current build
+export const version = "3.11.2"; // version of the current build
 export const dev: boolean = getDev(); // is in development mode (for debugging, in localhost origin)
 export const deploy: boolean = true; // is production environment (for api endpoint)
 export const tokenField = getTokenField(deploy); // token field name for storing token