Mirror of https://github.com/coaidev/coai.git, synced 2025-05-23 06:50:14 +09:00

Commit 934f78b90c: Merge branch 'main' of https://github.com/coaidev/coai
@@ -13,6 +13,7 @@ type ChatInstance struct {
 	Endpoint         string
 	ApiKey           string
 	isFirstReasoning bool
+	isReasonOver     bool
 }
 
 func (c *ChatInstance) GetEndpoint() string {
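For orientation, the shape these flags react to can be sketched as follows. This is inferred from the diff rather than copied from the repo: ReasoningContent must be a pointer, since ProcessLine below both nil-checks and dereferences it, and the JSON tag is an assumption.

package adapter // illustrative package name, not the repo's

// Inferred delta shape: with a *string, "field absent" (nil) and
// "field present but empty" ("") are distinct, meaningful states,
// and the new isReasonOver flag keys off exactly that distinction.
type ChoiceDelta struct {
	Content          string  `json:"content"`
	ReasoningContent *string `json:"reasoning_content,omitempty"`
}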
@@ -90,7 +91,18 @@ func (c *ChatInstance) ProcessLine(data string) (string, error) {
 	}
 
 	delta := form.Choices[0].Delta
 
 	if delta.ReasoningContent != nil {
+		if *delta.ReasoningContent == "" && delta.Content != "" {
+			if !c.isReasonOver {
+				c.isReasonOver = true
+
+				return fmt.Sprintf("\n\n%s", delta.Content), nil
+			}
+		}
+	}
+
+	if delta.ReasoningContent != nil && delta.Content == "" {
 		content := *delta.ReasoningContent
 		// replace double newlines with single newlines for markdown
 		if strings.Contains(content, "\n\n") {
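To make the new control flow concrete, here is a small self-contained sketch of the transition logic; ChoiceDelta and processDelta are assumed names (see the sketch above), and the newline-collapsing step stands in for the diff's elided body:

package main

import (
	"fmt"
	"strings"
)

type ChoiceDelta struct {
	Content          string
	ReasoningContent *string
}

type instance struct{ isReasonOver bool }

// processDelta reproduces the new control flow from ProcessLine: the
// first delta that carries an empty reasoning_content together with
// non-empty content marks the end of the reasoning section and is
// prefixed with a blank line exactly once.
func (c *instance) processDelta(d ChoiceDelta) string {
	if d.ReasoningContent != nil {
		if *d.ReasoningContent == "" && d.Content != "" {
			if !c.isReasonOver {
				c.isReasonOver = true
				return fmt.Sprintf("\n\n%s", d.Content)
			}
		}
	}
	if d.ReasoningContent != nil && d.Content == "" {
		// the diff's elided body collapses double newlines for
		// markdown; ReplaceAll is my stand-in for that step
		return strings.ReplaceAll(*d.ReasoningContent, "\n\n", "\n")
	}
	return d.Content
}

func main() {
	s := func(v string) *string { return &v }
	c := &instance{}
	for _, d := range []ChoiceDelta{
		{ReasoningContent: s("thinking...\n\nstill thinking")},
		{ReasoningContent: s(""), Content: "Answer"},
		{Content: " continues"},
	} {
		fmt.Printf("%q\n", c.processDelta(d))
	}
	// "thinking...\nstill thinking"
	// "\n\nAnswer"
	// " continues"
}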
@@ -146,6 +158,7 @@ func (c *ChatInstance) CreateChatRequest(props *adaptercommon.ChatProps) (string
 
 func (c *ChatInstance) CreateStreamChatRequest(props *adaptercommon.ChatProps, callback globals.Hook) error {
 	c.isFirstReasoning = true
+	c.isReasonOver = false
 	err := utils.EventScanner(&utils.EventScannerProps{
 		Method: "POST",
 		Uri:    c.GetChatEndpoint(),
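Why the explicit reset matters (my reading, the commit itself does not say): a ChatInstance can serve more than one stream, so a flag left set by a previous response would swallow the separator on the next one. A hypothetical helper keeping the two assignments together could look like this:

// Hypothetical helper, not in the repo: re-arm per-stream reasoning
// state before each request instead of relying on Go zero values,
// which are only correct for a freshly constructed ChatInstance.
func (c *ChatInstance) resetReasoningState() {
	c.isFirstReasoning = true
	c.isReasonOver = false
}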
@@ -45,18 +45,35 @@ func (c *ChatInstance) GetChatBody(props *adaptercommon.ChatProps, stream bool)
 
 	messages := formatMessages(props)
 
-	return ChatRequest{
+	// o1, o3 compatibility
+	isNewModel := len(props.Model) >= 2 && (props.Model[:2] == "o1" || props.Model[:2] == "o3")
+
+	var temperature *float32
+	if isNewModel {
+		temp := float32(1.0)
+		temperature = &temp
+	} else {
+		temperature = props.Temperature
+	}
+
+	request := ChatRequest{
 		Model:            props.Model,
 		Messages:         messages,
-		MaxToken:         props.MaxTokens,
 		Stream:           stream,
 		PresencePenalty:  props.PresencePenalty,
 		FrequencyPenalty: props.FrequencyPenalty,
-		Temperature:      props.Temperature,
+		Temperature:      temperature,
 		TopP:             props.TopP,
 		Tools:            props.Tools,
 		ToolChoice:       props.ToolChoice,
 	}
+
+	if isNewModel {
+		request.MaxCompletionTokens = props.MaxTokens
+	} else {
+		request.MaxToken = props.MaxTokens
+	}
+
+	return request
 }
 
 // CreateChatRequest is the native http request body for openai
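To show the net effect on the wire, here is a self-contained sketch of how the two model families now serialize differently; the struct is trimmed to the fields that differ, the values are illustrative, and the tags match the ChatRequest hunk below:

package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed-down ChatRequest carrying only the fields that differ.
type req struct {
	Model               string   `json:"model"`
	MaxToken            *int     `json:"max_tokens,omitempty"`
	MaxCompletionTokens *int     `json:"max_completion_tokens,omitempty"`
	Temperature         *float32 `json:"temperature,omitempty"`
}

func main() {
	tokens, temp := 1024, float32(0.7)
	for _, model := range []string{"gpt-4o", "o1-mini"} {
		r := req{Model: model}
		if len(model) >= 2 && (model[:2] == "o1" || model[:2] == "o3") {
			// o1/o3 path: pinned temperature, new token field
			one := float32(1.0)
			r.MaxCompletionTokens, r.Temperature = &tokens, &one
		} else {
			r.MaxToken, r.Temperature = &tokens, &temp
		}
		out, _ := json.Marshal(r)
		fmt.Println(string(out))
	}
	// {"model":"gpt-4o","max_tokens":1024,"temperature":0.7}
	// {"model":"o1-mini","max_completion_tokens":1024,"temperature":1}
}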
@@ -29,6 +29,7 @@ type ChatRequest struct {
 	Model            string      `json:"model"`
 	Messages         interface{} `json:"messages"`
 	MaxToken         *int        `json:"max_tokens,omitempty"`
+	MaxCompletionTokens *int     `json:"max_completion_tokens,omitempty"`
 	Stream           bool        `json:"stream"`
 	PresencePenalty  *float32    `json:"presence_penalty,omitempty"`
 	FrequencyPenalty *float32    `json:"frequency_penalty,omitempty"`
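A note on the pointer type (my reading of the design): making MaxCompletionTokens a *int with omitempty means an unset limit disappears from the payload entirely, while an explicit 0 would still be sent, because encoding/json omits nil pointers but not pointers to zero values:

package main

import (
	"encoding/json"
	"fmt"
)

type limits struct {
	MaxToken            *int `json:"max_tokens,omitempty"`
	MaxCompletionTokens *int `json:"max_completion_tokens,omitempty"`
}

func main() {
	n, zero := 1024, 0
	for _, l := range []limits{
		{},                        // neither limit set
		{MaxCompletionTokens: &n}, // o1/o3 path
		{MaxToken: &zero},         // explicit zero is still serialized
	} {
		out, _ := json.Marshal(l)
		fmt.Println(string(out))
	}
	// {}
	// {"max_completion_tokens":1024}
	// {"max_tokens":0}
}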
@@ -7,7 +7,7 @@ import {
 import { syncSiteInfo } from "@/admin/api/info.ts";
 import { setAxiosConfig } from "@/conf/api.ts";
 
-export const version = "3.11.1"; // version of the current build
+export const version = "3.11.2"; // version of the current build
 export const dev: boolean = getDev(); // is in development mode (for debugging, in localhost origin)
 export const deploy: boolean = true; // is production environment (for api endpoint)
 export const tokenField = getTokenField(deploy); // token field name for storing token