add independent channel

Zhang Minghan 2023-10-30 12:18:14 +08:00
parent ebc5edcdc8
commit 60e096f302
11 changed files with 50 additions and 45 deletions

View File

@@ -155,11 +155,6 @@ openai:
endpoint: https://api.openai.com
apikey: sk-...|sk-...
reverse:
endpoint: .../imitate
hash: gpt-4-... # reverse model hash
apikey: ...
slack:
bot_id: ...
token: ...

View File

@@ -14,7 +14,7 @@ import (
type ChatProps struct {
Model string
Reversible bool
Plan bool
Infinity bool
Message []globals.Message
Token int
@@ -24,14 +24,14 @@ func NewChatRequest(props *ChatProps, hook globals.Hook) error {
if globals.IsChatGPTModel(props.Model) {
instance := chatgpt.NewChatInstanceFromModel(&chatgpt.InstanceProps{
Model: props.Model,
Reversible: props.Reversible,
Plan: props.Plan,
})
return instance.CreateStreamChatRequest(&chatgpt.ChatProps{
Model: props.Model,
Message: props.Message,
Token: utils.Multi(
props.Token == 0,
utils.Multi(globals.IsGPT4Model(props.Model) || props.Infinity, -1, 2000),
utils.Multi(globals.IsGPT4Model(props.Model) || props.Plan || props.Infinity, -1, 2000),
props.Token,
),
}, hook)
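
A note on the token fallback: utils.Multi appears to act as a generic ternary (an assumption; the sketch below uses a hypothetical standalone copy), so a request with no explicit Token now resolves to -1 (uncapped) for GPT-4, Plan or Infinity requests, and to the 2000-token default otherwise.

package main

import "fmt"

// Multi mirrors the assumed behaviour of utils.Multi: return tv when the
// condition holds, fv otherwise. The repo's real helper may differ.
func Multi[T any](condition bool, tv T, fv T) T {
	if condition {
		return tv
	}
	return fv
}

func main() {
	token := 0       // caller passed no explicit limit
	isGPT4 := false  // a gpt-3.5-turbo request
	plan := true     // subscription user
	infinity := false

	resolved := Multi(token == 0,
		Multi(isGPT4 || plan || infinity, -1, 2000),
		token,
	)
	fmt.Println(resolved) // -1 (uncapped); 2000 without plan/infinity on GPT-3.5
}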

View File

@@ -14,7 +14,7 @@ type ChatInstance struct {
type InstanceProps struct {
Model string
Reversible bool
Plan bool
}
func (c *ChatInstance) GetEndpoint() string {
@@ -51,20 +51,22 @@ func NewChatInstanceFromModel(props *InstanceProps) *ChatInstance {
case globals.GPT4,
globals.GPT40314,
globals.GPT40613:
if props.Reversible {
return NewChatInstanceFromConfig("reverse")
} else {
return NewChatInstanceFromConfig("gpt4")
}
case globals.GPT432k,
globals.GPT432k0613,
globals.GPT432k0314:
return NewChatInstanceFromConfig("gpt4")
case globals.GPT3Turbo16k,
case globals.GPT3Turbo,
globals.GPT3Turbo0613,
globals.GPT3Turbo0301,
globals.GPT3Turbo16k,
globals.GPT3Turbo16k0301,
globals.GPT3Turbo16k0613:
if props.Plan {
return NewChatInstanceFromConfig("subscribe")
}
return NewChatInstanceFromConfig("gpt3")
default:
return NewChatInstanceFromConfig("gpt3")
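
A usage sketch of the new routing (a fragment; imports of the repo's chatgpt and globals packages are omitted, and the "subscribe" channel is assumed to be defined in the config):

// A gpt-3.5-turbo request from a subscriber now resolves to the dedicated
// subscription channel instead of the shared "gpt3" one; the GPT-4 cases no
// longer consult the removed reverse channel.
instance := chatgpt.NewChatInstanceFromModel(&chatgpt.InstanceProps{
	Model: globals.GPT3Turbo,
	Plan:  true,
})
// instance is equivalent to chatgpt.NewChatInstanceFromConfig("subscribe")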

View File

@@ -18,7 +18,7 @@ func CreateGeneration(model string, prompt string, path string, plan bool, hook
if err := adapter.NewChatRequest(&adapter.ChatProps{
Model: model,
Message: message,
Reversible: plan,
Plan: plan,
Infinity: true,
}, func(data string) error {
buffer.Write(data)
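
Because generations pass Infinity: true and no explicit Token, the adapter's fallback above resolves the limit to -1, so generated projects are not clamped to the 2000-token default even for non-plan users on GPT-3.5.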

View File

@@ -53,6 +53,15 @@ function ModelSelector(props: ModelSelectorProps) {
} as SelectItemProps;
}
const channel = ["gpt-3.5-turbo-0613", "gpt-3.5-turbo-16k-0613"];
if (subscription && channel.includes(model.id)) {
return {
name: model.id,
value: model.name,
badge: { variant: "gold", name: "pro" },
} as SelectItemProps;
}
return {
name: model.id,
value: model.name,

View File

@@ -8,7 +8,7 @@ import {
} from "@/utils/env.ts";
import { getMemory } from "@/utils/memory.ts";
export const version = "3.6.1";
export const version = "3.6.2";
export const dev: boolean = getDev();
export const deploy: boolean = true;
export let rest_api: string = getRestApi(deploy);

View File

@@ -29,6 +29,10 @@ func CanEnableModel(db *sql.DB, user *User, model string) bool {
func HandleSubscriptionUsage(db *sql.DB, cache *redis.Client, user *User, model string) bool {
subscription := user.IsSubscribe(db)
if globals.IsGPT3TurboModel(model) {
// independent channel for subscription users
return subscription
}
if globals.IsGPT4NativeModel(model) {
return subscription && IncreaseSubscriptionUsage(cache, user, globals.GPT4, 50)
} else if model == globals.Claude2100k {
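
HandleSubscriptionUsage now treats the GPT-3.5 family as covered by the subscription itself: the check reduces to the subscription flag, with no usage counter, because those requests are served by the independent channel, while native GPT-4 stays metered through IncreaseSubscriptionUsage with a 50-call allowance.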

View File

@@ -29,11 +29,6 @@ openai:
endpoint: https://api.openai.com
apikey: sk-...|sk-...
reverse:
endpoint: .../imitate
hash: gpt-4-... # reverse model hash
apikey: ...
slack:
bot_id: ...
token: ...

View File

@@ -118,7 +118,7 @@ func ChatHandler(conn *Connection, user *auth.User, instance *conversation.Conve
err := adapter.NewChatRequest(&adapter.ChatProps{
Model: model,
Message: segment,
Reversible: plan,
Plan: plan,
}, func(data string) error {
if signal := conn.PeekWithType(StopType); signal != nil {
// stop signal from client

View File

@@ -40,7 +40,7 @@ func NativeChatHandler(c *gin.Context, user *auth.User, model string, message []
buffer := utils.NewBuffer(model, segment)
if err := adapter.NewChatRequest(&adapter.ChatProps{
Model: model,
Reversible: plan,
Plan: plan,
Message: segment,
}, func(resp string) error {
buffer.Write(resp)

View File

@@ -115,7 +115,7 @@ func sendTranshipmentResponse(c *gin.Context, form TranshipmentForm, id string,
err := adapter.NewChatRequest(&adapter.ChatProps{
Model: form.Model,
Message: form.Messages,
Reversible: plan,
Plan: plan,
Token: form.MaxTokens,
}, func(data string) error {
buffer.Write(data)
@@ -179,7 +179,7 @@ func sendStreamTranshipmentResponse(c *gin.Context, form TranshipmentForm, id st
if err := adapter.NewChatRequest(&adapter.ChatProps{
Model: form.Model,
Message: form.Messages,
Reversible: plan,
Plan: plan,
Token: form.MaxTokens,
}, func(data string) error {
channel <- getStreamTranshipmentForm(id, created, form, buffer.Write(data), buffer, false)