mirror of https://github.com/coaidev/coai.git
synced 2025-05-19 04:50:14 +09:00

add tencent hunyuan model

This commit is contained in:
parent e8fc050996
commit 3e03da83c6
@@ -73,9 +73,9 @@
 - [x] Stable Diffusion XL
 - [x] LLaMa 2 (70b, 13b, 7b)
 - [x] Code LLaMa (34b, 13b, 7b)
+- [x] Tencent Hunyuan
 - [ ] RWKV
 - [ ] Azure OpenAI
-- [ ] Tencent Hunyuan
 - [ ] Baidu Qianfan

 ## 📚 预览 | Screenshots
@@ -4,6 +4,7 @@ import (
 	"chat/adapter/bing"
 	"chat/adapter/claude"
 	"chat/adapter/dashscope"
+	"chat/adapter/hunyuan"
 	"chat/adapter/midjourney"
 	"chat/adapter/oneapi"
 	"chat/adapter/palm2"
@@ -67,6 +68,11 @@ func NewChatRequest(props *ChatProps, hook globals.Hook) error {
 			Model: props.Model,
 			Messages: props.Message,
 		}, hook)
+	} else if globals.IsHunyuanModel(props.Model) {
+		return hunyuan.NewChatInstanceFromConfig().CreateStreamChatRequest(&hunyuan.ChatProps{
+			Model: props.Model,
+			Messages: props.Message,
+		}, hook)
 	}

 	return hook("Sorry, we cannot find the model you are looking for. Please try another model.")
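For context (not part of this commit): a minimal caller sketch showing how the new Hunyuan branch above is reached through the shared adapter entry point. It assumes globals.Hook is a plain func(data string) error callback and uses the globals.Hunyuan constant added later in this commit.

package main

import (
	"fmt"

	"chat/adapter"
	"chat/globals"
)

func main() {
	err := adapter.NewChatRequest(&adapter.ChatProps{
		Model:   globals.Hunyuan, // "hunyuan", dispatched via globals.IsHunyuanModel
		Message: []globals.Message{{Role: globals.User, Content: "你好"}},
	}, func(data string) error {
		fmt.Print(data) // print each streamed delta as it arrives
		return nil
	})
	if err != nil {
		fmt.Println("chat request failed:", err)
	}
}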
@@ -105,6 +105,7 @@ func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, callback global

 	buf := ""
 	cursor := 0
+	chunk := ""
 	instruct := props.Model == globals.GPT3TurboInstruct

 	err := utils.EventSource(
@@ -114,6 +115,7 @@ func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, callback global
 		c.GetChatBody(props, true),
 		func(data string) error {
 			data, err := c.ProcessLine(instruct, buf, data)
+			chunk += data

 			if err != nil {
 				if strings.HasPrefix(err.Error(), "chatgpt error") {
@@ -138,7 +140,7 @@ func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, callback global

 	if err != nil {
 		return err
-	} else if cursor == 0 {
+	} else if len(chunk) == 0 {
 		return fmt.Errorf("empty response")
 	}

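The chunk accumulator added above lets the adapter distinguish a stream that ended without content from a successful response. A standalone sketch of that guard, using local helper names rather than the project's own functions:

package main

import (
	"errors"
	"fmt"
)

// collect mirrors the guard introduced above: every processed delta is appended to
// chunk, and a stream that ends with nothing accumulated is reported as an error.
func collect(deltas []string) (string, error) {
	chunk := ""
	for _, data := range deltas {
		chunk += data
	}
	if len(chunk) == 0 {
		return "", errors.New("empty response")
	}
	return chunk, nil
}

func main() {
	if _, err := collect(nil); err != nil {
		fmt.Println(err) // empty response
	}
	out, _ := collect([]string{"Hello", ", world"})
	fmt.Println(out) // Hello, world
}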
@@ -11,7 +11,7 @@ import (
 func (c *ChatInstance) Test() bool {
 	result, err := c.CreateChatRequest(&ChatProps{
 		Model: globals.GPT3Turbo,
-		Message: []globals.Message{{Role: "user", Content: "hi"}},
+		Message: []globals.Message{{Role: globals.User, Content: "hi"}},
 		Token: 1,
 	})
 	if err != nil {
@@ -34,7 +34,7 @@ func (c *ChatInstance) ConvertMessage(message []globals.Message) string {

 	var result string
 	for i, item := range message {
-		if i == 0 && item.Role == "assistant" {
+		if i == 0 && item.Role == globals.Assistant {
 			// skip first assistant message
 			continue
 		}
adapter/hunyuan/chat.go (Normal file, 55 lines)
@@ -0,0 +1,55 @@
package hunyuan

import (
	"chat/globals"
	"context"
	"fmt"
)

type ChatProps struct {
	Model    string
	Messages []globals.Message
}

func (c *ChatInstance) FormatMessages(messages []globals.Message) []globals.Message {
	var result []globals.Message
	for _, message := range messages {
		switch message.Role {
		case globals.System:
			result = append(result, globals.Message{Role: globals.User, Content: message.Content})
		case globals.Assistant, globals.User:
			bound := len(result) > 0 && result[len(result)-1].Role == message.Role
			if bound {
				result[len(result)-1].Content += message.Content
			} else {
				result = append(result, message)
			}
		default:
			result = append(result, message)
		}
	}

	return result
}

func (c *ChatInstance) CreateStreamChatRequest(props *ChatProps, callback globals.Hook) error {
	credential := NewCredential(c.GetSecretId(), c.GetSecretKey())
	client := NewInstance(c.GetAppId(), credential)
	channel, err := client.Chat(context.Background(), NewRequest(Stream, c.FormatMessages(props.Messages)))
	if err != nil {
		return fmt.Errorf("tencent hunyuan error: %+v", err)
	}

	for chunk := range channel {
		if chunk.Error.Code != 0 {
			fmt.Printf("tencent hunyuan error: %+v\n", chunk.Error)
			break
		}

		if err := callback(chunk.Choices[0].Delta.Content); err != nil {
			return err
		}
	}

	return nil
}
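As a reading aid (illustration only, not part of the commit): FormatMessages above downgrades system prompts to user messages and merges adjacent messages that share a role, presumably so the upstream Hunyuan API receives strictly alternating user/assistant turns. A small sketch of the effect:

package main

import (
	"fmt"

	"chat/adapter/hunyuan"
	"chat/globals"
)

func main() {
	instance := hunyuan.NewChatInstance(0, "", "") // credentials are not needed for formatting
	out := instance.FormatMessages([]globals.Message{
		{Role: globals.System, Content: "你是一个助手。"},
		{Role: globals.User, Content: "你好"},
		{Role: globals.Assistant, Content: "你好,"},
		{Role: globals.Assistant, Content: "有什么可以帮你?"},
	})
	for _, m := range out {
		fmt.Printf("%s: %s\n", m.Role, m.Content)
	}
	// user: 你是一个助手。你好          (system became user, then merged with the next user turn)
	// assistant: 你好,有什么可以帮你?  (adjacent assistant turns merged)
}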
adapter/hunyuan/sdk.go (Normal file, 267 lines)
@@ -0,0 +1,267 @@
package hunyuan

/*
 * Copyright (c) 2017-2018 THL A29 Limited, a Tencent company. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import (
	"bufio"
	"bytes"
	"chat/globals"
	"context"
	"crypto/hmac"
	"crypto/sha1"
	"encoding/base64"
	"encoding/json"
	"fmt"
	"github.com/google/uuid"
	"io"
	"net/http"
	"sort"
	"strconv"
	"strings"
	"time"
)

const (
	protocol = "https"
	host     = "hunyuan.cloud.tencent.com"
	path     = "/hyllm/v1/chat/completions?"
)

const (
	Synchronize = iota
	Stream
)

func getUrl() string {
	return fmt.Sprintf("%s://%s%s", protocol, host, path)
}

func getFullPath() string {
	return host + path
}

type ResponseChoices struct {
	FinishReason string            `json:"finish_reason,omitempty"`
	Messages     []globals.Message `json:"messages,omitempty"`
	Delta        globals.Message   `json:"delta,omitempty"`
}

type ResponseUsage struct {
	PromptTokens     int64 `json:"prompt_tokens,omitempty"`
	TotalTokens      int64 `json:"total_tokens,omitempty"`
	CompletionTokens int64 `json:"completion_tokens,omitempty"`
}

type ResponseError struct {
	Message string `json:"message,omitempty"`
	Code    int    `json:"code,omitempty"`
}

type StreamDelta struct {
	Content string `json:"content"`
}

type ChatRequest struct {
	AppID       int64             `json:"app_id"`
	SecretID    string            `json:"secret_id"`
	Timestamp   int               `json:"timestamp"`
	Expired     int               `json:"expired"`
	QueryID     string            `json:"query_id"`
	Temperature float64           `json:"temperature"`
	TopP        float64           `json:"top_p"`
	Stream      int               `json:"stream"`
	Messages    []globals.Message `json:"messages"`
}

type ChatResponse struct {
	Choices []ResponseChoices `json:"choices,omitempty"`
	Created string            `json:"created,omitempty"`
	ID      string            `json:"id,omitempty"`
	Usage   ResponseUsage     `json:"usage,omitempty"`
	Error   ResponseError     `json:"error,omitempty"`
	Note    string            `json:"note,omitempty"`
	ReqID   string            `json:"req_id,omitempty"`
}

type Credential struct {
	SecretID  string
	SecretKey string
}

func NewCredential(secretID, secretKey string) *Credential {
	return &Credential{SecretID: secretID, SecretKey: secretKey}
}

type HunyuanClient struct {
	Credential *Credential
	AppID      int64
}

func NewInstance(appId int64, credential *Credential) *HunyuanClient {
	return &HunyuanClient{
		Credential: credential,
		AppID:      appId,
	}
}

func NewRequest(mod int, messages []globals.Message) ChatRequest {
	queryID := uuid.NewString()
	return ChatRequest{
		Timestamp:   int(time.Now().Unix()),
		Expired:     int(time.Now().Unix()) + 24*60*60,
		Temperature: 0,
		TopP:        0.8,
		Messages:    messages,
		QueryID:     queryID,
		Stream:      mod,
	}
}

func (t *HunyuanClient) getHttpReq(ctx context.Context, req ChatRequest) (*http.Request, error) {
	req.AppID = t.AppID
	req.SecretID = t.Credential.SecretID
	signatureUrl := t.buildURL(req)
	signature := t.genSignature(signatureUrl)
	body, err := json.Marshal(req)
	if err != nil {
		return nil, fmt.Errorf("json marshal err: %+v", err)
	}

	httpReq, err := http.NewRequestWithContext(ctx, "POST", getUrl(), bytes.NewReader(body))
	if err != nil {
		return nil, fmt.Errorf("new http request err: %+v", err)
	}
	httpReq.Header.Set("Authorization", signature)
	httpReq.Header.Set("Content-Type", "application/json")

	if req.Stream == Stream {
		httpReq.Header.Set("Cache-Control", "no-cache")
		httpReq.Header.Set("Connection", "keep-alive")
		httpReq.Header.Set("Accept", "text/event-Stream")
	}

	return httpReq, nil
}

func (t *HunyuanClient) Chat(ctx context.Context, req ChatRequest) (<-chan ChatResponse, error) {
	res := make(chan ChatResponse, 1)
	httpReq, err := t.getHttpReq(ctx, req)
	if err != nil {
		return nil, fmt.Errorf("do general http request err: %+v", err)
	}
	httpResp, err := http.DefaultClient.Do(httpReq)
	if err != nil {
		return nil, fmt.Errorf("do chat request err: %+v", err)
	}

	if httpResp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("do chat request failed status code :%d", httpResp.StatusCode)
	}

	if req.Stream == Synchronize {
		err = t.synchronize(httpResp, res)
		return res, err
	}
	go t.stream(httpResp, res)
	return res, nil
}

func (t *HunyuanClient) synchronize(httpResp *http.Response, res chan ChatResponse) (err error) {
	defer func() {
		httpResp.Body.Close()
		close(res)
	}()
	var chatResp ChatResponse
	respBody, err := io.ReadAll(httpResp.Body)
	if err != nil {
		return fmt.Errorf("read response body err: %+v", err)
	}

	if err = json.Unmarshal(respBody, &chatResp); err != nil {
		return fmt.Errorf("json unmarshal err: %+v", err)
	}
	res <- chatResp
	return
}

func (t *HunyuanClient) stream(httpResp *http.Response, res chan ChatResponse) {
	defer func() {
		httpResp.Body.Close()
		close(res)
	}()
	reader := bufio.NewReader(httpResp.Body)
	for {
		raw, err := reader.ReadBytes('\n')
		if err != nil {
			if err == io.EOF {
				return
			}
			res <- ChatResponse{Error: ResponseError{Message: fmt.Sprintf("tencent error: read stream data failed: %+v", err), Code: 500}}
			return
		}

		data := strings.TrimSpace(string(raw))
		if data == "" || !strings.HasPrefix(data, "data: ") {
			continue
		}

		var chatResponse ChatResponse
		if err := json.Unmarshal([]byte(data[6:]), &chatResponse); err != nil {
			res <- ChatResponse{Error: ResponseError{Message: fmt.Sprintf("json unmarshal err: %+v", err), Code: 500}}
			return
		}

		res <- chatResponse
		if chatResponse.Choices[0].FinishReason == "stop" {
			return
		}
	}
}

func (t *HunyuanClient) genSignature(url string) string {
	mac := hmac.New(sha1.New, []byte(t.Credential.SecretKey))
	signURL := url
	mac.Write([]byte(signURL))
	sign := mac.Sum([]byte(nil))
	return base64.StdEncoding.EncodeToString(sign)
}

func (t *HunyuanClient) getMessages(messages []globals.Message) string {
	var message string
	for _, msg := range messages {
		message += fmt.Sprintf(`{"role":"%s","content":"%s"},`, msg.Role, msg.Content)
	}
	message = strings.TrimSuffix(message, ",")

	return message
}

func (t *HunyuanClient) buildURL(req ChatRequest) string {
	params := make([]string, 0)
	params = append(params, "app_id="+strconv.FormatInt(req.AppID, 10))
	params = append(params, "secret_id="+req.SecretID)
	params = append(params, "timestamp="+strconv.Itoa(req.Timestamp))
	params = append(params, "query_id="+req.QueryID)
	params = append(params, "temperature="+strconv.FormatFloat(req.Temperature, 'f', -1, 64))
	params = append(params, "top_p="+strconv.FormatFloat(req.TopP, 'f', -1, 64))
	params = append(params, "stream="+strconv.Itoa(req.Stream))
	params = append(params, "expired="+strconv.Itoa(req.Expired))
	params = append(params, fmt.Sprintf("messages=[%s]", t.getMessages(req.Messages)))

	sort.Sort(sort.StringSlice(params))
	return getFullPath() + strings.Join(params, "&")
}
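For reference, the signing scheme implemented by buildURL and genSignature above reduces to: sort the query parameters, append them to host + path, HMAC-SHA1 the resulting string with the secret key, and base64-encode the digest into the Authorization header. A self-contained sketch of that step, with placeholder values and independent of the SDK types:

package main

import (
	"crypto/hmac"
	"crypto/sha1"
	"encoding/base64"
	"fmt"
	"sort"
	"strings"
)

// sign mirrors buildURL + genSignature above: sorted params appended to host+path,
// then HMAC-SHA1 with the secret key, base64-encoded for the Authorization header.
func sign(secretKey string, params []string) string {
	sort.Strings(params)
	payload := "hunyuan.cloud.tencent.com/hyllm/v1/chat/completions?" + strings.Join(params, "&")
	mac := hmac.New(sha1.New, []byte(secretKey))
	mac.Write([]byte(payload))
	return base64.StdEncoding.EncodeToString(mac.Sum(nil))
}

func main() {
	// Placeholder credentials and parameters, for illustration only.
	params := []string{"app_id=123", "secret_id=demo-id", "timestamp=1700000000", "stream=1"}
	fmt.Println("Authorization:", sign("demo-secret-key", params))
}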
adapter/hunyuan/struct.go (Normal file, 37 lines)
@@ -0,0 +1,37 @@
package hunyuan

import "github.com/spf13/viper"

type ChatInstance struct {
	AppId     int64
	SecretId  string
	SecretKey string
}

func (c *ChatInstance) GetAppId() int64 {
	return c.AppId
}

func (c *ChatInstance) GetSecretId() string {
	return c.SecretId
}

func (c *ChatInstance) GetSecretKey() string {
	return c.SecretKey
}

func NewChatInstance(appId int64, secretId string, secretKey string) *ChatInstance {
	return &ChatInstance{
		AppId:     appId,
		SecretId:  secretId,
		SecretKey: secretKey,
	}
}

func NewChatInstanceFromConfig() *ChatInstance {
	return NewChatInstance(
		viper.GetInt64("hunyuan.app_id"),
		viper.GetString("hunyuan.secret_id"),
		viper.GetString("hunyuan.secret_key"),
	)
}
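The configuration keys below are taken directly from NewChatInstanceFromConfig above; the values are placeholders, and the exact config file layout is an assumption since it is not shown in this commit (the keys would normally sit under a hunyuan: section of the file that viper reads). The equivalent can be expressed programmatically:

package main

import (
	"fmt"

	"chat/adapter/hunyuan"
	"github.com/spf13/viper"
)

func main() {
	// Placeholder credentials; in a deployment these come from the config file read by viper.
	viper.Set("hunyuan.app_id", int64(1300000000))
	viper.Set("hunyuan.secret_id", "your-secret-id")
	viper.Set("hunyuan.secret_key", "your-secret-key")

	instance := hunyuan.NewChatInstanceFromConfig()
	fmt.Println(instance.GetAppId()) // 1300000000
}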
@@ -34,8 +34,8 @@ func (c *ChatInstance) GetModel(model string) string {
 func (c *ChatInstance) FormatMessages(messages []globals.Message) []globals.Message {
 	messages = utils.DeepCopy[[]globals.Message](messages)
 	for i := range messages {
-		if messages[i].Role == "system" {
-			messages[i].Role = "user"
+		if messages[i].Role == globals.System {
+			messages[i].Role = globals.User
 		}
 	}
 	return messages
@@ -23,7 +23,7 @@ type Response struct {

 func GenerateArticle(c *gin.Context, user *auth.User, model string, hash string, title string, prompt string, enableWeb bool) Response {
 	message, quota := manager.NativeChatHandler(c, user, model, []globals.Message{{
-		Role: "user",
+		Role: globals.User,
 		Content: fmt.Sprintf("%s\n%s", prompt, title),
 	}}, enableWeb)

@@ -65,7 +65,7 @@ func HandlerAPI(c *gin.Context) {
 	}

 	response, quota := manager.NativeChatHandler(c, nil, globals.GPT3Turbo0613, []globals.Message{
-		{Role: "user", Content: message},
+		{Role: globals.User, Content: message},
 	}, body.Web)

 	c.JSON(http.StatusOK, gin.H{
@@ -45,11 +45,11 @@ func CreateGeneration(model string, prompt string, path string, plan bool, hook

 func GenerateMessage(prompt string) []globals.Message {
 	return []globals.Message{
-		{Role: "system", Content: "你将生成项目,可以支持任何编程语言,请不要出现“我不能提供”的字样,你需要在代码中提供注释,以及项目的使用文档README.md,结果返回json格式,请不要返回任何多余内容,格式为:\n{\"result\": {[file]: [code], ...}}"},
-		{Role: "user", Content: "python后端"},
-		{Role: "assistant", Content: "{\n \"result\": {\n \"app.py\": \"from flask import Flask\\n\\napp = Flask(__name__)\\n\\n\\n@app.route('/')\\ndef hello_world():\\n return 'Hello, World!'\\n\\n\\nif __name__ == '__main__':\\n app.run()\",\n \"requirements.txt\": \"flask\\n\",\n \"README.md\": \"# Python 后端\\n本项目是一个简单的python后端示例, 使用`flask`框架构建后端。\n你可以按照下列步骤运行此应用,flask将在本地服务器(默认是在http://127.0.0.1:5000/)上运行。当你在浏览器中访问该URL时,将看到显示Hello, World!的页面。\\n\\n这只是一个简单的项目,Flask还支持更多功能和路由规则,你可以提供更多的信息和需要进一步扩展和定制Flask应用。\\n\\n### 1. 初始化: \\n```shell\\npip install -r requirements.txt\\n```\\n### 2. 运行\\n```shell\\npython app.py\\n```\"\n }\n}"},
-		{Role: "user", Content: "golang fiber websocket项目"},
-		{Role: "assistant", Content: "{\n \"result\": {\n \"main.go\": \"package main\\n\\nimport (\\n\\t\"log\\\"\\n\\n\\t\"github.com/gofiber/fiber/v2\\\"\\n\\t\"github.com/gofiber/websocket/v2\\\"\\n)\\n\\nfunc main() {\\n\\tapp := fiber.New()\\n\\n\\tapp.Get(\\\"/\\\", func(c *fiber.Ctx) error {\\n\\t\\treturn c.SendString(\\\"Hello, World!\\\")\\n\\t})\\n\\n\\tapp.Get(\\\"/ws\\\", websocket.New(func(c *websocket.Conn) {\\n\\t\\tfor {\\n\\t\\t\\tmt, message, err := c.ReadMessage()\\n\\t\\t\\tif err != nil {\\n\\t\\t\\t\\tlog.Println(\\\"read error:\\\", err)\\n\\t\\t\\t\\tbreak\\n\\t\\t\\t}\\n\\t\\t\\tlog.Printf(\\\"received: %s\\\", message)\\n\\t\\t\\terr = c.WriteMessage(mt, message)\\n\\t\\t\\tif err != nil {\\n\\t\\t\\t\\tlog.Println(\\\"write error:\\\", err)\\n\\t\\t\\t\\tbreak\\n\\t\\t\\t}\\n\\t\\t}\\n\\t}))\\n\\n\\tlog.Fatal(app.Listen(\\\":3000\\\"))\\n}\",\n \"go.mod\": \"module fiber-websocket\\n\\ngo 1.16\\n\\nrequire (\\n\\tgithub.com/gofiber/fiber/v2 v2.12.1\\n\\tgithub.com/gofiber/websocket/v2 v2.10.2\\n)\",\n \"README.md\": \"# Golang Fiber WebSocket项目\\n\\n这个项目是一个使用Golang和Fiber框架构建的WebSocket服务器示例。\\n\\n### 1. 初始化:\\n```shell\\ngo mod init fiber-websocket\\n```\\n\\n### 2. 安装依赖:\\n```shell\\ngo get github.com/gofiber/fiber/v2\\n``` \\n```shell\\ngo get github.com/gofiber/websocket/v2\\n```\\n\\n### 3. 创建main.go文件,将以下代码复制粘贴:\\n\\n```go\\npackage main\\n\\nimport (\\n\\t\\\"log\\\"\\n\\n\\t\\\"github.com/gofiber/fiber/v2\\\"\\n\\t\\\"github.com/gofiber/websocket/v2\\\"\\n)\\n\\nfunc main() {\\n\\tapp := fiber.New()\\n\\n\\tapp.Get(\\\"/\\\", func(c *fiber.Ctx) error {\\n\\t\\treturn c.SendString(\\\"Hello, World!\\\")\\n\\t})\\n\\n\\tapp.Get(\\\"/ws\\\", websocket.New(func(c *websocket.Conn) {\\n\\t\\tfor {\\n\\t\\t\\tmt, message, err := c.ReadMessage()\\n\\t\\t\\tif err != nil {\\n\\t\\t\\t\\tlog.Println(\\\"read error:\\\", err)\\n\\t\\t\\t\\tbreak\\n\\t\\t\\t}\\n\\t\\t\\tlog.Printf(\\\"received: %s\\\", message)\\n\\t\\t\\terr = c.WriteMessage(mt, message)\\n\\t\\t\\tif err != nil {\\n\\t\\t\\t\\tlog.Println(\\\"write error:\\\", err)\\n\\t\\t\\t\\tbreak\\n\\t\\t\\t}\\n\\t\\t}\\n\\t}))\\n\\n\\tlog.Fatal(app.Listen(\\\":3000\\\"))\\n}\\n```\\n\\n### 4. 运行应用程序:\\n```shell\\ngo run main.go\\n```\\n\\n应用程序将在本地服务器(默认是在http://localhost:3000)上运行。当你在浏览器中访问`http://localhost:3000`时,将看到显示\"Hello, World!\"的页面。你还可以访问`http://localhost:3000/ws`来测试WebSocket功能。\n\n这只是一个简单的示例,Fiber框架提供了更多的功能和路由规则,你可以在此基础上进行进一步扩展和定制。\n\n注意:在运行应用程序之前,请确保已经安装了Go语言开发环境。"},
-		{Role: "user", Content: prompt},
+		{Role: globals.System, Content: "你将生成项目,可以支持任何编程语言,请不要出现“我不能提供”的字样,你需要在代码中提供注释,以及项目的使用文档README.md,结果返回json格式,请不要返回任何多余内容,格式为:\n{\"result\": {[file]: [code], ...}}"},
+		{Role: globals.User, Content: "python后端"},
+		{Role: globals.Assistant, Content: "{\n \"result\": {\n \"app.py\": \"from flask import Flask\\n\\napp = Flask(__name__)\\n\\n\\n@app.route('/')\\ndef hello_world():\\n return 'Hello, World!'\\n\\n\\nif __name__ == '__main__':\\n app.run()\",\n \"requirements.txt\": \"flask\\n\",\n \"README.md\": \"# Python 后端\\n本项目是一个简单的python后端示例, 使用`flask`框架构建后端。\n你可以按照下列步骤运行此应用,flask将在本地服务器(默认是在http://127.0.0.1:5000/)上运行。当你在浏览器中访问该URL时,将看到显示Hello, World!的页面。\\n\\n这只是一个简单的项目,Flask还支持更多功能和路由规则,你可以提供更多的信息和需要进一步扩展和定制Flask应用。\\n\\n### 1. 初始化: \\n```shell\\npip install -r requirements.txt\\n```\\n### 2. 运行\\n```shell\\npython app.py\\n```\"\n }\n}"},
+		{Role: globals.User, Content: "golang fiber websocket项目"},
+		{Role: globals.Assistant, Content: "{\n \"result\": {\n \"main.go\": \"package main\\n\\nimport (\\n\\t\"log\\\"\\n\\n\\t\"github.com/gofiber/fiber/v2\\\"\\n\\t\"github.com/gofiber/websocket/v2\\\"\\n)\\n\\nfunc main() {\\n\\tapp := fiber.New()\\n\\n\\tapp.Get(\\\"/\\\", func(c *fiber.Ctx) error {\\n\\t\\treturn c.SendString(\\\"Hello, World!\\\")\\n\\t})\\n\\n\\tapp.Get(\\\"/ws\\\", websocket.New(func(c *websocket.Conn) {\\n\\t\\tfor {\\n\\t\\t\\tmt, message, err := c.ReadMessage()\\n\\t\\t\\tif err != nil {\\n\\t\\t\\t\\tlog.Println(\\\"read error:\\\", err)\\n\\t\\t\\t\\tbreak\\n\\t\\t\\t}\\n\\t\\t\\tlog.Printf(\\\"received: %s\\\", message)\\n\\t\\t\\terr = c.WriteMessage(mt, message)\\n\\t\\t\\tif err != nil {\\n\\t\\t\\t\\tlog.Println(\\\"write error:\\\", err)\\n\\t\\t\\t\\tbreak\\n\\t\\t\\t}\\n\\t\\t}\\n\\t}))\\n\\n\\tlog.Fatal(app.Listen(\\\":3000\\\"))\\n}\",\n \"go.mod\": \"module fiber-websocket\\n\\ngo 1.16\\n\\nrequire (\\n\\tgithub.com/gofiber/fiber/v2 v2.12.1\\n\\tgithub.com/gofiber/websocket/v2 v2.10.2\\n)\",\n \"README.md\": \"# Golang Fiber WebSocket项目\\n\\n这个项目是一个使用Golang和Fiber框架构建的WebSocket服务器示例。\\n\\n### 1. 初始化:\\n```shell\\ngo mod init fiber-websocket\\n```\\n\\n### 2. 安装依赖:\\n```shell\\ngo get github.com/gofiber/fiber/v2\\n``` \\n```shell\\ngo get github.com/gofiber/websocket/v2\\n```\\n\\n### 3. 创建main.go文件,将以下代码复制粘贴:\\n\\n```go\\npackage main\\n\\nimport (\\n\\t\\\"log\\\"\\n\\n\\t\\\"github.com/gofiber/fiber/v2\\\"\\n\\t\\\"github.com/gofiber/websocket/v2\\\"\\n)\\n\\nfunc main() {\\n\\tapp := fiber.New()\\n\\n\\tapp.Get(\\\"/\\\", func(c *fiber.Ctx) error {\\n\\t\\treturn c.SendString(\\\"Hello, World!\\\")\\n\\t})\\n\\n\\tapp.Get(\\\"/ws\\\", websocket.New(func(c *websocket.Conn) {\\n\\t\\tfor {\\n\\t\\t\\tmt, message, err := c.ReadMessage()\\n\\t\\t\\tif err != nil {\\n\\t\\t\\t\\tlog.Println(\\\"read error:\\\", err)\\n\\t\\t\\t\\tbreak\\n\\t\\t\\t}\\n\\t\\t\\tlog.Printf(\\\"received: %s\\\", message)\\n\\t\\t\\terr = c.WriteMessage(mt, message)\\n\\t\\t\\tif err != nil {\\n\\t\\t\\t\\tlog.Println(\\\"write error:\\\", err)\\n\\t\\t\\t\\tbreak\\n\\t\\t\\t}\\n\\t\\t}\\n\\t}))\\n\\n\\tlog.Fatal(app.Listen(\\\":3000\\\"))\\n}\\n```\\n\\n### 4. 运行应用程序:\\n```shell\\ngo run main.go\\n```\\n\\n应用程序将在本地服务器(默认是在http://localhost:3000)上运行。当你在浏览器中访问`http://localhost:3000`时,将看到显示\"Hello, World!\"的页面。你还可以访问`http://localhost:3000/ws`来测试WebSocket功能。\n\n这只是一个简单的示例,Fiber框架提供了更多的功能和路由规则,你可以在此基础上进行进一步扩展和定制。\n\n注意:在运行应用程序之前,请确保已经安装了Go语言开发环境。"},
+		{Role: globals.User, Content: prompt},
 	}
 }
@@ -20,7 +20,7 @@ func ChatWithWeb(hook Hook, message []globals.Message, long bool) []globals.Mess
 		data = utils.GetSegmentString(data, 3000)
 	}
 	return utils.Insert(message, 0, globals.Message{
-		Role: "system",
+		Role: globals.System,
 		Content: fmt.Sprintf("你将扮演AI问答助手,你的知识库不是离线的,而是可以实时联网的,你可以提供实时联网的信息。"+
 			"当前时间: %s, 实时联网搜索结果:%s",
 			time.Now().Format("2006-01-02 15:04:05"), data,
@@ -37,34 +37,34 @@ func StringCleaner(content string) string {

 func GetKeywordPoint(hook Hook, message []globals.Message) string {
 	resp, _ := hook([]globals.Message{{
-		Role: "system",
+		Role: globals.System,
 		Content: "If the user input content require ONLINE SEARCH to get the results, please output these keywords to refine the data Interval with space, remember not to answer other content, json format return, format {\"keyword\": \"...\" }",
 	}, {
-		Role: "user",
+		Role: globals.User,
 		Content: "你是谁",
 	}, {
-		Role: "assistant",
+		Role: globals.Assistant,
 		Content: "{\"keyword\":\"\"}",
 	}, {
-		Role: "user",
+		Role: globals.User,
 		Content: "那fystart起始页是什么 和深能科创有什么关系",
 	}, {
-		Role: "assistant",
+		Role: globals.Assistant,
 		Content: "{\"keyword\":\"fystart起始页 深能科创 关系\"}",
 	}, {
-		Role: "user",
+		Role: globals.User,
 		Content: "1+1=?",
 	}, {
-		Role: "assistant",
+		Role: globals.Assistant,
 		Content: "{\"keyword\":\"\"}",
 	}, {
-		Role: "user",
+		Role: globals.User,
 		Content: "?",
 	}, {
-		Role: "assistant",
+		Role: globals.Assistant,
 		Content: "{\"keyword\":\"\"}",
 	}, {
-		Role: "user",
+		Role: globals.User,
 		Content: message[len(message)-1].Content,
 	}}, 40)
 	keyword := utils.UnmarshalJson[map[string]interface{}](resp)
@@ -60,6 +60,8 @@ export const modelColorMapper: Record<string, string> = {
   "code-llama-34b": "#01a9f0",
   "code-llama-13b": "#01a9f0",
   "code-llama-7b": "#01a9f0",
+
+  "hunyuan": "#0052d9"
 };

 export function getModelColor(model: string): string {
@@ -8,7 +8,7 @@ import {
 } from "@/utils/env.ts";
 import { getMemory } from "@/utils/memory.ts";

-export const version = "3.6.21";
+export const version = "3.6.22";
 export const dev: boolean = getDev();
 export const deploy: boolean = true;
 export let rest_api: string = getRestApi(deploy);
@@ -39,6 +39,9 @@ export const supportModels: Model[] = [
   { id: "qwen-turbo-net", name: "通义千问 Turbo X", free: false, auth: true },
   { id: "qwen-turbo", name: "通义千问 Turbo", free: false, auth: true },

+  // huyuan models
+  { id: "hunyuan", name: "腾讯混元 Pro", free: false, auth: true },
+
   // zhipu models
   {
     id: "zhipu-chatglm-turbo",
@@ -29,6 +29,8 @@ func CanEnableModel(db *sql.DB, user *User, model string) bool {
 	case globals.LLaMa27B, globals.LLaMa213B, globals.LLaMa270B,
 		globals.CodeLLaMa34B, globals.CodeLLaMa13B, globals.CodeLLaMa7B:
 		return user != nil && user.GetQuota(db) >= 1
+	case globals.Hunyuan:
+		return user != nil && user.GetQuota(db) >= 1
 	default:
 		return user != nil
 	}
globals/constant.go (Normal file, 7 lines)
@@ -0,0 +1,7 @@
package globals

const (
	System    = "system"
	User      = "user"
	Assistant = "assistant"
)
@@ -88,6 +88,7 @@ const (
 	CodeLLaMa34B = "code-llama-34b"
 	CodeLLaMa13B = "code-llama-13b"
 	CodeLLaMa7B  = "code-llama-7b"
+	Hunyuan      = "hunyuan"
 )

 var GPT3TurboArray = []string{
@@ -198,6 +199,7 @@ var AllModels = []string{
 	StableDiffusion, Midjourney, MidjourneyFast, MidjourneyTurbo,
 	LLaMa270B, LLaMa213B, LLaMa27B,
 	CodeLLaMa34B, CodeLLaMa13B, CodeLLaMa7B,
+	Hunyuan,
 }

 func in(value string, slice []string) bool {
@@ -269,6 +271,10 @@ func IsMidjourneyModel(model string) bool {
 	return in(model, MidjourneyModelArray)
 }

+func IsHunyuanModel(model string) bool {
+	return model == Hunyuan
+}
+
 func IsLongContextModel(model string) bool {
 	return in(model, LongContextModelArray)
 }
@@ -180,21 +180,21 @@ func (c *Conversation) InsertMessages(messages []globals.Message, index int) {

 func (c *Conversation) AddMessageFromUser(message string) {
 	c.AddMessage(globals.Message{
-		Role: "user",
+		Role: globals.User,
 		Content: message,
 	})
 }

 func (c *Conversation) AddMessageFromAssistant(message string) {
 	c.AddMessage(globals.Message{
-		Role: "assistant",
+		Role: globals.Assistant,
 		Content: message,
 	})
 }

 func (c *Conversation) AddMessageFromSystem(message string) {
 	c.AddMessage(globals.Message{
-		Role: "system",
+		Role: globals.System,
 		Content: message,
 	})
 }
@@ -2,6 +2,7 @@ package manager

 import (
 	"chat/auth"
+	"chat/globals"
 	"chat/manager/conversation"
 	"chat/utils"
 	"fmt"
@@ -71,7 +72,7 @@ func ChatAPI(c *gin.Context) {
 	case ShareType:
 		instance.LoadSharing(db, form.Message)
 	case RestartType:
-		if message := instance.RemoveLatestMessage(); message.Role != "assistant" {
+		if message := instance.RemoveLatestMessage(); message.Role != globals.Assistant {
 			return fmt.Errorf("message type error")
 		}
 		response := ChatHandler(buf, user, instance)
@@ -152,7 +152,7 @@ func sendTranshipmentResponse(c *gin.Context, form TranshipmentForm, id string,
 		Choices: []Choice{
 			{
 				Index: 0,
-				Message: globals.Message{Role: "assistant", Content: buffer.ReadWithDefault(defaultMessage)},
+				Message: globals.Message{Role: globals.Assistant, Content: buffer.ReadWithDefault(defaultMessage)},
 				FinishReason: "stop",
 			},
 		},
@@ -175,7 +175,7 @@ func getStreamTranshipmentForm(id string, created int64, form TranshipmentForm,
 			{
 				Index: 0,
 				Delta: globals.Message{
-					Role: "assistant",
+					Role: globals.Assistant,
 					Content: data,
 				},
 				FinishReason: utils.Multi[interface{}](end, "stop", nil),
@@ -25,7 +25,7 @@ func GetWeightByModel(model string) int {

 		globals.SparkDesk, globals.SparkDeskV2, globals.SparkDeskV3,
 		globals.QwenTurbo, globals.QwenPlus, globals.QwenTurboNet, globals.QwenPlusNet,
-		globals.BingPrecise, globals.BingCreative, globals.BingBalanced:
+		globals.BingPrecise, globals.BingCreative, globals.BingBalanced, globals.Hunyuan:
 		return 3
 	case globals.GPT3Turbo0301, globals.GPT3Turbo16k0301,
 		globals.ZhiPuChatGLMTurbo, globals.ZhiPuChatGLMLite, globals.ZhiPuChatGLMStd, globals.ZhiPuChatGLMPro:
@@ -105,6 +105,8 @@ func CountInputToken(model string, v []globals.Message) float32 {
 		return float32(CountTokenPrice(v, model)) / 1000 * 0.08
 	case globals.QwenPlus, globals.QwenPlusNet:
 		return float32(CountTokenPrice(v, model)) / 1000 * 0.2
+	case globals.Hunyuan:
+		return float32(CountTokenPrice(v, model)) / 1000 * 1
 	default:
 		return 0
 	}
@@ -141,6 +143,8 @@ func CountOutputToken(model string, t int) float32 {
 		return float32(t*GetWeightByModel(model)) / 1000 * 0.08
 	case globals.QwenPlus, globals.QwenPlusNet:
 		return float32(t*GetWeightByModel(model)) / 1000 * 0.2
+	case globals.Hunyuan:
+		return float32(t*GetWeightByModel(model)) / 1000 * 1
 	case globals.StableDiffusion:
 		return 0.25
 	case globals.Midjourney:
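To make the new pricing branch concrete: Hunyuan is given a weight of 3 in GetWeightByModel and billed at 1 quota per 1,000 weighted output tokens in CountOutputToken, so a 500-token completion costs 1.5 quota. A back-of-the-envelope sketch with local helper names (not the project's own functions):

package main

import "fmt"

const hunyuanWeight = 3 // value returned by GetWeightByModel for globals.Hunyuan above

// outputQuota mirrors the CountOutputToken branch added above for the Hunyuan model.
func outputQuota(tokens int) float32 {
	return float32(tokens*hunyuanWeight) / 1000 * 1
}

func main() {
	fmt.Println(outputQuota(500)) // 1.5
}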