From e6673e1c3607284c12d9cd96beefa3eaff5e0441 Mon Sep 17 00:00:00 2001
From: Zhang Minghan
Date: Sat, 22 Jul 2023 22:57:56 +0800
Subject: [PATCH] Implemented feature: chatgpt api stream real time reception

---
 api/chat.go                           |  66 ++++++++++++++++
 api/stream.go                         |   6 +-
 app/src/App.vue                       |  44 ++++++++++-
 app/src/assets/script/auth.ts         |   2 +
 app/src/assets/script/conversation.ts | 103 ++++++++++++++++++++++++++
 go.mod                                |   1 +
 go.sum                                |   2 +
 main.go                               |   2 +
 8 files changed, 221 insertions(+), 5 deletions(-)
 create mode 100644 api/chat.go

diff --git a/api/chat.go b/api/chat.go
new file mode 100644
index 0000000..05b9066
--- /dev/null
+++ b/api/chat.go
@@ -0,0 +1,66 @@
+package api
+
+import (
+	"encoding/json"
+	"github.com/gin-gonic/gin"
+	"github.com/gorilla/websocket"
+	"net/http"
+)
+
+// ChatAPI upgrades the HTTP request to a websocket connection and, for each
+// incoming JSON frame {"message": string}, streams the model response back
+// as {"message": chunk, "end": false} frames followed by a final
+// {"message": "", "end": true} terminator.
+func ChatAPI(c *gin.Context) {
+	// Accept websocket upgrades from any origin.
+	upgrader := websocket.Upgrader{
+		CheckOrigin: func(r *http.Request) bool {
+			return true
+		},
+	}
+	conn, err := upgrader.Upgrade(c.Writer, c.Request, nil)
+	if err != nil {
+		c.JSON(http.StatusOK, gin.H{
+			"status":  false,
+			"message": "",
+			"reason":  err.Error(),
+		})
+		return
+	}
+	defer func(conn *websocket.Conn) {
+		// Best-effort close; the connection is being torn down anyway.
+		_ = conn.Close()
+	}(conn)
+	for {
+		_, message, err := conn.ReadMessage()
+		if err != nil {
+			return
+		}
+
+		var form map[string]interface{}
+		if err := json.Unmarshal(message, &form); err != nil {
+			// Ignore frames that are not valid JSON.
+			continue
+		}
+		// Guard the type assertion: a missing or non-string "message"
+		// field in untrusted client input must not panic the handler.
+		prompt, ok := form["message"].(string)
+		if !ok {
+			continue
+		}
+		StreamRequest("gpt-3.5-turbo-16k", []ChatGPTMessage{
+			{
+				Role:    "user",
+				Content: prompt,
+			},
+		}, 250, func(resp string) {
+			data, _ := json.Marshal(map[string]interface{}{
+				"message": resp,
+				"end":     false,
+			})
+			_ = conn.WriteMessage(websocket.TextMessage, data)
+		})
+		data, _ := json.Marshal(map[string]interface{}{
+			"message": "",
+			"end":     true,
+		})
+		_ = conn.WriteMessage(websocket.TextMessage, data)
+	}
+}
diff --git a/api/stream.go b/api/stream.go
index 29685c3..db5deb7 100644
--- a/api/stream.go
+++ b/api/stream.go
@@ -42,7 +42,7 @@
func StreamRequest(model string, messages []ChatGPTMessage, token int, callback http.DefaultTransport.(*http.Transport).TLSClientConfig = &tls.Config{InsecureSkipVerify: true} client := &http.Client{} - req, err := http.NewRequest("POST", viper.GetString("openai.user_endpoint")+"/chat/completions", utils.ConvertBody(ChatGPTRequest{ + req, err := http.NewRequest("POST", viper.GetString("openai.anonymous_endpoint")+"/chat/completions", utils.ConvertBody(ChatGPTRequest{ Model: model, Messages: messages, MaxToken: token, @@ -53,7 +53,7 @@ func StreamRequest(model string, messages []ChatGPTMessage, token int, callback } req.Header.Set("Content-Type", "application/json") - req.Header.Set("Authorization", "Bearer "+viper.GetString("openai.user")) + req.Header.Set("Authorization", "Bearer "+viper.GetString("openai.anonymous")) res, err := client.Do(req) if err != nil { @@ -67,7 +67,7 @@ func StreamRequest(model string, messages []ChatGPTMessage, token int, callback } for { - buf := make([]byte, 1024) + buf := make([]byte, 20480) n, err := res.Body.Read(buf) if err == io.EOF { diff --git a/app/src/App.vue b/app/src/App.vue index 48820a3..7045cd1 100644 --- a/app/src/App.vue +++ b/app/src/App.vue @@ -1,6 +1,6 @@