NextChat-U/app/client/platforms/tencent.ts

"use client";
import { ApiPath, DEFAULT_API_HOST, REQUEST_TIMEOUT_MS } from "@/app/constant";
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
import {
  ChatOptions,
  getHeaders,
  LLMApi,
  LLMModel,
  MultimodalContent,
} from "../api";
import Locale from "../../locales";
import {
  EventStreamContentType,
  fetchEventSource,
} from "@fortaine/fetch-event-source";
import { prettyObject } from "@/app/utils/format";
import { getClientConfig } from "@/app/config/client";
import { getMessageTextContent, isVisionModel } from "@/app/utils";
import mapKeys from "lodash-es/mapKeys";
import mapValues from "lodash-es/mapValues";
import isArray from "lodash-es/isArray";
import isObject from "lodash-es/isObject";
export interface OpenAIListModelResponse {
  object: string;
  data: Array<{
    id: string;
    object: string;
    root: string;
  }>;
}
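// The Hunyuan chat API expects PascalCase field names (Model, Messages, TopP, ...),
// so the request type below mirrors that convention.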
interface RequestPayload {
  Messages: {
    Role: "system" | "user" | "assistant";
    Content: string | MultimodalContent[];
  }[];
  Stream?: boolean;
  Model: string;
  Temperature: number;
  TopP: number;
}
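// Recursively rewrite object keys to PascalCase (e.g. top_p -> TopP, messages -> Messages),
// descending into nested arrays and objects, so a snake_case payload can be converted in one call.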
function capitalizeKeys(obj: any): any {
  if (isArray(obj)) {
    return obj.map(capitalizeKeys);
  } else if (isObject(obj)) {
    return mapValues(
      mapKeys(obj, (value: any, key: string) =>
        key.replace(/(^|_)(\w)/g, (m, $1, $2) => $2.toUpperCase()),
      ),
      capitalizeKeys,
    );
  } else {
    return obj;
  }
}
export class HunyuanApi implements LLMApi {
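  // Resolve the request base URL: a user-configured Tencent endpoint, the
  // DEFAULT_API_HOST proxy when running as the packaged app, or the relative
  // ApiPath.Tencent route used by the web app.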
  path(): string {
    const accessStore = useAccessStore.getState();
    let baseUrl = "";
    if (accessStore.useCustomConfig) {
      baseUrl = accessStore.tencentUrl;
    }
    if (baseUrl.length === 0) {
      const isApp = !!getClientConfig()?.isApp;
      baseUrl = isApp
        ? DEFAULT_API_HOST + "/api/proxy/tencent"
        : ApiPath.Tencent;
    }
    if (baseUrl.endsWith("/")) {
      baseUrl = baseUrl.slice(0, baseUrl.length - 1);
    }
    if (!baseUrl.startsWith("http") && !baseUrl.startsWith(ApiPath.Tencent)) {
      baseUrl = "https://" + baseUrl;
    }
    console.log("[Proxy Endpoint] ", baseUrl);
    return baseUrl;
  }
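  // Non-streaming responses carry the reply text under Choices[0].Message.Content.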
  extractMessage(res: any) {
    return res.Choices?.at(0)?.Message?.Content ?? "";
  }
  async chat(options: ChatOptions) {
    const visionModel = isVisionModel(options.config.model);
    const messages = options.messages.map((v, index) => ({
      // Tencent requires that a system role appear only at the very start of
      // Messages, so any later system message is downgraded to the user role.
      role: index !== 0 && v.role === "system" ? "user" : v.role,
      content: visionModel ? v.content : getMessageTextContent(v),
    }));
    const modelConfig = {
      ...useAppConfig.getState().modelConfig,
      ...useChatStore.getState().currentSession().mask.modelConfig,
      ...{
        model: options.config.model,
      },
    };
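    // Build the payload with snake_case keys, then convert every key to the
    // PascalCase form declared in RequestPayload.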
    const requestPayload: RequestPayload = capitalizeKeys({
      model: modelConfig.model,
      messages,
      temperature: modelConfig.temperature,
      top_p: modelConfig.top_p,
      stream: options.config.stream,
    });
console.log("[Request] Tencent payload: ", requestPayload);
const shouldStream = !!options.config.stream;
const controller = new AbortController();
options.onController?.(controller);
try {
      const chatPath = this.path();
      const chatPayload = {
        method: "POST",
        body: JSON.stringify(requestPayload),
        signal: controller.signal,
        headers: getHeaders(),
      };
      // abort the request if it takes longer than REQUEST_TIMEOUT_MS
      const requestTimeoutId = setTimeout(
        () => controller.abort(),
        REQUEST_TIMEOUT_MS,
      );
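      // Streaming path: accumulate SSE deltas and animate them; otherwise fall
      // through to a single blocking request below.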
      if (shouldStream) {
        let responseText = "";
        let remainText = "";
        let finished = false;
        // animate the response so streaming looks smooth: drain remainText a few
        // characters per frame instead of dumping each chunk at once
        function animateResponseText() {
          if (finished || controller.signal.aborted) {
            responseText += remainText;
            console.log("[Response Animation] finished");
            if (responseText?.length === 0) {
              options.onError?.(new Error("empty response from server"));
            }
            return;
          }
          if (remainText.length > 0) {
            const fetchCount = Math.max(1, Math.round(remainText.length / 60));
            const fetchText = remainText.slice(0, fetchCount);
            responseText += fetchText;
            remainText = remainText.slice(fetchCount);
            options.onUpdate?.(responseText, fetchText);
          }
          requestAnimationFrame(animateResponseText);
        }
        // start animation
        animateResponseText();
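        // deliver the final text (including any not-yet-animated remainder) exactly once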
        const finish = () => {
          if (!finished) {
            finished = true;
            options.onFinish(responseText + remainText);
          }
        };
        controller.signal.onabort = finish;
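        // stream the reply over SSE: onopen validates the response,
        // onmessage appends each Choices[0].Delta.Content chunk to remainText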
        fetchEventSource(chatPath, {
          ...chatPayload,
          async onopen(res) {
            clearTimeout(requestTimeoutId);
            const contentType = res.headers.get("content-type");
            console.log(
              "[Tencent] request response content type: ",
              contentType,
            );
            if (contentType?.startsWith("text/plain")) {
              responseText = await res.clone().text();
              return finish();
            }
            if (
              !res.ok ||
              !res.headers
                .get("content-type")
                ?.startsWith(EventStreamContentType) ||
              res.status !== 200
            ) {
              const responseTexts = [responseText];
              let extraInfo = await res.clone().text();
              try {
                const resJson = await res.clone().json();
                extraInfo = prettyObject(resJson);
              } catch {}
              if (res.status === 401) {
                responseTexts.push(Locale.Error.Unauthorized);
              }
              if (extraInfo) {
                responseTexts.push(extraInfo);
              }
              responseText = responseTexts.join("\n\n");
              return finish();
            }
          },
          onmessage(msg) {
            if (msg.data === "[DONE]" || finished) {
              return finish();
            }
            const text = msg.data;
            try {
              const json = JSON.parse(text);
              const choices = json.Choices as Array<{
                Delta: { Content: string };
              }>;
              const delta = choices[0]?.Delta?.Content;
              if (delta) {
                remainText += delta;
              }
            } catch (e) {
              console.error("[Request] parse error", text, msg);
            }
          },
          onclose() {
            finish();
          },
          onerror(e) {
            options.onError?.(e);
            throw e;
          },
          openWhenHidden: true,
        });
      } else {
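        // non-streaming: single request, then extract the full reply text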
        const res = await fetch(chatPath, chatPayload);
        clearTimeout(requestTimeoutId);
        const resJson = await res.json();
        const message = this.extractMessage(resJson);
        options.onFinish(message);
      }
    } catch (e) {
      console.log("[Request] failed to make a chat request", e);
      options.onError?.(e as Error);
    }
  }
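  // usage accounting is not implemented for Tencent; report zeros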
  async usage() {
    return {
      used: 0,
      total: 0,
    };
  }
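  // dynamic model listing is not supported for Hunyuan; return an empty list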
  async models(): Promise<LLMModel[]> {
    return [];
  }
}