feat: support dall-e model chat
@@ -6,7 +6,7 @@ import {
   ServiceProvider,
 } from "../constant";
 import { ChatMessage, ModelType, useAccessStore, useChatStore } from "../store";
-import { ChatGPTApi } from "./platforms/openai";
+import { ChatGPTApi, DalleRequestPayload } from "./platforms/openai";
 import { FileApi, FileInfo } from "./platforms/utils";
 import { GeminiProApi } from "./platforms/google";
 import { ClaudeApi } from "./platforms/anthropic";
@@ -43,6 +43,9 @@ export interface LLMConfig {
   stream?: boolean;
   presence_penalty?: number;
   frequency_penalty?: number;
+  size?: DalleRequestPayload["size"];
+  quality?: DalleRequestPayload["quality"];
+  style?: DalleRequestPayload["style"];
 }
 
 export interface LLMAgentConfig {
@@ -73,6 +76,7 @@ export interface TranscriptionOptions {
 export interface ChatOptions {
   messages: RequestMessage[];
   config: LLMConfig;
+
   onToolUpdate?: (toolName: string, toolInput: string) => void;
   onUpdate?: (message: string, chunk: string) => void;
   onFinish: (message: string) => void;
@@ -9,10 +9,17 @@ import {
   REQUEST_TIMEOUT_MS,
   ServiceProvider,
 } from "@/app/constant";
-import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
+import {
+  ChatMessageTool,
+  useAccessStore,
+  useAppConfig,
+  useChatStore,
+  usePluginStore,
+} from "@/app/store";
 import { collectModelsWithDefaultModel } from "@/app/utils/model";
-import { preProcessImageContent } from "@/app/utils/chat";
+import { preProcessImageContent, stream } from "@/app/utils/chat";
 import { cloudflareAIGatewayUrl } from "@/app/utils/cloudflare";
+import { DalleSize, DalleQuality, DalleStyle } from "@/app/typing";
 
 import {
   AgentChatOptions,
@@ -35,8 +42,8 @@ import { prettyObject } from "@/app/utils/format";
 import { getClientConfig } from "@/app/config/client";
 import {
   getMessageTextContent,
-  getMessageImages,
   isVisionModel,
+  isDalle3 as _isDalle3,
 } from "@/app/utils";
 
 export interface OpenAIListModelResponse {
@@ -62,6 +69,16 @@ export interface RequestPayload {
   max_tokens?: number;
 }
 
+export interface DalleRequestPayload {
+  model: string;
+  prompt: string;
+  response_format: "url" | "b64_json";
+  n: number;
+  size: DalleSize;
+  quality: DalleQuality;
+  style: DalleStyle;
+}
+
 export class ChatGPTApi implements LLMApi {
   private disableListModels = true;
 
@@ -188,17 +205,6 @@ export class ChatGPTApi implements LLMApi {
   }
 
   async chat(options: ChatOptions) {
-    const visionModel = isVisionModel(options.config.model);
-    const isO1 = options.config.model.startsWith("o1");
-    const messages: ChatOptions["messages"] = [];
-    for (const v of options.messages) {
-      const content = visionModel
-        ? await preProcessImageContent(v.content)
-        : getMessageTextContent(v);
-      if (!(isO1 && v.role === "system"))
-        messages.push({ role: v.role, content });
-    }
-
     const modelConfig = {
       ...useAppConfig.getState().modelConfig,
       ...useChatStore.getState().currentSession().mask.modelConfig,
@@ -208,26 +214,57 @@ export class ChatGPTApi implements LLMApi {
       },
     };
 
-    const requestPayload: RequestPayload = {
-      messages,
-      stream: !isO1 ? options.config.stream : false,
-      model: modelConfig.model,
-      temperature: !isO1 ? modelConfig.temperature : 1,
-      presence_penalty: !isO1 ? modelConfig.presence_penalty : 0,
-      frequency_penalty: !isO1 ? modelConfig.frequency_penalty : 0,
-      top_p: !isO1 ? modelConfig.top_p : 1,
-      // max_tokens: Math.max(modelConfig.max_tokens, 1024),
-      // Please do not ask me why not send max_tokens, no reason, this param is just shit, I dont want to explain anymore.
-    };
+    let requestPayload: RequestPayload | DalleRequestPayload;
 
-    // add max_tokens to vision model
-    if (visionModel && modelConfig.model.includes("preview")) {
-      requestPayload["max_tokens"] = Math.max(modelConfig.max_tokens, 4000);
+    const isDalle3 = _isDalle3(options.config.model);
+    const isO1 = options.config.model.startsWith("o1");
+    if (isDalle3) {
+      const prompt = getMessageTextContent(
+        options.messages.slice(-1)?.pop() as any,
+      );
+      requestPayload = {
+        model: options.config.model,
+        prompt,
+        // URLs are only valid for 60 minutes after the image has been generated.
+        response_format: "b64_json", // using b64_json, and save image in CacheStorage
+        n: 1,
+        size: options.config?.size ?? "1024x1024",
+        quality: options.config?.quality ?? "standard",
+        style: options.config?.style ?? "vivid",
+      };
+    } else {
+      const visionModel = isVisionModel(options.config.model);
+      const messages: ChatOptions["messages"] = [];
+      for (const v of options.messages) {
+        const content = visionModel
+          ? await preProcessImageContent(v.content)
+          : getMessageTextContent(v);
+        if (!(isO1 && v.role === "system"))
+          messages.push({ role: v.role, content });
+      }
+
+      // O1 not support image, tools (plugin in ChatGPTNextWeb) and system, stream, logprobs, temperature, top_p, n, presence_penalty, frequency_penalty yet.
+      requestPayload = {
+        messages,
+        stream: !isO1 ? options.config.stream : false,
+        model: modelConfig.model,
+        temperature: !isO1 ? modelConfig.temperature : 1,
+        presence_penalty: !isO1 ? modelConfig.presence_penalty : 0,
+        frequency_penalty: !isO1 ? modelConfig.frequency_penalty : 0,
+        top_p: !isO1 ? modelConfig.top_p : 1,
+        // max_tokens: Math.max(modelConfig.max_tokens, 1024),
+        // Please do not ask me why not send max_tokens, no reason, this param is just shit, I dont want to explain anymore.
+      };
+
+      // add max_tokens to vision model
+      if (visionModel) {
+        requestPayload["max_tokens"] = Math.max(modelConfig.max_tokens, 4000);
+      }
     }
 
     console.log("[Request] openai payload: ", requestPayload);
 
-    const shouldStream = !!options.config.stream && !isO1;
+    const shouldStream = !isDalle3 && !!options.config.stream && !isO1;
     const controller = new AbortController();
     options.onController?.(controller);
 
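Note (illustrative, not part of the patch): when isDalle3 is true, the payload built above is a DalleRequestPayload rather than a chat RequestPayload. Assuming the user kept the default options, it ends up shaped roughly like the sketch below; the prompt string is an example.

    // Sketch of the image-generation payload produced by the branch above.
    const examplePayload: DalleRequestPayload = {
      model: "dall-e-3",
      prompt: "a watercolor fox", // taken from the last user message
      response_format: "b64_json", // kept locally, so no 60-minute URL expiry
      n: 1,
      size: "1024x1024", // defaults when options.config has no size/quality/style
      quality: "standard",
      style: "vivid",
    };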
@@ -253,155 +290,99 @@ export class ChatGPTApi implements LLMApi {
             model?.provider?.providerName === ServiceProvider.Azure,
         );
         chatPath = this.path(
-          Azure.ChatPath(
+          (isDalle3 ? Azure.ImagePath : Azure.ChatPath)(
             (model?.displayName ?? model?.name) as string,
             useCustomConfig ? useAccessStore.getState().azureApiVersion : "",
           ),
         );
       } else {
-        chatPath = this.path(OpenaiPath.ChatPath);
+        chatPath = this.path(
+          isDalle3 ? OpenaiPath.ImagePath : OpenaiPath.ChatPath,
+        );
       }
-      const chatPayload = {
-        method: "POST",
-        body: JSON.stringify(requestPayload),
-        signal: controller.signal,
-        headers: getHeaders(),
-      };
-
-      // make a fetch request
-      const requestTimeoutId = setTimeout(
-        () => controller.abort(),
-        isO1 ? REQUEST_TIMEOUT_MS * 2 : REQUEST_TIMEOUT_MS,
-      );
-
       if (shouldStream) {
-        let responseText = "";
-        let remainText = "";
-        let finished = false;
-
-        // animate response to make it looks smooth
-        function animateResponseText() {
-          if (finished || controller.signal.aborted) {
-            responseText += remainText;
-            console.log("[Response Animation] finished");
-            if (responseText?.length === 0) {
-              options.onError?.(new Error("empty response from server"));
-            }
-            return;
-          }
-
-          if (remainText.length > 0) {
-            const fetchCount = Math.max(1, Math.round(remainText.length / 60));
-            const fetchText = remainText.slice(0, fetchCount);
-            responseText += fetchText;
-            remainText = remainText.slice(fetchCount);
-            options.onUpdate?.(responseText, fetchText);
-          }
-
-          requestAnimationFrame(animateResponseText);
-        }
-
-        // start animaion
-        animateResponseText();
-
-        const finish = () => {
-          if (!finished) {
-            finished = true;
-            options.onFinish(responseText + remainText);
-          }
-        };
-
-        controller.signal.onabort = finish;
-        fetchEventSource(chatPath, {
-          ...chatPayload,
-          async onopen(res) {
-            clearTimeout(requestTimeoutId);
-            const contentType = res.headers.get("content-type");
-            console.log(
-              "[OpenAI] request response content type: ",
-              contentType,
-            );
-
-            if (contentType?.startsWith("text/plain")) {
-              responseText = await res.clone().text();
-              return finish();
-            }
-
-            if (
-              !res.ok ||
-              !res.headers
-                .get("content-type")
-                ?.startsWith(EventStreamContentType) ||
-              res.status !== 200
-            ) {
-              const responseTexts = [responseText];
-              let extraInfo = await res.clone().text();
-              try {
-                const resJson = await res.clone().json();
-                extraInfo = prettyObject(resJson);
-              } catch {}
-
-              if (res.status === 401) {
-                responseTexts.push(Locale.Error.Unauthorized);
-              }
-
-              if (extraInfo) {
-                responseTexts.push(extraInfo);
-              }
-
-              responseText = responseTexts.join("\n\n");
-
-              return finish();
-            }
-          },
-          onmessage(msg) {
-            if (msg.data === "[DONE]" || finished) {
-              return finish();
-            }
-            const text = msg.data;
-            try {
-              const json = JSON.parse(text);
-              const choices = json.choices as Array<{
-                delta: { content: string };
-              }>;
-              const delta = choices[0]?.delta?.content;
-              const textmoderation = json?.prompt_filter_results;
-
-              if (delta) {
-                remainText += delta;
-              }
-
-              if (
-                textmoderation &&
-                textmoderation.length > 0 &&
-                ServiceProvider.Azure
-              ) {
-                const contentFilterResults =
-                  textmoderation[0]?.content_filter_results;
-                console.log(
-                  `[${ServiceProvider.Azure}] [Text Moderation] flagged categories result:`,
-                  contentFilterResults,
-                );
-              }
-            } catch (e) {
-              console.error("[Request] parse error", text, msg);
-            }
-          },
-          onclose() {
-            finish();
-          },
-          onerror(e) {
-            options.onError?.(e);
-            throw e;
-          },
-          openWhenHidden: true,
-        });
-      } else {
+        // const [tools, funcs] = usePluginStore
+        //   .getState()
+        //   .getAsTools(
+        //     useChatStore.getState().currentSession().mask?.plugin || [],
+        //   );
+        // console.log("getAsTools", tools, funcs);
+        const tools = null;
+        const funcs: Record<string, Function> = {};
+        stream(
+          chatPath,
+          requestPayload,
+          getHeaders(),
+          tools as any,
+          funcs,
+          controller,
+          // parseSSE
+          (text: string, runTools: ChatMessageTool[]) => {
+            // console.log("parseSSE", text, runTools);
+            const json = JSON.parse(text);
+            const choices = json.choices as Array<{
+              delta: {
+                content: string;
+                tool_calls: ChatMessageTool[];
+              };
+            }>;
+            const tool_calls = choices[0]?.delta?.tool_calls;
+            if (tool_calls?.length > 0) {
+              const index = tool_calls[0]?.index;
+              const id = tool_calls[0]?.id;
+              const args = tool_calls[0]?.function?.arguments;
+              if (id) {
+                runTools.push({
+                  id,
+                  type: tool_calls[0]?.type,
+                  function: {
+                    name: tool_calls[0]?.function?.name as string,
+                    arguments: args,
+                  },
+                });
+              } else {
+                // @ts-ignore
+                runTools[index]["function"]["arguments"] += args;
+              }
+            }
+            return choices[0]?.delta?.content;
+          },
+          // processToolMessage, include tool_calls message and tool call results
+          (
+            requestPayload: RequestPayload,
+            toolCallMessage: any,
+            toolCallResult: any[],
+          ) => {
+            // @ts-ignore
+            requestPayload?.messages?.splice(
+              // @ts-ignore
+              requestPayload?.messages?.length,
+              0,
+              toolCallMessage,
+              ...toolCallResult,
+            );
+          },
+          options,
+        );
+      } else {
+        const chatPayload = {
+          method: "POST",
+          body: JSON.stringify(requestPayload),
+          signal: controller.signal,
+          headers: getHeaders(),
+        };
+
+        // make a fetch request
+        const requestTimeoutId = setTimeout(
+          () => controller.abort(),
+          isDalle3 || isO1 ? REQUEST_TIMEOUT_MS * 2 : REQUEST_TIMEOUT_MS, // dalle3 using b64_json is slow.
+        );
+
         const res = await fetch(chatPath, chatPayload);
         clearTimeout(requestTimeoutId);
 
         const resJson = await res.json();
-        const message = this.extractMessage(resJson);
+        const message = await this.extractMessage(resJson);
         options.onFinish(message);
       }
     } catch (e) {
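Note (reviewer sketch, not part of the patch): extractMessage is now awaited because a DALL·E response carries b64_json image data instead of message text, and the payload comment above says the image is saved to CacheStorage. A rough illustration of that idea using only standard browser APIs follows; the helper name and cache key are hypothetical, not the project's actual implementation.

    // Hypothetical sketch: store a b64_json image via the Cache API and hand back a local URL.
    async function toImageUrl(b64: string): Promise<string> {
      const bytes = Uint8Array.from(atob(b64), (c) => c.charCodeAt(0));
      const blob = new Blob([bytes], { type: "image/png" });
      const url = `/cached-images/${crypto.randomUUID()}.png`; // lookup key, example only
      const cache = await caches.open("dalle-images"); // example cache name
      await cache.put(url, new Response(blob, { headers: { "Content-Type": "image/png" } }));
      return url;
    }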
@@ -45,6 +45,12 @@ import BottomIcon from "../icons/bottom.svg";
 import StopIcon from "../icons/pause.svg";
 import RobotIcon from "../icons/robot.svg";
 import CheckmarkIcon from "../icons/checkmark.svg";
+import SizeIcon from "../icons/size.svg";
+import QualityIcon from "../icons/hd.svg";
+import StyleIcon from "../icons/palette.svg";
+import PluginIcon from "../icons/plugin.svg";
+import ShortcutkeyIcon from "../icons/shortcutkey.svg";
+import ReloadIcon from "../icons/reload.svg";
 
 import {
   ChatMessage,
@@ -67,6 +73,8 @@ import {
   getMessageTextContent,
   getMessageImages,
   isVisionModel,
+  isDalle3,
+  safeLocalStorage,
   isFirefox,
   isSupportRAGModel,
   isFunctionCallModel,
@@ -77,6 +85,7 @@ import { uploadImage as uploadImageRemote } from "@/app/utils/chat";
 import dynamic from "next/dynamic";
 
 import { ChatControllerPool } from "../client/controller";
+import { DalleSize, DalleQuality, DalleStyle } from "../typing";
 import { Prompt, usePromptStore } from "../store/prompt";
 import Locale, { getLang, getSTTLang } from "../locales";
 
@@ -534,6 +543,19 @@ export function ChatActions(props: {
   const [showUploadImage, setShowUploadImage] = useState(false);
   const [showUploadFile, setShowUploadFile] = useState(false);
 
+  const [showSizeSelector, setShowSizeSelector] = useState(false);
+  const [showQualitySelector, setShowQualitySelector] = useState(false);
+  const [showStyleSelector, setShowStyleSelector] = useState(false);
+  const dalle3Sizes: DalleSize[] = ["1024x1024", "1792x1024", "1024x1792"];
+  const dalle3Qualitys: DalleQuality[] = ["standard", "hd"];
+  const dalle3Styles: DalleStyle[] = ["vivid", "natural"];
+  const currentSize =
+    chatStore.currentSession().mask.modelConfig?.size ?? "1024x1024";
+  const currentQuality =
+    chatStore.currentSession().mask.modelConfig?.quality ?? "standard";
+  const currentStyle =
+    chatStore.currentSession().mask.modelConfig?.style ?? "vivid";
+
   const accessStore = useAccessStore();
   const isEnableRAG = useMemo(
     () => accessStore.enableRAG(),
@@ -689,6 +711,87 @@ export function ChatActions(props: {
           }}
         />
       )}
+
+      {isDalle3(currentModel) && (
+        <ChatAction
+          onClick={() => setShowSizeSelector(true)}
+          text={currentSize}
+          icon={<SizeIcon />}
+        />
+      )}
+
+      {showSizeSelector && (
+        <Selector
+          defaultSelectedValue={currentSize}
+          items={dalle3Sizes.map((m) => ({
+            title: m,
+            value: m,
+          }))}
+          onClose={() => setShowSizeSelector(false)}
+          onSelection={(s) => {
+            if (s.length === 0) return;
+            const size = s[0];
+            chatStore.updateCurrentSession((session) => {
+              session.mask.modelConfig.size = size;
+            });
+            showToast(size);
+          }}
+        />
+      )}
+
+      {isDalle3(currentModel) && (
+        <ChatAction
+          onClick={() => setShowQualitySelector(true)}
+          text={currentQuality}
+          icon={<QualityIcon />}
+        />
+      )}
+
+      {showQualitySelector && (
+        <Selector
+          defaultSelectedValue={currentQuality}
+          items={dalle3Qualitys.map((m) => ({
+            title: m,
+            value: m,
+          }))}
+          onClose={() => setShowQualitySelector(false)}
+          onSelection={(q) => {
+            if (q.length === 0) return;
+            const quality = q[0];
+            chatStore.updateCurrentSession((session) => {
+              session.mask.modelConfig.quality = quality;
+            });
+            showToast(quality);
+          }}
+        />
+      )}
+
+      {isDalle3(currentModel) && (
+        <ChatAction
+          onClick={() => setShowStyleSelector(true)}
+          text={currentStyle}
+          icon={<StyleIcon />}
+        />
+      )}
+
+      {showStyleSelector && (
+        <Selector
+          defaultSelectedValue={currentStyle}
+          items={dalle3Styles.map((m) => ({
+            title: m,
+            value: m,
+          }))}
+          onClose={() => setShowStyleSelector(false)}
+          onSelection={(s) => {
+            if (s.length === 0) return;
+            const style = s[0];
+            chatStore.updateCurrentSession((session) => {
+              session.mask.modelConfig.style = style;
+            });
+            showToast(style);
+          }}
+        />
+      )}
     </div>
     <div>
       <ChatAction
@@ -121,6 +121,7 @@ export const Anthropic = {
 export const OpenaiPath = {
   ChatPath: "v1/chat/completions",
   SpeechPath: "v1/audio/speech",
+  ImagePath: "v1/images/generations",
   TranscriptionPath: "v1/audio/transcriptions",
   UsagePath: "dashboard/billing/usage",
   SubsPath: "dashboard/billing/subscription",
@@ -130,6 +131,8 @@ export const OpenaiPath = {
 export const Azure = {
   ChatPath: (deployName: string, apiVersion: string) =>
     `deployments/${deployName}/chat/completions?api-version=${apiVersion}`,
+  ImagePath: (deployName: string, apiVersion: string) =>
+    `deployments/${deployName}/images/generations?api-version=${apiVersion}`,
   ExampleEndpoint: "https://{resource-url}/openai/deployments/{deploy-id}",
 };
 
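Note (illustrative, not part of the patch): with these constants both providers hit the standard images endpoint; the deployment name and api-version below are example values only.

    // Paths produced by the new constants.
    OpenaiPath.ImagePath; // "v1/images/generations"
    Azure.ImagePath("my-dalle3-deployment", "2024-02-15-preview");
    // => "deployments/my-dalle3-deployment/images/generations?api-version=2024-02-15-preview"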
app/icons/discovery.svg (new file)
@@ -0,0 +1,7 @@
+<svg xmlns="http://www.w3.org/2000/svg" width="1.2rem" height="1.2rem" viewBox="0 0 24 24">
+  <g fill="none" stroke="black" stroke-linecap="round" stroke-linejoin="round" stroke-width="2">
+    <circle cx="12" cy="12" r="9" />
+    <path
+      d="M11.307 9.739L15 9l-.739 3.693a2 2 0 0 1-1.568 1.569L9 15l.739-3.693a2 2 0 0 1 1.568-1.568" />
+  </g>
+</svg>

app/icons/hd.svg (new file)
@@ -0,0 +1,4 @@
+<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="#333" class="bi bi-badge-hd" viewBox="0 0 16 16">
+  <path d="M7.396 11V5.001H6.209v2.44H3.687V5H2.5v6h1.187V8.43h2.522V11zM8.5 5.001V11h2.188c1.811 0 2.685-1.107 2.685-3.015 0-1.894-.86-2.984-2.684-2.984zm1.187.967h.843c1.112 0 1.622.686 1.622 2.04 0 1.353-.505 2.02-1.622 2.02h-.843z"/>
+  <path d="M14 3a1 1 0 0 1 1 1v8a1 1 0 0 1-1 1H2a1 1 0 0 1-1-1V4a1 1 0 0 1 1-1zM2 2a2 2 0 0 0-2 2v8a2 2 0 0 0 2 2h12a2 2 0 0 0 2-2V4a2 2 0 0 0-2-2z"/>
+</svg>

app/icons/history.svg (new file)
@@ -0,0 +1,10 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<svg width="16" height="16" viewBox="0 0 48 48" fill="none" xmlns="http://www.w3.org/2000/svg">
+  <path d="M5.81836 6.72729V14H13.0911" stroke="#333" stroke-width="4" stroke-linecap="round"
+    stroke-linejoin="round" />
+  <path
+    d="M4 24C4 35.0457 12.9543 44 24 44V44C35.0457 44 44 35.0457 44 24C44 12.9543 35.0457 4 24 4C16.598 4 10.1351 8.02111 6.67677 13.9981"
+    stroke="#333" stroke-width="4" stroke-linecap="round" stroke-linejoin="round" />
+  <path d="M24.005 12L24.0038 24.0088L32.4832 32.4882" stroke="#333" stroke-width="4"
+    stroke-linecap="round" stroke-linejoin="round" />
+</svg>

app/icons/palette.svg (new file)
@@ -0,0 +1,4 @@
+<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="#333" class="bi bi-palette" viewBox="0 0 16 16">
+  <path d="M8 5a1.5 1.5 0 1 0 0-3 1.5 1.5 0 0 0 0 3m4 3a1.5 1.5 0 1 0 0-3 1.5 1.5 0 0 0 0 3M5.5 7a1.5 1.5 0 1 1-3 0 1.5 1.5 0 0 1 3 0m.5 6a1.5 1.5 0 1 0 0-3 1.5 1.5 0 0 0 0 3"/>
+  <path d="M16 8c0 3.15-1.866 2.585-3.567 2.07C11.42 9.763 10.465 9.473 10 10c-.603.683-.475 1.819-.351 2.92C9.826 14.495 9.996 16 8 16a8 8 0 1 1 8-8m-8 7c.611 0 .654-.171.655-.176.078-.146.124-.464.07-1.119-.014-.168-.037-.37-.061-.591-.052-.464-.112-1.005-.118-1.462-.01-.707.083-1.61.704-2.314.369-.417.845-.578 1.272-.618.404-.038.812.026 1.16.104.343.077.702.186 1.025.284l.028.008c.346.105.658.199.953.266.653.148.904.083.991.024C14.717 9.38 15 9.161 15 8a7 7 0 1 0-7 7"/>
+</svg>

app/icons/sd.svg (new file)
@@ -0,0 +1,12 @@
+<svg xmlns="http://www.w3.org/2000/svg" width="1.21em" height="1em" viewBox="0 0 256 213">
+  <defs>
+    <linearGradient id="logosStabilityAiIcon0" x1="50%" x2="50%" y1="0%" y2="100%">
+      <stop offset="0%" stop-color="#9d39ff" />
+      <stop offset="100%" stop-color="#a380ff" />
+    </linearGradient>
+  </defs>
+  <path fill="url(#logosStabilityAiIcon0)"
+    d="M72.418 212.45c49.478 0 81.658-26.205 81.658-65.626c0-30.572-19.572-49.998-54.569-58.043l-22.469-6.74c-19.71-4.424-31.215-9.738-28.505-23.312c2.255-11.292 9.002-17.667 24.69-17.667c49.872 0 68.35 17.667 68.35 17.667V16.237S123.583 0 73.223 0C25.757 0 0 24.424 0 62.236c0 30.571 17.85 48.35 54.052 56.798q3.802.95 3.885.976q8.26 2.556 22.293 6.755c18.504 4.425 23.262 9.121 23.262 23.2c0 12.872-13.374 20.19-31.074 20.19C21.432 170.154 0 144.36 0 144.36v47.078s13.402 21.01 72.418 21.01" />
+  <path fill="#e80000"
+    d="M225.442 209.266c17.515 0 30.558-12.67 30.558-29.812c0-17.515-12.67-29.813-30.558-29.813c-17.515 0-30.185 12.298-30.185 29.813s12.67 29.812 30.185 29.812" />
+</svg>

app/icons/shortcutkey.svg (new file)
@@ -0,0 +1 @@
+<?xml version="1.0" encoding="UTF-8"?><svg width="16" height="16" viewBox="0 0 48 48" fill="none" xmlns="http://www.w3.org/2000/svg"><path d="M42 7H6C4.89543 7 4 7.89543 4 9V37C4 38.1046 4.89543 39 6 39H42C43.1046 39 44 38.1046 44 37V9C44 7.89543 43.1046 7 42 7Z" fill="none" stroke="#000" stroke-width="3" stroke-linejoin="round"/><path d="M12 19H14" stroke="#000" stroke-width="3" stroke-linecap="round" stroke-linejoin="round"/><path d="M21 19H23" stroke="#000" stroke-width="3" stroke-linecap="round" stroke-linejoin="round"/><path d="M29 19H36" stroke="#000" stroke-width="3" stroke-linecap="round" stroke-linejoin="round"/><path d="M12 28H36" stroke="#000" stroke-width="3" stroke-linecap="round" stroke-linejoin="round"/></svg>

app/icons/size.svg (new file)
@@ -0,0 +1 @@
+<?xml version="1.0" encoding="UTF-8"?><svg width="16" height="16" viewBox="0 0 48 48" fill="none" xmlns="http://www.w3.org/2000/svg"><path d="M42 7H6C4.89543 7 4 7.89543 4 9V39C4 40.1046 4.89543 41 6 41H42C43.1046 41 44 40.1046 44 39V9C44 7.89543 43.1046 7 42 7Z" fill="none" stroke="#333" stroke-width="4"/><path d="M30 30V18L38 30V18" stroke="#333" stroke-width="4" stroke-linecap="round" stroke-linejoin="round"/><path d="M10 30V18L18 30V18" stroke="#333" stroke-width="4" stroke-linecap="round" stroke-linejoin="round"/><path d="M24 20V21" stroke="#333" stroke-width="4" stroke-linecap="round"/><path d="M24 27V28" stroke="#333" stroke-width="4" stroke-linecap="round"/></svg>

@@ -1,4 +1,4 @@
-<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="20" height="20" fill="none" viewBox="0 0 20 20">
+<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="16" height="16" fill="none" viewBox="0 0 20 20">
   <defs>
     <rect id="path_0" width="20" height="20" x="0" y="0" />
   </defs>
@@ -7,9 +7,9 @@
       <use xlink:href="#path_0" />
     </mask>
     <g mask="url(#bg-mask-0)">
-      <path d="M7 4a3 3 0 016 0v6a3 3 0 11-6 0V4z">
+      <path d="M7 4a3 3 0 016 0v6a3 3 0 11-6 0V4z" fill="#333333">
       </path>
-      <path d="M5.5 9.643a.75.75 0 00-1.5 0V10c0 3.06 2.29 5.585 5.25 5.954V17.5h-1.5a.75.75 0 000 1.5h4.5a.75.75 0 000-1.5h-1.5v-1.546A6.001 6.001 0 0016 10v-.357a.75.75 0 00-1.5 0V10a4.5 4.5 0 01-9 0v-.357z">
+      <path d="M5.5 9.643a.75.75 0 00-1.5 0V10c0 3.06 2.29 5.585 5.25 5.954V17.5h-1.5a.75.75 0 000 1.5h4.5a.75.75 0 000-1.5h-1.5v-1.546A6.001 6.001 0 0016 10v-.357a.75.75 0 00-1.5 0V10a4.5 4.5 0 01-9 0v-.357z" fill="#333333">
       </path>
     </g>
   </g>

app/icons/zoom.svg (new file)
@@ -0,0 +1 @@
+<svg xmlns="http://www.w3.org/2000/svg" width="1.2rem" height="1.2rem" viewBox="0 0 24 24"><g fill="none" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="2"><circle cx="11" cy="11" r="8"></circle><line x1="21" y1="21" x2="16.65" y2="16.65"></line></g></svg>
@@ -18,6 +18,7 @@ import {
   GEMINI_SUMMARIZE_MODEL,
   MYFILES_BROWSER_TOOLS_SYSTEM_PROMPT,
 } from "../constant";
+import { isDalle3, safeLocalStorage } from "../utils";
 import { getClientApi } from "../client/api";
 import type {
   ClientApi,
@@ -39,6 +40,20 @@ import { FileInfo } from "../client/platforms/utils";
 import { collectModelsWithDefaultModel } from "../utils/model";
 import { useAccessStore } from "./access";
 
+const localStorage = safeLocalStorage();
+
+export type ChatMessageTool = {
+  id: string;
+  index?: number;
+  type?: string;
+  function?: {
+    name: string;
+    arguments?: string;
+  };
+  content?: string;
+  isError?: boolean;
+};
+
 export type ChatMessage = RequestMessage & {
   date: string;
   toolMessages?: ChatToolMessage[];
@@ -670,6 +685,10 @@ ${file.partial}
     const config = useAppConfig.getState();
     const session = get().currentSession();
     const modelConfig = session.mask.modelConfig;
+    // skip summarize when using dalle3?
+    if (isDalle3(modelConfig.model)) {
+      return;
+    }
 
     const api: ClientApi = getClientApi(modelConfig.providerName);
 
@@ -1,4 +1,5 @@
 import { LLMModel } from "../client/api";
+import { DalleSize, DalleQuality, DalleStyle } from "../typing";
 import { getClientConfig } from "../config/client";
 import {
   DEFAULT_INPUT_TEMPLATE,
@@ -61,7 +62,7 @@ export const DEFAULT_CONFIG = {
   models: DEFAULT_MODELS as any as LLMModel[],
 
   modelConfig: {
-    model: "gpt-3.5-turbo" as ModelType,
+    model: "gpt-4o-mini" as ModelType,
     providerName: "OpenAI" as ServiceProvider,
     temperature: 0.5,
     top_p: 1,
@@ -73,6 +74,9 @@ export const DEFAULT_CONFIG = {
     compressMessageLengthThreshold: 1000,
     enableInjectSystemPrompts: true,
     template: config?.template ?? DEFAULT_INPUT_TEMPLATE,
+    size: "1024x1024" as DalleSize,
+    quality: "standard" as DalleQuality,
+    style: "vivid" as DalleStyle,
   },
 
   pluginConfig: {
@@ -2,3 +2,4 @@ export * from "./chat";
 export * from "./update";
 export * from "./access";
 export * from "./config";
+export * from "./plugin";
@@ -7,3 +7,7 @@ export interface RequestMessage {
   role: MessageRole;
   content: string;
 }
+
+export type DalleSize = "1024x1024" | "1792x1024" | "1024x1792";
+export type DalleQuality = "standard" | "hd";
+export type DalleStyle = "vivid" | "natural";
app/utils.ts
@@ -267,6 +267,10 @@ export function isVisionModel(model: string) {
   );
 }
 
+export function isDalle3(model: string) {
+  return "dall-e-3" === model;
+}
+
 export function isSupportRAGModel(modelName: string) {
   const specialModels = [
     "gpt-4-turbo",
@@ -284,6 +288,9 @@ export function isSupportRAGModel(modelName: string) {
 }
 
 export function isFunctionCallModel(modelName: string) {
+  if (isDalle3(modelName)) {
+    return false;
+  }
   const specialModels = [
     "gpt-3.5-turbo",
     "gpt-3.5-turbo-1106",
@@ -310,3 +317,63 @@ export function isFunctionCallModel(modelName: string) {
     (model) => model.provider.id === "openai" && !model.name.includes("o1"),
   ).some((model) => model.name === modelName);
 }
+
+export function safeLocalStorage(): {
+  getItem: (key: string) => string | null;
+  setItem: (key: string, value: string) => void;
+  removeItem: (key: string) => void;
+  clear: () => void;
+} {
+  let storage: Storage | null;
+
+  try {
+    if (typeof window !== "undefined" && window.localStorage) {
+      storage = window.localStorage;
+    } else {
+      storage = null;
+    }
+  } catch (e) {
+    console.error("localStorage is not available:", e);
+    storage = null;
+  }
+
+  return {
+    getItem(key: string): string | null {
+      if (storage) {
+        return storage.getItem(key);
+      } else {
+        console.warn(
+          `Attempted to get item "${key}" from localStorage, but localStorage is not available.`,
+        );
+        return null;
+      }
+    },
+    setItem(key: string, value: string): void {
+      if (storage) {
+        storage.setItem(key, value);
+      } else {
+        console.warn(
+          `Attempted to set item "${key}" in localStorage, but localStorage is not available.`,
+        );
+      }
+    },
+    removeItem(key: string): void {
+      if (storage) {
+        storage.removeItem(key);
+      } else {
+        console.warn(
+          `Attempted to remove item "${key}" from localStorage, but localStorage is not available.`,
+        );
+      }
+    },
+    clear(): void {
+      if (storage) {
+        storage.clear();
+      } else {
+        console.warn(
+          "Attempted to clear localStorage, but localStorage is not available.",
+        );
+      }
+    },
+  };
+}
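Note (illustrative usage, not part of the patch): safeLocalStorage() returns a localStorage-shaped wrapper that degrades to console warnings when storage is unavailable (e.g. during SSR), so call sites keep the familiar API. The key name below is an example.

    // Sketch: the wrapper is used exactly like window.localStorage.
    const storage = safeLocalStorage();
    storage.setItem("last-model", "dall-e-3"); // warns instead of throwing when storage is missing
    const imageMode = isDalle3(storage.getItem("last-model") ?? ""); // true only for "dall-e-3"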
@@ -1,5 +1,15 @@
-import { CACHE_URL_PREFIX, UPLOAD_URL } from "@/app/constant";
+import {
+  CACHE_URL_PREFIX,
+  UPLOAD_URL,
+  REQUEST_TIMEOUT_MS,
+} from "@/app/constant";
 import { RequestMessage } from "@/app/client/api";
+import Locale from "@/app/locales";
+import {
+  EventStreamContentType,
+  fetchEventSource,
+} from "@fortaine/fetch-event-source";
+import { prettyObject } from "./format";
 
 export function compressImage(file: Blob, maxSize: number): Promise<string> {
   return new Promise((resolve, reject) => {
@@ -142,3 +152,203 @@ export function removeImage(imageUrl: string) {
     credentials: "include",
   });
 }
+
+export function stream(
+  chatPath: string,
+  requestPayload: any,
+  headers: any,
+  tools: any[],
+  funcs: Record<string, Function>,
+  controller: AbortController,
+  parseSSE: (text: string, runTools: any[]) => string | undefined,
+  processToolMessage: (
+    requestPayload: any,
+    toolCallMessage: any,
+    toolCallResult: any[],
+  ) => void,
+  options: any,
+) {
+  let responseText = "";
+  let remainText = "";
+  let finished = false;
+  let running = false;
+  let runTools: any[] = [];
+
+  // animate response to make it looks smooth
+  function animateResponseText() {
+    if (finished || controller.signal.aborted) {
+      responseText += remainText;
+      console.log("[Response Animation] finished");
+      if (responseText?.length === 0) {
+        options.onError?.(new Error("empty response from server"));
+      }
+      return;
+    }
+
+    if (remainText.length > 0) {
+      const fetchCount = Math.max(1, Math.round(remainText.length / 60));
+      const fetchText = remainText.slice(0, fetchCount);
+      responseText += fetchText;
+      remainText = remainText.slice(fetchCount);
+      options.onUpdate?.(responseText, fetchText);
+    }
+
+    requestAnimationFrame(animateResponseText);
+  }
+
+  // start animaion
+  animateResponseText();
+
+  const finish = () => {
+    if (!finished) {
+      if (!running && runTools.length > 0) {
+        const toolCallMessage = {
+          role: "assistant",
+          tool_calls: [...runTools],
+        };
+        running = true;
+        runTools.splice(0, runTools.length); // empty runTools
+        return Promise.all(
+          toolCallMessage.tool_calls.map((tool) => {
+            options?.onBeforeTool?.(tool);
+            return Promise.resolve(
+              // @ts-ignore
+              funcs[tool.function.name](
+                // @ts-ignore
+                tool?.function?.arguments
+                  ? JSON.parse(tool?.function?.arguments)
+                  : {},
+              ),
+            )
+              .then((res) => {
+                const content = JSON.stringify(res.data);
+                if (res.status >= 300) {
+                  return Promise.reject(content);
+                }
+                return content;
+              })
+              .then((content) => {
+                options?.onAfterTool?.({
+                  ...tool,
+                  content,
+                  isError: false,
+                });
+                return content;
+              })
+              .catch((e) => {
+                options?.onAfterTool?.({ ...tool, isError: true });
+                return e.toString();
+              })
+              .then((content) => ({
+                role: "tool",
+                content,
+                tool_call_id: tool.id,
+              }));
+          }),
+        ).then((toolCallResult) => {
+          processToolMessage(requestPayload, toolCallMessage, toolCallResult);
+          setTimeout(() => {
+            // call again
+            console.debug("[ChatAPI] restart");
+            running = false;
+            chatApi(chatPath, headers, requestPayload, tools); // call fetchEventSource
+          }, 60);
+        });
+        return;
+      }
+      if (running) {
+        return;
+      }
+      console.debug("[ChatAPI] end");
+      finished = true;
+      options.onFinish(responseText + remainText);
+    }
+  };
+
+  controller.signal.onabort = finish;
+
+  function chatApi(
+    chatPath: string,
+    headers: any,
+    requestPayload: any,
+    tools: any,
+  ) {
+    const chatPayload = {
+      method: "POST",
+      body: JSON.stringify({
+        ...requestPayload,
+        tools: tools && tools.length ? tools : undefined,
+      }),
+      signal: controller.signal,
+      headers,
+    };
+    const requestTimeoutId = setTimeout(
+      () => controller.abort(),
+      REQUEST_TIMEOUT_MS,
+    );
+    fetchEventSource(chatPath, {
+      ...chatPayload,
+      async onopen(res) {
+        clearTimeout(requestTimeoutId);
+        const contentType = res.headers.get("content-type");
+        console.log("[Request] response content type: ", contentType);
+
+        if (contentType?.startsWith("text/plain")) {
+          responseText = await res.clone().text();
+          return finish();
+        }
+
+        if (
+          !res.ok ||
+          !res.headers
+            .get("content-type")
+            ?.startsWith(EventStreamContentType) ||
+          res.status !== 200
+        ) {
+          const responseTexts = [responseText];
+          let extraInfo = await res.clone().text();
+          try {
+            const resJson = await res.clone().json();
+            extraInfo = prettyObject(resJson);
+          } catch {}
+
+          if (res.status === 401) {
+            responseTexts.push(Locale.Error.Unauthorized);
+          }
+
+          if (extraInfo) {
+            responseTexts.push(extraInfo);
+          }
+
+          responseText = responseTexts.join("\n\n");
+
+          return finish();
+        }
+      },
+      onmessage(msg) {
+        if (msg.data === "[DONE]" || finished) {
+          return finish();
+        }
+        const text = msg.data;
+        try {
+          const chunk = parseSSE(msg.data, runTools);
+          if (chunk) {
+            remainText += chunk;
+          }
+        } catch (e) {
+          console.error("[Request] parse error", text, msg, e);
+        }
+      },
+      onclose() {
+        finish();
+      },
+      onerror(e) {
+        options?.onError?.(e);
+        throw e;
+      },
+      openWhenHidden: true,
+    });
+  }
+  console.debug("[ChatAPI] start");
+  chatApi(chatPath, headers, requestPayload, tools); // call fetchEventSource
+}
app/utils/indexedDB-storage.ts (new file)
@@ -0,0 +1,47 @@
+import { StateStorage } from "zustand/middleware";
+import { get, set, del, clear } from "idb-keyval";
+import { safeLocalStorage } from "@/app/utils";
+
+const localStorage = safeLocalStorage();
+
+class IndexedDBStorage implements StateStorage {
+  public async getItem(name: string): Promise<string | null> {
+    try {
+      const value = (await get(name)) || localStorage.getItem(name);
+      return value;
+    } catch (error) {
+      return localStorage.getItem(name);
+    }
+  }
+
+  public async setItem(name: string, value: string): Promise<void> {
+    try {
+      const _value = JSON.parse(value);
+      if (!_value?.state?._hasHydrated) {
+        console.warn("skip setItem", name);
+        return;
+      }
+      await set(name, value);
+    } catch (error) {
+      localStorage.setItem(name, value);
+    }
+  }
+
+  public async removeItem(name: string): Promise<void> {
+    try {
+      await del(name);
+    } catch (error) {
+      localStorage.removeItem(name);
+    }
+  }
+
+  public async clear(): Promise<void> {
+    try {
+      await clear();
+    } catch (error) {
+      localStorage.clear();
+    }
+  }
+}
+
+export const indexedDBStorage = new IndexedDBStorage();
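Note (illustrative, not part of the patch): a persisted zustand store would opt into this IndexedDB backend via createJSONStorage; the store shape and name below are examples, not code from this repository.

    // Sketch: wiring indexedDBStorage into a persisted zustand store.
    import { create } from "zustand";
    import { persist, createJSONStorage } from "zustand/middleware";
    import { indexedDBStorage } from "@/app/utils/indexedDB-storage";

    const useExampleStore = create(
      persist(() => ({ counter: 0 }), {
        name: "example-store", // example key only
        storage: createJSONStorage(() => indexedDBStorage),
      }),
    );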
package-lock.json (generated)
@@ -48,6 +48,7 @@
     "heic2any": "^0.0.4",
     "html-entities": "^2.4.0",
     "html-to-image": "^1.11.11",
+    "idb-keyval": "^6.2.1",
     "html-to-text": "^9.0.5",
     "https-proxy-agent": "^7.0.2",
     "mammoth": "^1.7.1",
@@ -5935,6 +5935,11 @@ iconv-lite@0.6, iconv-lite@^0.6.2, iconv-lite@^0.6.3:
   dependencies:
     safer-buffer ">= 2.1.2 < 3.0.0"
 
+idb-keyval@^6.2.1:
+  version "6.2.1"
+  resolved "https://registry.yarnpkg.com/idb-keyval/-/idb-keyval-6.2.1.tgz#94516d625346d16f56f3b33855da11bfded2db33"
+  integrity sha512-8Sb3veuYCyrZL+VBt9LJfZjLUPWVvqn8tG28VqYNFCo43KHcKuq+b4EiXGeuaLAQWL2YmyDgMp2aSpH9JHsEQg==
+
 ieee754@^1.1.13, ieee754@^1.2.1:
   version "1.2.1"
   resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352"