feat: openai realtime merge
@ -38,16 +38,20 @@ interface ChatCommands {
|
||||
next?: Command;
|
||||
prev?: Command;
|
||||
clear?: Command;
|
||||
fork?: Command;
|
||||
del?: Command;
|
||||
}
|
||||
|
||||
-export const ChatCommandPrefix = ":";
+// Compatible with the Chinese colon character "："
+export const ChatCommandPrefix = /^[:：]/;
|
||||
|
||||
export function useChatCommand(commands: ChatCommands = {}) {
|
||||
function extract(userInput: string) {
|
||||
-return (
-userInput.startsWith(ChatCommandPrefix) ? userInput.slice(1) : userInput
-) as keyof ChatCommands;
+const match = userInput.match(ChatCommandPrefix);
+if (match) {
+return userInput.slice(1) as keyof ChatCommands;
+}
+return userInput as keyof ChatCommands;
|
||||
}
|
||||
|
||||
function search(userInput: string) {
|
||||
@ -57,7 +61,7 @@ export function useChatCommand(commands: ChatCommands = {}) {
|
||||
.filter((c) => c.startsWith(input))
|
||||
.map((c) => ({
|
||||
title: desc[c as keyof ChatCommands],
|
||||
-content: ChatCommandPrefix + c,
+content: ":" + c,
|
||||
}));
|
||||
}
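Note: the hunk above switches ChatCommandPrefix from a plain string to a regex so that both the ASCII colon and the full-width Chinese colon trigger command handling. A standalone sketch of the matching behavior (illustrative only, not code from the diff; the helper name is made up):

// Standalone sketch: how the regex prefix behaves.
const ChatCommandPrefix = /^[:：]/;

function extractCommand(userInput: string): string {
  // Both colons are single UTF-16 code units, so slice(1) strips either one.
  return ChatCommandPrefix.test(userInput) ? userInput.slice(1) : userInput;
}

extractCommand(":clear"); // "clear"
extractCommand("：fork"); // "fork"  (full-width colon)
extractCommand("hello");  // "hello" (no prefix, returned unchanged)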
|
||||
|
||||
|
@ -110,6 +110,13 @@
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
justify-content: space-between;
|
||||
gap: 5px;
|
||||
|
||||
&-end {
|
||||
display: flex;
|
||||
margin-left: auto;
|
||||
gap: 5px;
|
||||
}
|
||||
|
||||
.chat-input-action {
|
||||
display: inline-flex;
|
||||
@ -127,10 +134,6 @@
|
||||
width: var(--icon-width);
|
||||
overflow: hidden;
|
||||
|
||||
&:not(:last-child) {
|
||||
margin-right: 5px;
|
||||
}
|
||||
|
||||
.text {
|
||||
white-space: nowrap;
|
||||
padding-left: 5px;
|
||||
@ -413,6 +416,12 @@
|
||||
flex-wrap: nowrap;
|
||||
}
|
||||
}
|
||||
|
||||
.chat-model-name {
|
||||
font-size: 12px;
|
||||
color: var(--black);
|
||||
margin-left: 6px;
|
||||
}
|
||||
}
|
||||
|
||||
.chat-message-container {
|
||||
@ -467,37 +476,6 @@
|
||||
}
|
||||
}
|
||||
|
||||
.chat-message-checkmark {
|
||||
display: inline-block;
|
||||
margin-right: 5px;
|
||||
height: 12px;
|
||||
width: 12px;
|
||||
color: #13a10e;
|
||||
fill: #13a10e;
|
||||
user-select: none;
|
||||
backface-visibility: hidden;
|
||||
transform: translateZ(0px);
|
||||
}
|
||||
|
||||
.chat-message-tools-status {
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
font-size: 12px;
|
||||
margin-top: 5px;
|
||||
line-height: 1.5;
|
||||
}
|
||||
|
||||
.chat-message-tools-name {
|
||||
color: #aaa;
|
||||
}
|
||||
|
||||
.chat-message-tools-details {
|
||||
margin-left: 5px;
|
||||
font-weight: bold;
|
||||
color: #999;
|
||||
}
|
||||
|
||||
.chat-message-status {
|
||||
font-size: 12px;
|
||||
color: #aaa;
|
||||
@ -505,6 +483,21 @@
|
||||
margin-top: 5px;
|
||||
}
|
||||
|
||||
.chat-message-tools {
|
||||
font-size: 12px;
|
||||
color: #aaa;
|
||||
line-height: 1.5;
|
||||
margin-top: 5px;
|
||||
.chat-message-tool {
|
||||
display: flex;
|
||||
align-items: end;
|
||||
svg {
|
||||
margin-left: 5px;
|
||||
margin-right: 5px;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.chat-message-item {
|
||||
box-sizing: border-box;
|
||||
max-width: 100%;
|
||||
@ -520,15 +513,23 @@
|
||||
transition: all ease 0.3s;
|
||||
}
|
||||
|
||||
.chat-message-item-files {
|
||||
display: grid;
|
||||
grid-template-columns: repeat(var(--file-count), auto);
|
||||
grid-gap: 5px;
|
||||
}
|
||||
|
||||
.chat-message-item-file {
|
||||
text-decoration: none;
|
||||
color: #aaa;
|
||||
.chat-message-audio {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
border-radius: 10px;
|
||||
background-color: rgba(0, 0, 0, 0.05);
|
||||
border: var(--border-in-light);
|
||||
position: relative;
|
||||
transition: all ease 0.3s;
|
||||
margin-top: 10px;
|
||||
font-size: 14px;
|
||||
user-select: text;
|
||||
word-break: break-word;
|
||||
box-sizing: border-box;
|
||||
audio {
|
||||
height: 30px; /* adjust the height */
|
||||
}
|
||||
}
|
||||
|
||||
.chat-message-item-image {
|
||||
@ -739,3 +740,78 @@
|
||||
bottom: 30px;
|
||||
}
|
||||
}
|
||||
|
||||
.shortcut-key-container {
|
||||
padding: 10px;
|
||||
overflow-y: auto;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
.shortcut-key-grid {
|
||||
display: grid;
|
||||
grid-template-columns: repeat(auto-fit, minmax(350px, 1fr));
|
||||
gap: 16px;
|
||||
}
|
||||
|
||||
.shortcut-key-item {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
overflow: hidden;
|
||||
padding: 10px;
|
||||
background-color: var(--white);
|
||||
}
|
||||
|
||||
.shortcut-key-title {
|
||||
font-size: 14px;
|
||||
color: var(--black);
|
||||
}
|
||||
|
||||
.shortcut-key-keys {
|
||||
display: flex;
|
||||
gap: 8px;
|
||||
}
|
||||
|
||||
.shortcut-key {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
border: var(--border-in-light);
|
||||
border-radius: 8px;
|
||||
padding: 4px;
|
||||
background-color: var(--gray);
|
||||
min-width: 32px;
|
||||
}
|
||||
|
||||
.shortcut-key span {
|
||||
font-size: 12px;
|
||||
color: var(--black);
|
||||
}
|
||||
|
||||
.chat-main {
|
||||
display: flex;
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
position: relative;
|
||||
overflow: hidden;
|
||||
.chat-body-container {
|
||||
height: 100%;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
flex: 1;
|
||||
width: 100%;
|
||||
}
|
||||
.chat-side-panel {
|
||||
position: absolute;
|
||||
inset: 0;
|
||||
background: var(--white);
|
||||
overflow: hidden;
|
||||
z-index: 10;
|
||||
transform: translateX(100%);
|
||||
transition: all ease 0.3s;
|
||||
&-show {
|
||||
transform: translateX(0);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
app/components/realtime-chat/index.ts (new file, 1 line)
@ -0,0 +1 @@
|
||||
export * from "./realtime-chat";
|
app/components/realtime-chat/realtime-chat.module.scss (new file, 74 lines)
@ -0,0 +1,74 @@
|
||||
.realtime-chat {
|
||||
width: 100%;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
position: relative;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
height: 100%;
|
||||
padding: 20px;
|
||||
box-sizing: border-box;
|
||||
.circle-mic {
|
||||
width: 150px;
|
||||
height: 150px;
|
||||
border-radius: 50%;
|
||||
background: linear-gradient(to bottom right, #a0d8ef, #f0f8ff);
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
}
|
||||
.icon-center {
|
||||
font-size: 24px;
|
||||
}
|
||||
|
||||
.bottom-icons {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
width: 100%;
|
||||
position: absolute;
|
||||
bottom: 20px;
|
||||
box-sizing: border-box;
|
||||
padding: 0 20px;
|
||||
}
|
||||
|
||||
.icon-left,
|
||||
.icon-right {
|
||||
width: 46px;
|
||||
height: 46px;
|
||||
font-size: 36px;
|
||||
background: var(--second);
|
||||
border-radius: 50%;
|
||||
padding: 2px;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
cursor: pointer;
|
||||
&:hover {
|
||||
opacity: 0.8;
|
||||
}
|
||||
}
|
||||
|
||||
&.mobile {
|
||||
display: none;
|
||||
}
|
||||
}
|
||||
|
||||
.pulse {
|
||||
animation: pulse 1.5s infinite;
|
||||
}
|
||||
|
||||
@keyframes pulse {
|
||||
0% {
|
||||
transform: scale(1);
|
||||
opacity: 0.7;
|
||||
}
|
||||
50% {
|
||||
transform: scale(1.1);
|
||||
opacity: 1;
|
||||
}
|
||||
100% {
|
||||
transform: scale(1);
|
||||
opacity: 0.7;
|
||||
}
|
||||
}
|
app/components/realtime-chat/realtime-chat.tsx (new file, 359 lines)
@ -0,0 +1,359 @@
|
||||
import VoiceIcon from "@/app/icons/voice.svg";
|
||||
import VoiceOffIcon from "@/app/icons/voice-off.svg";
|
||||
import PowerIcon from "@/app/icons/power.svg";
|
||||
|
||||
import styles from "./realtime-chat.module.scss";
|
||||
import clsx from "clsx";
|
||||
|
||||
import { useState, useRef, useEffect } from "react";
|
||||
|
||||
import { useChatStore, createMessage, useAppConfig } from "@/app/store";
|
||||
|
||||
import { IconButton } from "@/app/components/button";
|
||||
|
||||
import {
|
||||
Modality,
|
||||
RTClient,
|
||||
RTInputAudioItem,
|
||||
RTResponse,
|
||||
TurnDetection,
|
||||
} from "rt-client";
|
||||
import { AudioHandler } from "@/app/lib/audio";
|
||||
import { uploadImage } from "@/app/utils/chat";
|
||||
import { VoicePrint } from "@/app/components/voice-print";
|
||||
|
||||
interface RealtimeChatProps {
|
||||
onClose?: () => void;
|
||||
onStartVoice?: () => void;
|
||||
onPausedVoice?: () => void;
|
||||
}
|
||||
|
||||
export function RealtimeChat({
|
||||
onClose,
|
||||
onStartVoice,
|
||||
onPausedVoice,
|
||||
}: RealtimeChatProps) {
|
||||
const chatStore = useChatStore();
|
||||
const session = chatStore.currentSession();
|
||||
const config = useAppConfig();
|
||||
const [status, setStatus] = useState("");
|
||||
const [isRecording, setIsRecording] = useState(false);
|
||||
const [isConnected, setIsConnected] = useState(false);
|
||||
const [isConnecting, setIsConnecting] = useState(false);
|
||||
const [modality, setModality] = useState("audio");
|
||||
const [useVAD, setUseVAD] = useState(true);
|
||||
const [frequencies, setFrequencies] = useState<Uint8Array | undefined>();
|
||||
|
||||
const clientRef = useRef<RTClient | null>(null);
|
||||
const audioHandlerRef = useRef<AudioHandler | null>(null);
|
||||
const initRef = useRef(false);
|
||||
|
||||
const temperature = config.realtimeConfig.temperature;
|
||||
const apiKey = config.realtimeConfig.apiKey;
|
||||
const model = config.realtimeConfig.model;
|
||||
const azure = config.realtimeConfig.provider === "Azure";
|
||||
const azureEndpoint = config.realtimeConfig.azure.endpoint;
|
||||
const azureDeployment = config.realtimeConfig.azure.deployment;
|
||||
const voice = config.realtimeConfig.voice;
|
||||
|
||||
const handleConnect = async () => {
|
||||
if (isConnecting) return;
|
||||
if (!isConnected) {
|
||||
try {
|
||||
setIsConnecting(true);
|
||||
clientRef.current = azure
|
||||
? new RTClient(
|
||||
new URL(azureEndpoint),
|
||||
{ key: apiKey },
|
||||
{ deployment: azureDeployment },
|
||||
)
|
||||
: new RTClient({ key: apiKey }, { model });
|
||||
const modalities: Modality[] =
|
||||
modality === "audio" ? ["text", "audio"] : ["text"];
|
||||
const turnDetection: TurnDetection = useVAD
|
||||
? { type: "server_vad" }
|
||||
: null;
|
||||
await clientRef.current.configure({
|
||||
instructions: "",
|
||||
voice,
|
||||
input_audio_transcription: { model: "whisper-1" },
|
||||
turn_detection: turnDetection,
|
||||
tools: [],
|
||||
temperature,
|
||||
modalities,
|
||||
});
|
||||
startResponseListener();
|
||||
|
||||
setIsConnected(true);
|
||||
// TODO
|
||||
// try {
|
||||
// const recentMessages = chatStore.getMessagesWithMemory();
|
||||
// for (const message of recentMessages) {
|
||||
// const { role, content } = message;
|
||||
// if (typeof content === "string") {
|
||||
// await clientRef.current.sendItem({
|
||||
// type: "message",
|
||||
// role: role as any,
|
||||
// content: [
|
||||
// {
|
||||
// type: (role === "assistant" ? "text" : "input_text") as any,
|
||||
// text: content as string,
|
||||
// },
|
||||
// ],
|
||||
// });
|
||||
// }
|
||||
// }
|
||||
// // await clientRef.current.generateResponse();
|
||||
// } catch (error) {
|
||||
// console.error("Set message failed:", error);
|
||||
// }
|
||||
} catch (error) {
|
||||
console.error("Connection failed:", error);
|
||||
setStatus("Connection failed");
|
||||
} finally {
|
||||
setIsConnecting(false);
|
||||
}
|
||||
} else {
|
||||
await disconnect();
|
||||
}
|
||||
};
|
||||
|
||||
const disconnect = async () => {
|
||||
if (clientRef.current) {
|
||||
try {
|
||||
await clientRef.current.close();
|
||||
clientRef.current = null;
|
||||
setIsConnected(false);
|
||||
} catch (error) {
|
||||
console.error("Disconnect failed:", error);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const startResponseListener = async () => {
|
||||
if (!clientRef.current) return;
|
||||
|
||||
try {
|
||||
for await (const serverEvent of clientRef.current.events()) {
|
||||
if (serverEvent.type === "response") {
|
||||
await handleResponse(serverEvent);
|
||||
} else if (serverEvent.type === "input_audio") {
|
||||
await handleInputAudio(serverEvent);
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
if (clientRef.current) {
|
||||
console.error("Response iteration error:", error);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const handleResponse = async (response: RTResponse) => {
|
||||
for await (const item of response) {
|
||||
if (item.type === "message" && item.role === "assistant") {
|
||||
const botMessage = createMessage({
|
||||
role: item.role,
|
||||
content: "",
|
||||
});
|
||||
// add bot message first
|
||||
chatStore.updateTargetSession(session, (session) => {
|
||||
session.messages = session.messages.concat([botMessage]);
|
||||
});
|
||||
let hasAudio = false;
|
||||
for await (const content of item) {
|
||||
if (content.type === "text") {
|
||||
for await (const text of content.textChunks()) {
|
||||
botMessage.content += text;
|
||||
}
|
||||
} else if (content.type === "audio") {
|
||||
const textTask = async () => {
|
||||
for await (const text of content.transcriptChunks()) {
|
||||
botMessage.content += text;
|
||||
}
|
||||
};
|
||||
const audioTask = async () => {
|
||||
audioHandlerRef.current?.startStreamingPlayback();
|
||||
for await (const audio of content.audioChunks()) {
|
||||
hasAudio = true;
|
||||
audioHandlerRef.current?.playChunk(audio);
|
||||
}
|
||||
};
|
||||
await Promise.all([textTask(), audioTask()]);
|
||||
}
|
||||
// update message.content
|
||||
chatStore.updateTargetSession(session, (session) => {
|
||||
session.messages = session.messages.concat();
|
||||
});
|
||||
}
|
||||
if (hasAudio) {
|
||||
// upload audio get audio_url
|
||||
const blob = audioHandlerRef.current?.savePlayFile();
|
||||
uploadImage(blob!).then((audio_url) => {
|
||||
botMessage.audio_url = audio_url;
|
||||
// update text and audio_url
|
||||
chatStore.updateTargetSession(session, (session) => {
|
||||
session.messages = session.messages.concat();
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const handleInputAudio = async (item: RTInputAudioItem) => {
|
||||
await item.waitForCompletion();
|
||||
if (item.transcription) {
|
||||
const userMessage = createMessage({
|
||||
role: "user",
|
||||
content: item.transcription,
|
||||
});
|
||||
chatStore.updateTargetSession(session, (session) => {
|
||||
session.messages = session.messages.concat([userMessage]);
|
||||
});
|
||||
// save input audio_url, and update session
|
||||
const { audioStartMillis, audioEndMillis } = item;
|
||||
// upload audio get audio_url
|
||||
const blob = audioHandlerRef.current?.saveRecordFile(
|
||||
audioStartMillis,
|
||||
audioEndMillis,
|
||||
);
|
||||
uploadImage(blob!).then((audio_url) => {
|
||||
userMessage.audio_url = audio_url;
|
||||
chatStore.updateTargetSession(session, (session) => {
|
||||
session.messages = session.messages.concat();
|
||||
});
|
||||
});
|
||||
}
|
||||
// stop streaming play after get input audio.
|
||||
audioHandlerRef.current?.stopStreamingPlayback();
|
||||
};
|
||||
|
||||
const toggleRecording = async () => {
|
||||
if (!isRecording && clientRef.current) {
|
||||
try {
|
||||
if (!audioHandlerRef.current) {
|
||||
audioHandlerRef.current = new AudioHandler();
|
||||
await audioHandlerRef.current.initialize();
|
||||
}
|
||||
await audioHandlerRef.current.startRecording(async (chunk) => {
|
||||
await clientRef.current?.sendAudio(chunk);
|
||||
});
|
||||
setIsRecording(true);
|
||||
} catch (error) {
|
||||
console.error("Failed to start recording:", error);
|
||||
}
|
||||
} else if (audioHandlerRef.current) {
|
||||
try {
|
||||
audioHandlerRef.current.stopRecording();
|
||||
if (!useVAD) {
|
||||
const inputAudio = await clientRef.current?.commitAudio();
|
||||
await handleInputAudio(inputAudio!);
|
||||
await clientRef.current?.generateResponse();
|
||||
}
|
||||
setIsRecording(false);
|
||||
} catch (error) {
|
||||
console.error("Failed to stop recording:", error);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
// prevent re-initialization
|
||||
if (initRef.current) return;
|
||||
initRef.current = true;
|
||||
|
||||
const initAudioHandler = async () => {
|
||||
const handler = new AudioHandler();
|
||||
await handler.initialize();
|
||||
audioHandlerRef.current = handler;
|
||||
await handleConnect();
|
||||
await toggleRecording();
|
||||
};
|
||||
|
||||
initAudioHandler().catch((error) => {
|
||||
setStatus(error);
|
||||
console.error(error);
|
||||
});
|
||||
|
||||
return () => {
|
||||
if (isRecording) {
|
||||
toggleRecording();
|
||||
}
|
||||
audioHandlerRef.current?.close().catch(console.error);
|
||||
disconnect();
|
||||
};
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
let animationFrameId: number;
|
||||
|
||||
if (isConnected && isRecording) {
|
||||
const animationFrame = () => {
|
||||
if (audioHandlerRef.current) {
|
||||
const freqData = audioHandlerRef.current.getByteFrequencyData();
|
||||
setFrequencies(freqData);
|
||||
}
|
||||
animationFrameId = requestAnimationFrame(animationFrame);
|
||||
};
|
||||
|
||||
animationFrameId = requestAnimationFrame(animationFrame);
|
||||
} else {
|
||||
setFrequencies(undefined);
|
||||
}
|
||||
|
||||
return () => {
|
||||
if (animationFrameId) {
|
||||
cancelAnimationFrame(animationFrameId);
|
||||
}
|
||||
};
|
||||
}, [isConnected, isRecording]);
|
||||
|
||||
// update session params
|
||||
useEffect(() => {
|
||||
clientRef.current?.configure({ voice });
|
||||
}, [voice]);
|
||||
useEffect(() => {
|
||||
clientRef.current?.configure({ temperature });
|
||||
}, [temperature]);
|
||||
|
||||
const handleClose = async () => {
|
||||
onClose?.();
|
||||
if (isRecording) {
|
||||
await toggleRecording();
|
||||
}
|
||||
disconnect().catch(console.error);
|
||||
};
|
||||
|
||||
return (
|
||||
<div className={styles["realtime-chat"]}>
|
||||
<div
|
||||
className={clsx(styles["circle-mic"], {
|
||||
[styles["pulse"]]: isRecording,
|
||||
})}
|
||||
>
|
||||
<VoicePrint frequencies={frequencies} isActive={isRecording} />
|
||||
</div>
|
||||
|
||||
<div className={styles["bottom-icons"]}>
|
||||
<div>
|
||||
<IconButton
|
||||
icon={isRecording ? <VoiceIcon /> : <VoiceOffIcon />}
|
||||
onClick={toggleRecording}
|
||||
disabled={!isConnected}
|
||||
shadow
|
||||
bordered
|
||||
/>
|
||||
</div>
|
||||
<div className={styles["icon-center"]}>{status}</div>
|
||||
<div>
|
||||
<IconButton
|
||||
icon={<PowerIcon />}
|
||||
onClick={handleClose}
|
||||
shadow
|
||||
bordered
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
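For context, a hedged sketch of how this component might be mounted from the chat screen. The actual wiring in chat.tsx is outside this excerpt, so the state name and toggle below are assumptions, not code from the PR:

// Hypothetical integration sketch (names are assumptions, not from this diff).
import { useState } from "react";
import { RealtimeChat } from "@/app/components/realtime-chat";
import { useAppConfig } from "@/app/store";

export function RealtimePanel() {
  const config = useAppConfig();
  const [open, setOpen] = useState(false);

  // Only offer realtime chat when it is enabled in settings.
  if (!config.realtimeConfig?.enable) return null;

  return open ? (
    // onClose hides the panel; RealtimeChat stops recording and disconnects internally.
    <RealtimeChat onClose={() => setOpen(false)} />
  ) : (
    <button onClick={() => setOpen(true)}>Start realtime chat</button>
  );
}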
|
app/components/realtime-chat/realtime-config.tsx (new file, 173 lines)
@ -0,0 +1,173 @@
|
||||
import { RealtimeConfig } from "@/app/store";
|
||||
|
||||
import Locale from "@/app/locales";
|
||||
import { ListItem, Select, PasswordInput } from "@/app/components/ui-lib";
|
||||
|
||||
import { InputRange } from "@/app/components/input-range";
|
||||
import { Voice } from "rt-client";
|
||||
import { ServiceProvider } from "@/app/constant";
|
||||
|
||||
const providers = [ServiceProvider.OpenAI, ServiceProvider.Azure];
|
||||
|
||||
const models = ["gpt-4o-realtime-preview-2024-10-01"];
|
||||
|
||||
const voice = ["alloy", "shimmer", "echo"];
|
||||
|
||||
export function RealtimeConfigList(props: {
|
||||
realtimeConfig: RealtimeConfig;
|
||||
updateConfig: (updater: (config: RealtimeConfig) => void) => void;
|
||||
}) {
|
||||
const azureConfigComponent = props.realtimeConfig.provider ===
|
||||
ServiceProvider.Azure && (
|
||||
<>
|
||||
<ListItem
|
||||
title={Locale.Settings.Realtime.Azure.Endpoint.Title}
|
||||
subTitle={Locale.Settings.Realtime.Azure.Endpoint.SubTitle}
|
||||
>
|
||||
<input
|
||||
value={props.realtimeConfig?.azure?.endpoint}
|
||||
type="text"
|
||||
placeholder={Locale.Settings.Realtime.Azure.Endpoint.Title}
|
||||
onChange={(e) => {
|
||||
props.updateConfig(
|
||||
(config) => (config.azure.endpoint = e.currentTarget.value),
|
||||
);
|
||||
}}
|
||||
/>
|
||||
</ListItem>
|
||||
<ListItem
|
||||
title={Locale.Settings.Realtime.Azure.Deployment.Title}
|
||||
subTitle={Locale.Settings.Realtime.Azure.Deployment.SubTitle}
|
||||
>
|
||||
<input
|
||||
value={props.realtimeConfig?.azure?.deployment}
|
||||
type="text"
|
||||
placeholder={Locale.Settings.Realtime.Azure.Deployment.Title}
|
||||
onChange={(e) => {
|
||||
props.updateConfig(
|
||||
(config) => (config.azure.deployment = e.currentTarget.value),
|
||||
);
|
||||
}}
|
||||
/>
|
||||
</ListItem>
|
||||
</>
|
||||
);
|
||||
|
||||
return (
|
||||
<>
|
||||
<ListItem
|
||||
title={Locale.Settings.Realtime.Enable.Title}
|
||||
subTitle={Locale.Settings.Realtime.Enable.SubTitle}
|
||||
>
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={props.realtimeConfig.enable}
|
||||
onChange={(e) =>
|
||||
props.updateConfig(
|
||||
(config) => (config.enable = e.currentTarget.checked),
|
||||
)
|
||||
}
|
||||
></input>
|
||||
</ListItem>
|
||||
|
||||
{props.realtimeConfig.enable && (
|
||||
<>
|
||||
<ListItem
|
||||
title={Locale.Settings.Realtime.Provider.Title}
|
||||
subTitle={Locale.Settings.Realtime.Provider.SubTitle}
|
||||
>
|
||||
<Select
|
||||
aria-label={Locale.Settings.Realtime.Provider.Title}
|
||||
value={props.realtimeConfig.provider}
|
||||
onChange={(e) => {
|
||||
props.updateConfig(
|
||||
(config) =>
|
||||
(config.provider = e.target.value as ServiceProvider),
|
||||
);
|
||||
}}
|
||||
>
|
||||
{providers.map((v, i) => (
|
||||
<option value={v} key={i}>
|
||||
{v}
|
||||
</option>
|
||||
))}
|
||||
</Select>
|
||||
</ListItem>
|
||||
<ListItem
|
||||
title={Locale.Settings.Realtime.Model.Title}
|
||||
subTitle={Locale.Settings.Realtime.Model.SubTitle}
|
||||
>
|
||||
<Select
|
||||
aria-label={Locale.Settings.Realtime.Model.Title}
|
||||
value={props.realtimeConfig.model}
|
||||
onChange={(e) => {
|
||||
props.updateConfig((config) => (config.model = e.target.value));
|
||||
}}
|
||||
>
|
||||
{models.map((v, i) => (
|
||||
<option value={v} key={i}>
|
||||
{v}
|
||||
</option>
|
||||
))}
|
||||
</Select>
|
||||
</ListItem>
|
||||
<ListItem
|
||||
title={Locale.Settings.Realtime.ApiKey.Title}
|
||||
subTitle={Locale.Settings.Realtime.ApiKey.SubTitle}
|
||||
>
|
||||
<PasswordInput
|
||||
aria={Locale.Settings.ShowPassword}
|
||||
aria-label={Locale.Settings.Realtime.ApiKey.Title}
|
||||
value={props.realtimeConfig.apiKey}
|
||||
type="text"
|
||||
placeholder={Locale.Settings.Realtime.ApiKey.Placeholder}
|
||||
onChange={(e) => {
|
||||
props.updateConfig(
|
||||
(config) => (config.apiKey = e.currentTarget.value),
|
||||
);
|
||||
}}
|
||||
/>
|
||||
</ListItem>
|
||||
{azureConfigComponent}
|
||||
<ListItem
|
||||
title={Locale.Settings.TTS.Voice.Title}
|
||||
subTitle={Locale.Settings.TTS.Voice.SubTitle}
|
||||
>
|
||||
<Select
|
||||
value={props.realtimeConfig.voice}
|
||||
onChange={(e) => {
|
||||
props.updateConfig(
|
||||
(config) => (config.voice = e.currentTarget.value as Voice),
|
||||
);
|
||||
}}
|
||||
>
|
||||
{voice.map((v, i) => (
|
||||
<option value={v} key={i}>
|
||||
{v}
|
||||
</option>
|
||||
))}
|
||||
</Select>
|
||||
</ListItem>
|
||||
<ListItem
|
||||
title={Locale.Settings.Realtime.Temperature.Title}
|
||||
subTitle={Locale.Settings.Realtime.Temperature.SubTitle}
|
||||
>
|
||||
<InputRange
|
||||
aria={Locale.Settings.Temperature.Title}
|
||||
value={props.realtimeConfig?.temperature?.toFixed(1)}
|
||||
min="0.6"
|
||||
max="1"
|
||||
step="0.1"
|
||||
onChange={(e) => {
|
||||
props.updateConfig(
|
||||
(config) =>
|
||||
(config.temperature = e.currentTarget.valueAsNumber),
|
||||
);
|
||||
}}
|
||||
></InputRange>
|
||||
</ListItem>
|
||||
</>
|
||||
)}
|
||||
</>
|
||||
);
|
||||
}
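The component above reads and writes a handful of fields on RealtimeConfig, which is imported from app/store and not shown in this diff. Inferred from those accesses, the shape is roughly the following (a sketch, not the actual declaration):

// Inferred sketch of RealtimeConfig; the real type lives in app/store.
import { ServiceProvider } from "@/app/constant";
import { Voice } from "rt-client";

interface RealtimeConfigSketch {
  enable: boolean;
  provider: ServiceProvider; // OpenAI or Azure
  model: string; // e.g. "gpt-4o-realtime-preview-2024-10-01"
  apiKey: string;
  azure: { endpoint: string; deployment: string };
  voice: Voice; // "alloy" | "shimmer" | "echo"
  temperature: number; // limited to 0.6–1.0 by the InputRange above
}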
|
@ -9,6 +9,7 @@ import CopyIcon from "../icons/copy.svg";
|
||||
import ClearIcon from "../icons/clear.svg";
|
||||
import LoadingIcon from "../icons/three-dots.svg";
|
||||
import EditIcon from "../icons/edit.svg";
|
||||
import FireIcon from "../icons/fire.svg";
|
||||
import EyeIcon from "../icons/eye.svg";
|
||||
import DownloadIcon from "../icons/download.svg";
|
||||
import UploadIcon from "../icons/upload.svg";
|
||||
@ -18,7 +19,7 @@ import ConfirmIcon from "../icons/confirm.svg";
|
||||
import ConnectionIcon from "../icons/connection.svg";
|
||||
import CloudSuccessIcon from "../icons/cloud-success.svg";
|
||||
import CloudFailIcon from "../icons/cloud-fail.svg";
|
||||
|
||||
import { trackSettingsPageGuideToCPaymentClick } from "../utils/auth-settings-events";
|
||||
import {
|
||||
Input,
|
||||
List,
|
||||
@ -84,6 +85,7 @@ import { PluginConfigList } from "./plugin-config";
|
||||
import { useMaskStore } from "../store/mask";
|
||||
import { ProviderType } from "../utils/cloud";
|
||||
import { TTSConfigList } from "./tts-config";
|
||||
import { RealtimeConfigList } from "./realtime-chat/realtime-config";
|
||||
import { STTConfigList } from "./stt-config";
|
||||
|
||||
function EditPromptModal(props: { id: string; onClose: () => void }) {
|
||||
@ -1748,9 +1750,11 @@ export function Settings() {
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.CustomModel.Title}
|
||||
subTitle={Locale.Settings.Access.CustomModel.SubTitle}
|
||||
vertical={true}
|
||||
>
|
||||
<input
|
||||
aria-label={Locale.Settings.Access.CustomModel.Title}
|
||||
style={{ width: "100%", maxWidth: "unset", textAlign: "left" }}
|
||||
type="text"
|
||||
value={config.customModels}
|
||||
placeholder="model1,model2,model3"
|
||||
@ -1777,7 +1781,18 @@ export function Settings() {
|
||||
{shouldShowPromptModal && (
|
||||
<UserPromptModal onClose={() => setShowPromptModal(false)} />
|
||||
)}
|
||||
|
||||
<List>
|
||||
<RealtimeConfigList
|
||||
realtimeConfig={config.realtimeConfig}
|
||||
updateConfig={(updater) => {
|
||||
const realtimeConfig = { ...config.realtimeConfig };
|
||||
updater(realtimeConfig);
|
||||
config.update(
|
||||
(config) => (config.realtimeConfig = realtimeConfig),
|
||||
);
|
||||
}}
|
||||
/>
|
||||
</List>
|
||||
<List>
|
||||
<PluginConfigList
|
||||
pluginConfig={config.pluginConfig}
|
||||
@ -1788,7 +1803,6 @@ export function Settings() {
|
||||
}}
|
||||
/>
|
||||
</List>
|
||||
|
||||
<List>
|
||||
<TTSConfigList
|
||||
ttsConfig={config.ttsConfig}
|
||||
|
app/components/voice-print/index.ts (new file, 1 line)
@ -0,0 +1 @@
|
||||
export * from "./voice-print";
|
app/components/voice-print/voice-print.module.scss (new file, 11 lines)
@ -0,0 +1,11 @@
|
||||
.voice-print {
|
||||
width: 100%;
|
||||
height: 60px;
|
||||
margin: 20px 0;
|
||||
|
||||
canvas {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
filter: brightness(1.2); // boost overall brightness
|
||||
}
|
||||
}
|
app/components/voice-print/voice-print.tsx (new file, 180 lines)
@ -0,0 +1,180 @@
|
||||
import { useEffect, useRef, useCallback } from "react";
|
||||
import styles from "./voice-print.module.scss";
|
||||
|
||||
interface VoicePrintProps {
|
||||
frequencies?: Uint8Array;
|
||||
isActive?: boolean;
|
||||
}
|
||||
|
||||
export function VoicePrint({ frequencies, isActive }: VoicePrintProps) {
|
||||
// Canvas ref, used to obtain the drawing context
|
||||
const canvasRef = useRef<HTMLCanvasElement>(null);
|
||||
// Stores historical frequency data for smoothing
|
||||
const historyRef = useRef<number[][]>([]);
|
||||
// Number of history frames to retain; affects smoothness
|
||||
const historyLengthRef = useRef(10);
|
||||
// Stores the animation frame ID for cleanup
|
||||
const animationFrameRef = useRef<number>();
|
||||
|
||||
/**
* Update the frequency history.
* A FIFO queue keeps the history at a fixed length.
*/
|
||||
const updateHistory = useCallback((freqArray: number[]) => {
|
||||
historyRef.current.push(freqArray);
|
||||
if (historyRef.current.length > historyLengthRef.current) {
|
||||
historyRef.current.shift();
|
||||
}
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
const canvas = canvasRef.current;
|
||||
if (!canvas) return;
|
||||
|
||||
const ctx = canvas.getContext("2d");
|
||||
if (!ctx) return;
|
||||
|
||||
/**
* Handle high-DPI displays.
* Scale the canvas backing resolution by the device pixel ratio.
*/
|
||||
const dpr = window.devicePixelRatio || 1;
|
||||
canvas.width = canvas.offsetWidth * dpr;
|
||||
canvas.height = canvas.offsetHeight * dpr;
|
||||
ctx.scale(dpr, dpr);
|
||||
|
||||
/**
* Main draw function.
* Uses requestAnimationFrame for smooth animation and performs:
* 1. Clear the canvas
* 2. Update the history buffer
* 3. Compute the waveform points
* 4. Draw the vertically mirrored voice print
*/
|
||||
const draw = () => {
|
||||
// clear the canvas
|
||||
ctx.clearRect(0, 0, canvas.width, canvas.height);
|
||||
|
||||
if (!frequencies || !isActive) {
|
||||
historyRef.current = [];
|
||||
return;
|
||||
}
|
||||
|
||||
const freqArray = Array.from(frequencies);
|
||||
updateHistory(freqArray);
|
||||
|
||||
// draw the voice print
|
||||
const points: [number, number][] = [];
|
||||
const centerY = canvas.height / 2;
|
||||
const width = canvas.width;
|
||||
const sliceWidth = width / (frequencies.length - 1);
|
||||
|
||||
// draw the main waveform
|
||||
ctx.beginPath();
|
||||
ctx.moveTo(0, centerY);
|
||||
|
||||
/**
* Voice-print drawing algorithm:
* 1. Average with historical data for smooth transitions
* 2. Add natural movement with a sine function
* 3. Connect points with Bezier curves for a smoother line
* 4. Draw the mirrored half to complete the shape
*/
|
||||
for (let i = 0; i < frequencies.length; i++) {
|
||||
const x = i * sliceWidth;
|
||||
let avgFrequency = frequencies[i];
|
||||
|
||||
/**
* Waveform smoothing:
* 1. Collect this bin's frequency values from the history
* 2. Average the current value with the historical values
* 3. Derive the displayed height from the averaged value
*/
|
||||
if (historyRef.current.length > 0) {
|
||||
const historicalValues = historyRef.current.map((h) => h[i] || 0);
|
||||
avgFrequency =
|
||||
(avgFrequency + historicalValues.reduce((a, b) => a + b, 0)) /
|
||||
(historyRef.current.length + 1);
|
||||
}
|
||||
|
||||
/**
* Waveform transform:
* 1. Normalize the frequency value to the 0–1 range
* 2. Apply a time-based sine modulation
* 3. Connect points with a smoothing Bezier curve
*/
|
||||
const normalized = avgFrequency / 255.0;
|
||||
const height = normalized * (canvas.height / 2);
|
||||
const y = centerY + height * Math.sin(i * 0.2 + Date.now() * 0.002);
|
||||
|
||||
points.push([x, y]);
|
||||
|
||||
if (i === 0) {
|
||||
ctx.moveTo(x, y);
|
||||
} else {
|
||||
// use a quadratic Bezier curve to smooth the waveform
|
||||
const prevPoint = points[i - 1];
|
||||
const midX = (prevPoint[0] + x) / 2;
|
||||
ctx.quadraticCurveTo(
|
||||
prevPoint[0],
|
||||
prevPoint[1],
|
||||
midX,
|
||||
(prevPoint[1] + y) / 2,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// draw the mirrored lower half
|
||||
for (let i = points.length - 1; i >= 0; i--) {
|
||||
const [x, y] = points[i];
|
||||
const symmetricY = centerY - (y - centerY);
|
||||
if (i === points.length - 1) {
|
||||
ctx.lineTo(x, symmetricY);
|
||||
} else {
|
||||
const nextPoint = points[i + 1];
|
||||
const midX = (nextPoint[0] + x) / 2;
|
||||
ctx.quadraticCurveTo(
|
||||
nextPoint[0],
|
||||
centerY - (nextPoint[1] - centerY),
|
||||
midX,
|
||||
centerY - ((nextPoint[1] + y) / 2 - centerY),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
ctx.closePath();
|
||||
|
||||
/**
* Gradient fill:
* A three-stop, semi-transparent gradient applied left to right.
* The blue palette improves the visual effect.
*/
|
||||
const gradient = ctx.createLinearGradient(0, 0, canvas.width, 0);
|
||||
gradient.addColorStop(0, "rgba(100, 180, 255, 0.95)");
|
||||
gradient.addColorStop(0.5, "rgba(140, 200, 255, 0.9)");
|
||||
gradient.addColorStop(1, "rgba(180, 220, 255, 0.95)");
|
||||
|
||||
ctx.fillStyle = gradient;
|
||||
ctx.fill();
|
||||
|
||||
animationFrameRef.current = requestAnimationFrame(draw);
|
||||
};
|
||||
|
||||
// start the animation loop
|
||||
draw();
|
||||
|
||||
// cleanup: cancel the animation when the component unmounts
|
||||
return () => {
|
||||
if (animationFrameRef.current) {
|
||||
cancelAnimationFrame(animationFrameRef.current);
|
||||
}
|
||||
};
|
||||
}, [frequencies, isActive, updateHistory]);
|
||||
|
||||
return (
|
||||
<div className={styles["voice-print"]}>
|
||||
<canvas ref={canvasRef} />
|
||||
</div>
|
||||
);
|
||||
}
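The smoothing step above averages the current frequency bin with the same bin across the retained history frames. Isolated as a standalone helper for illustration only (not part of the diff):

// Sketch of the smoothing used in the draw loop above.
function smoothBin(current: number, history: number[][], bin: number): number {
  if (history.length === 0) return current;
  // Sum this bin's value across all retained frames...
  const historicalSum = history.reduce((sum, frame) => sum + (frame[bin] || 0), 0);
  // ...and average it together with the current value.
  return (current + historicalSum) / (history.length + 1);
}

// With history values 100 and 140 for a bin and a current value of 120,
// the smoothed value is (120 + 100 + 140) / 3 = 120.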
|
app/icons/arrow.svg (new file, 1 line)
@ -0,0 +1 @@
|
||||
<svg class="icon--SJP_d" width="16" height="16" fill="none" viewBox="0 0 16 16" style="min-width: 16px; min-height: 16px;"><g><path data-follow-fill="currentColor" fill-rule="evenodd" clip-rule="evenodd" d="M5.248 14.444a.625.625 0 0 1-.005-.884l5.068-5.12a.625.625 0 0 0 0-.88L5.243 2.44a.625.625 0 1 1 .889-.88l5.067 5.121c.723.73.723 1.907 0 2.638l-5.067 5.12a.625.625 0 0 1-.884.005Z" fill="currentColor"></path></g></svg>
|
app/icons/fire.svg (new file, 1 line)
@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="32" height="32" viewBox="0 0 24 24"><path fill="currentColor" d="M12.832 21.801c3.126-.626 7.168-2.875 7.168-8.69c0-5.291-3.873-8.815-6.658-10.434c-.619-.36-1.342.113-1.342.828v1.828c0 1.442-.606 4.074-2.29 5.169c-.86.559-1.79-.278-1.894-1.298l-.086-.838c-.1-.974-1.092-1.565-1.87-.971C4.461 8.46 3 10.33 3 13.11C3 20.221 8.289 22 10.933 22q.232 0 .484-.015C10.111 21.874 8 21.064 8 18.444c0-2.05 1.495-3.435 2.631-4.11c.306-.18.663.055.663.41v.59c0 .45.175 1.155.59 1.637c.47.546 1.159-.026 1.214-.744c.018-.226.246-.37.442-.256c.641.375 1.46 1.175 1.46 2.473c0 2.048-1.129 2.99-2.168 3.357"/></svg>
|
app/icons/headphone.svg (new file, 11 lines)
@ -0,0 +1,11 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<svg width="16" height="16" viewBox="0 0 48 48" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M4 28C4 26.8954 4.89543 26 6 26H10V38H6C4.89543 38 4 37.1046 4 36V28Z" fill="none" />
|
||||
<path d="M38 26H42C43.1046 26 44 26.8954 44 28V36C44 37.1046 43.1046 38 42 38H38V26Z"
|
||||
fill="none" />
|
||||
<path
|
||||
d="M10 36V24C10 16.268 16.268 10 24 10C31.732 10 38 16.268 38 24V36M10 26H6C4.89543 26 4 26.8954 4 28V36C4 37.1046 4.89543 38 6 38H10V26ZM38 26H42C43.1046 26 44 26.8954 44 28V36C44 37.1046 43.1046 38 42 38H38V26Z"
|
||||
stroke="#333" stroke-width="4" stroke-linecap="round" stroke-linejoin="round" />
|
||||
<path d="M16 32H20L22 26L26 38L28 32H32" stroke="#333" stroke-width="4" stroke-linecap="round"
|
||||
stroke-linejoin="round" />
|
||||
</svg>
|
app/icons/logo.svg (new file, 19 lines)
@ -0,0 +1,19 @@
|
||||
<svg width="38.73" height="42" viewBox="0 0 221 240" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<rect x="160.697" y="38.125" width="65.007" height="145.932" rx="32.503" transform="rotate(21.987 160.697 38.125)" fill="url(#logo_svg__a)"></rect>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="m48.642 79.125-25.92 71.213c-6.139 16.869 2.558 35.52 19.427 41.66 16.868 6.14 35.52-2.558 41.66-19.426L94.23 143.94l-36.658-37.439a32.42 32.42 0 0 1-9.244-23.497c.033-1.326.14-2.62.314-3.879Z" fill="url(#logo_svg__b)"></path>
|
||||
<path d="M172.578 132.787a32.765 32.765 0 0 1 8.981 24.238c-1.458 28.748-36.622 41.778-56.46 20.92l-67.644-71.122a32.763 32.763 0 0 1-8.981-24.238c1.457-28.748 36.622-41.778 56.46-20.92l67.644 71.122Z" fill="url(#logo_svg__c)" fill-opacity="0.96"></path>
|
||||
<defs>
|
||||
<linearGradient id="logo_svg__a" x1="215.063" y1="59.628" x2="160.714" y2="157.96" gradientUnits="userSpaceOnUse">
|
||||
<stop stop-color="#3EADFE"></stop>
|
||||
<stop offset="1" stop-color="#2A7AFF"></stop>
|
||||
</linearGradient>
|
||||
<linearGradient id="logo_svg__b" x1="105.376" y1="84.416" x2="19.745" y2="131.163" gradientUnits="userSpaceOnUse">
|
||||
<stop stop-color="#01B3FF"></stop>
|
||||
<stop offset="1" stop-color="#59ECFA"></stop>
|
||||
</linearGradient>
|
||||
<linearGradient id="logo_svg__c" x1="102.734" y1="136.396" x2="192.577" y2="155.859" gradientUnits="userSpaceOnUse">
|
||||
<stop stop-color="#023BFF" stop-opacity="0.82"></stop>
|
||||
<stop offset="0.88" stop-color="#2D86FF" stop-opacity="0.76"></stop>
|
||||
</linearGradient>
|
||||
</defs>
|
||||
</svg>
|
app/icons/power.svg (new file, 7 lines)
@ -0,0 +1,7 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<svg width="24" height="24" viewBox="0 0 48 48" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path
|
||||
d="M14.5 8C13.8406 8.37652 13.2062 8.79103 12.6 9.24051C11.5625 10.0097 10.6074 10.8814 9.75 11.8402C6.79377 15.1463 5 19.4891 5 24.2455C5 34.6033 13.5066 43 24 43C34.4934 43 43 34.6033 43 24.2455C43 19.4891 41.2062 15.1463 38.25 11.8402C37.3926 10.8814 36.4375 10.0097 35.4 9.24051C34.7938 8.79103 34.1594 8.37652 33.5 8"
|
||||
stroke="#333" stroke-width="4" stroke-linecap="round" stroke-linejoin="round" />
|
||||
<path d="M24 4V24" stroke="#333" stroke-width="4" stroke-linecap="round" stroke-linejoin="round" />
|
||||
</svg>
|
app/icons/voice-off.svg (new file, 13 lines)
@ -0,0 +1,13 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<svg width="24" height="24" viewBox="0 0 48 48" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path
|
||||
d="M31 24V11C31 7.13401 27.866 4 24 4C20.134 4 17 7.13401 17 11V24C17 27.866 20.134 31 24 31C27.866 31 31 27.866 31 24Z"
|
||||
stroke="#d0021b" stroke-width="4" stroke-linejoin="round" />
|
||||
<path
|
||||
d="M9 23C9 31.2843 15.7157 38 24 38C25.7532 38 27.4361 37.6992 29 37.1465M39 23C39 25.1333 38.5547 27.1626 37.7519 29"
|
||||
stroke="#d0021b" stroke-width="4" stroke-linecap="round" stroke-linejoin="round" />
|
||||
<path d="M24 38V44" stroke="#d0021b" stroke-width="4" stroke-linecap="round"
|
||||
stroke-linejoin="round" />
|
||||
<path d="M42 42L6 6" stroke="#d0021b" stroke-width="4" stroke-linecap="round"
|
||||
stroke-linejoin="round" />
|
||||
</svg>
|
app/icons/voice.svg (new file, 9 lines)
@ -0,0 +1,9 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<svg width="24" height="24" viewBox="0 0 48 48" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<rect x="17" y="4" width="14" height="27" rx="7" fill="none" stroke="#333" stroke-width="4"
|
||||
stroke-linejoin="round" />
|
||||
<path d="M9 23C9 31.2843 15.7157 38 24 38C32.2843 38 39 31.2843 39 23" stroke="#333"
|
||||
stroke-width="4" stroke-linecap="round" stroke-linejoin="round" />
|
||||
<path d="M24 38V44" stroke="#333" stroke-width="4" stroke-linecap="round"
|
||||
stroke-linejoin="round" />
|
||||
</svg>
|
app/lib/audio.ts (new file, 200 lines)
@ -0,0 +1,200 @@
|
||||
export class AudioHandler {
|
||||
private context: AudioContext;
|
||||
private mergeNode: ChannelMergerNode;
|
||||
private analyserData: Uint8Array;
|
||||
public analyser: AnalyserNode;
|
||||
private workletNode: AudioWorkletNode | null = null;
|
||||
private stream: MediaStream | null = null;
|
||||
private source: MediaStreamAudioSourceNode | null = null;
|
||||
private recordBuffer: Int16Array[] = [];
|
||||
private readonly sampleRate = 24000;
|
||||
|
||||
private nextPlayTime: number = 0;
|
||||
private isPlaying: boolean = false;
|
||||
private playbackQueue: AudioBufferSourceNode[] = [];
|
||||
private playBuffer: Int16Array[] = [];
|
||||
|
||||
constructor() {
|
||||
this.context = new AudioContext({ sampleRate: this.sampleRate });
|
||||
// using ChannelMergerNode to get merged audio data, and then get analyser data.
|
||||
this.mergeNode = new ChannelMergerNode(this.context, { numberOfInputs: 2 });
|
||||
this.analyser = new AnalyserNode(this.context, { fftSize: 256 });
|
||||
this.analyserData = new Uint8Array(this.analyser.frequencyBinCount);
|
||||
this.mergeNode.connect(this.analyser);
|
||||
}
|
||||
|
||||
getByteFrequencyData() {
|
||||
this.analyser.getByteFrequencyData(this.analyserData);
|
||||
return this.analyserData;
|
||||
}
|
||||
|
||||
async initialize() {
|
||||
await this.context.audioWorklet.addModule("/audio-processor.js");
|
||||
}
|
||||
|
||||
async startRecording(onChunk: (chunk: Uint8Array) => void) {
|
||||
try {
|
||||
if (!this.workletNode) {
|
||||
await this.initialize();
|
||||
}
|
||||
|
||||
this.stream = await navigator.mediaDevices.getUserMedia({
|
||||
audio: {
|
||||
channelCount: 1,
|
||||
sampleRate: this.sampleRate,
|
||||
echoCancellation: true,
|
||||
noiseSuppression: true,
|
||||
},
|
||||
});
|
||||
|
||||
await this.context.resume();
|
||||
this.source = this.context.createMediaStreamSource(this.stream);
|
||||
this.workletNode = new AudioWorkletNode(
|
||||
this.context,
|
||||
"audio-recorder-processor",
|
||||
);
|
||||
|
||||
this.workletNode.port.onmessage = (event) => {
|
||||
if (event.data.eventType === "audio") {
|
||||
const float32Data = event.data.audioData;
|
||||
const int16Data = new Int16Array(float32Data.length);
|
||||
|
||||
for (let i = 0; i < float32Data.length; i++) {
|
||||
const s = Math.max(-1, Math.min(1, float32Data[i]));
|
||||
int16Data[i] = s < 0 ? s * 0x8000 : s * 0x7fff;
|
||||
}
|
||||
|
||||
const uint8Data = new Uint8Array(int16Data.buffer);
|
||||
onChunk(uint8Data);
|
||||
// save recordBuffer
|
||||
// @ts-ignore
|
||||
this.recordBuffer.push.apply(this.recordBuffer, int16Data);
|
||||
}
|
||||
};
|
||||
|
||||
this.source.connect(this.workletNode);
|
||||
this.source.connect(this.mergeNode, 0, 0);
|
||||
this.workletNode.connect(this.context.destination);
|
||||
|
||||
this.workletNode.port.postMessage({ command: "START_RECORDING" });
|
||||
} catch (error) {
|
||||
console.error("Error starting recording:", error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
stopRecording() {
|
||||
if (!this.workletNode || !this.source || !this.stream) {
|
||||
throw new Error("Recording not started");
|
||||
}
|
||||
|
||||
this.workletNode.port.postMessage({ command: "STOP_RECORDING" });
|
||||
|
||||
this.workletNode.disconnect();
|
||||
this.source.disconnect();
|
||||
this.stream.getTracks().forEach((track) => track.stop());
|
||||
}
|
||||
startStreamingPlayback() {
|
||||
this.isPlaying = true;
|
||||
this.nextPlayTime = this.context.currentTime;
|
||||
}
|
||||
|
||||
stopStreamingPlayback() {
|
||||
this.isPlaying = false;
|
||||
this.playbackQueue.forEach((source) => source.stop());
|
||||
this.playbackQueue = [];
|
||||
this.playBuffer = [];
|
||||
}
|
||||
|
||||
playChunk(chunk: Uint8Array) {
|
||||
if (!this.isPlaying) return;
|
||||
|
||||
const int16Data = new Int16Array(chunk.buffer);
|
||||
// @ts-ignore
|
||||
this.playBuffer.push.apply(this.playBuffer, int16Data); // save playBuffer
|
||||
|
||||
const float32Data = new Float32Array(int16Data.length);
|
||||
for (let i = 0; i < int16Data.length; i++) {
|
||||
float32Data[i] = int16Data[i] / (int16Data[i] < 0 ? 0x8000 : 0x7fff);
|
||||
}
|
||||
|
||||
const audioBuffer = this.context.createBuffer(
|
||||
1,
|
||||
float32Data.length,
|
||||
this.sampleRate,
|
||||
);
|
||||
audioBuffer.getChannelData(0).set(float32Data);
|
||||
|
||||
const source = this.context.createBufferSource();
|
||||
source.buffer = audioBuffer;
|
||||
source.connect(this.context.destination);
|
||||
source.connect(this.mergeNode, 0, 1);
|
||||
|
||||
const chunkDuration = audioBuffer.length / this.sampleRate;
|
||||
|
||||
source.start(this.nextPlayTime);
|
||||
|
||||
this.playbackQueue.push(source);
|
||||
source.onended = () => {
|
||||
const index = this.playbackQueue.indexOf(source);
|
||||
if (index > -1) {
|
||||
this.playbackQueue.splice(index, 1);
|
||||
}
|
||||
};
|
||||
|
||||
this.nextPlayTime += chunkDuration;
|
||||
|
||||
if (this.nextPlayTime < this.context.currentTime) {
|
||||
this.nextPlayTime = this.context.currentTime;
|
||||
}
|
||||
}
|
||||
_saveData(data: Int16Array, bytesPerSample = 16): Blob {
|
||||
const headerLength = 44;
|
||||
const numberOfChannels = 1;
|
||||
const byteLength = data.buffer.byteLength;
|
||||
const header = new Uint8Array(headerLength);
|
||||
const view = new DataView(header.buffer);
|
||||
view.setUint32(0, 1380533830, false); // RIFF identifier 'RIFF'
|
||||
view.setUint32(4, 36 + byteLength, true); // file length minus RIFF identifier length and file description length
|
||||
view.setUint32(8, 1463899717, false); // RIFF type 'WAVE'
|
||||
view.setUint32(12, 1718449184, false); // format chunk identifier 'fmt '
|
||||
view.setUint32(16, 16, true); // format chunk length
|
||||
view.setUint16(20, 1, true); // sample format (raw)
|
||||
view.setUint16(22, numberOfChannels, true); // channel count
|
||||
view.setUint32(24, this.sampleRate, true); // sample rate
|
||||
view.setUint32(28, this.sampleRate * 4, true); // byte rate (sample rate * block align)
|
||||
view.setUint16(32, numberOfChannels * 2, true); // block align (channel count * bytes per sample)
|
||||
view.setUint16(34, bytesPerSample, true); // bits per sample
|
||||
view.setUint32(36, 1684108385, false); // data chunk identifier 'data'
|
||||
view.setUint32(40, byteLength, true); // data chunk length
|
||||
|
||||
// using data.buffer, so no need to setUint16 to view.
|
||||
return new Blob([view, data.buffer], { type: "audio/mpeg" });
|
||||
}
|
||||
savePlayFile() {
|
||||
// @ts-ignore
|
||||
return this._saveData(new Int16Array(this.playBuffer));
|
||||
}
|
||||
saveRecordFile(
|
||||
audioStartMillis: number | undefined,
|
||||
audioEndMillis: number | undefined,
|
||||
) {
|
||||
const startIndex = audioStartMillis
|
||||
? Math.floor((audioStartMillis * this.sampleRate) / 1000)
|
||||
: 0;
|
||||
const endIndex = audioEndMillis
|
||||
? Math.floor((audioEndMillis * this.sampleRate) / 1000)
|
||||
: this.recordBuffer.length;
|
||||
return this._saveData(
|
||||
// @ts-ignore
|
||||
new Int16Array(this.recordBuffer.slice(startIndex, endIndex)),
|
||||
);
|
||||
}
|
||||
async close() {
|
||||
this.recordBuffer = [];
|
||||
this.workletNode?.disconnect();
|
||||
this.source?.disconnect();
|
||||
this.stream?.getTracks().forEach((track) => track.stop());
|
||||
await this.context.close();
|
||||
}
|
||||
}
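AudioHandler.initialize() loads /audio-processor.js and startRecording() instantiates an "audio-recorder-processor" AudioWorkletNode, but the worklet script itself is not part of this diff. A plausible sketch of what it would contain, matching the message shapes AudioHandler expects (this is an assumption, not the file from the PR):

// public/audio-processor.js — illustrative sketch only; not included in this diff.
class AudioRecorderProcessor extends AudioWorkletProcessor {
  constructor() {
    super();
    this.recording = false;
    // Honor the START_RECORDING / STOP_RECORDING commands posted by AudioHandler.
    this.port.onmessage = (event) => {
      if (event.data.command === "START_RECORDING") this.recording = true;
      if (event.data.command === "STOP_RECORDING") this.recording = false;
    };
  }

  process(inputs) {
    const channel = inputs[0]?.[0];
    if (this.recording && channel) {
      // Post Float32 samples back in the { eventType: "audio", audioData } shape
      // that AudioHandler.startRecording() reads.
      this.port.postMessage({ eventType: "audio", audioData: channel.slice(0) });
    }
    return true; // keep the processor alive
  }
}

registerProcessor("audio-recorder-processor", AudioRecorderProcessor);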
|
@ -42,6 +42,9 @@ const cn = {
|
||||
PinToastAction: "查看",
|
||||
Delete: "删除",
|
||||
Edit: "编辑",
|
||||
FullScreen: "全屏",
|
||||
RefreshTitle: "刷新标题",
|
||||
RefreshToast: "已发送刷新标题请求",
|
||||
Speech: "朗读",
|
||||
StopSpeech: "停止",
|
||||
},
|
||||
@ -51,6 +54,7 @@ const cn = {
|
||||
next: "下一个聊天",
|
||||
prev: "上一个聊天",
|
||||
clear: "清除上下文",
|
||||
fork: "复制聊天",
|
||||
del: "删除聊天",
|
||||
},
|
||||
InputActions: {
|
||||
@ -87,6 +91,14 @@ const cn = {
|
||||
SaveAs: "存为面具",
|
||||
},
|
||||
IsContext: "预设提示词",
|
||||
ShortcutKey: {
|
||||
Title: "键盘快捷方式",
|
||||
newChat: "打开新聊天",
|
||||
focusInput: "聚焦输入框",
|
||||
copyLastMessage: "复制最后一个回复",
|
||||
copyLastCode: "复制最后一个代码块",
|
||||
showShortcutKey: "显示快捷方式",
|
||||
},
|
||||
},
|
||||
Export: {
|
||||
Title: "分享聊天记录",
|
||||
@ -560,6 +572,39 @@ const cn = {
|
||||
SubTitle: "音频转换引擎",
|
||||
},
|
||||
},
|
||||
Realtime: {
|
||||
Enable: {
|
||||
Title: "实时聊天",
|
||||
SubTitle: "开启实时聊天功能",
|
||||
},
|
||||
Provider: {
|
||||
Title: "模型服务商",
|
||||
SubTitle: "切换不同的服务商",
|
||||
},
|
||||
Model: {
|
||||
Title: "模型",
|
||||
SubTitle: "选择一个模型",
|
||||
},
|
||||
ApiKey: {
|
||||
Title: "API Key",
|
||||
SubTitle: "API Key",
|
||||
Placeholder: "API Key",
|
||||
},
|
||||
Azure: {
|
||||
Endpoint: {
|
||||
Title: "接口地址",
|
||||
SubTitle: "接口地址",
|
||||
},
|
||||
Deployment: {
|
||||
Title: "部署名称",
|
||||
SubTitle: "部署名称",
|
||||
},
|
||||
},
|
||||
Temperature: {
|
||||
Title: "随机性 (temperature)",
|
||||
SubTitle: "值越大,回复越随机",
|
||||
},
|
||||
},
|
||||
},
|
||||
Store: {
|
||||
DefaultTopic: "新的聊天",
|
||||
|
@ -44,6 +44,9 @@ const en: LocaleType = {
|
||||
PinToastAction: "View",
|
||||
Delete: "Delete",
|
||||
Edit: "Edit",
|
||||
FullScreen: "FullScreen",
|
||||
RefreshTitle: "Refresh Title",
|
||||
RefreshToast: "Title refresh request sent",
|
||||
Speech: "Play",
|
||||
StopSpeech: "Stop",
|
||||
},
|
||||
@ -53,6 +56,7 @@ const en: LocaleType = {
|
||||
next: "Next Chat",
|
||||
prev: "Previous Chat",
|
||||
clear: "Clear Context",
|
||||
fork: "Copy Chat",
|
||||
del: "Delete Chat",
|
||||
},
|
||||
InputActions: {
|
||||
@ -89,6 +93,14 @@ const en: LocaleType = {
|
||||
SaveAs: "Save as Mask",
|
||||
},
|
||||
IsContext: "Contextual Prompt",
|
||||
ShortcutKey: {
|
||||
Title: "Keyboard Shortcuts",
|
||||
newChat: "Open New Chat",
|
||||
focusInput: "Focus Input Field",
|
||||
copyLastMessage: "Copy Last Reply",
|
||||
copyLastCode: "Copy Last Code Block",
|
||||
showShortcutKey: "Show Shortcuts",
|
||||
},
|
||||
},
|
||||
Export: {
|
||||
Title: "Export Messages",
|
||||
@ -568,6 +580,39 @@ const en: LocaleType = {
|
||||
SubTitle: "Text-to-Speech Engine",
|
||||
},
|
||||
},
|
||||
Realtime: {
|
||||
Enable: {
|
||||
Title: "Realtime Chat",
|
||||
SubTitle: "Enable realtime chat feature",
|
||||
},
|
||||
Provider: {
|
||||
Title: "Model Provider",
|
||||
SubTitle: "Switch between different providers",
|
||||
},
|
||||
Model: {
|
||||
Title: "Model",
|
||||
SubTitle: "Select a model",
|
||||
},
|
||||
ApiKey: {
|
||||
Title: "API Key",
|
||||
SubTitle: "API Key",
|
||||
Placeholder: "API Key",
|
||||
},
|
||||
Azure: {
|
||||
Endpoint: {
|
||||
Title: "Endpoint",
|
||||
SubTitle: "Endpoint",
|
||||
},
|
||||
Deployment: {
|
||||
Title: "Deployment Name",
|
||||
SubTitle: "Deployment Name",
|
||||
},
|
||||
},
|
||||
Temperature: {
|
||||
Title: "Randomness (temperature)",
|
||||
SubTitle: "Higher values result in more random responses",
|
||||
},
|
||||
},
|
||||
},
|
||||
Store: {
|
||||
DefaultTopic: "New Conversation",
|
||||
|
@ -1,13 +1,15 @@
|
||||
import {
|
||||
trimTopic,
|
||||
getMessageTextContent,
|
||||
isFunctionCallModel,
|
||||
} from "../utils";
|
||||
import { getMessageTextContent, trimTopic } from "../utils";
|
||||
|
||||
import Locale, { getLang } from "../locales";
|
||||
import { indexedDBStorage } from "@/app/utils/indexedDB-storage";
|
||||
import { nanoid } from "nanoid";
|
||||
import type {
|
||||
ClientApi,
|
||||
MultimodalContent,
|
||||
RequestMessage,
|
||||
} from "../client/api";
|
||||
import { getClientApi } from "../client/api";
|
||||
import { ChatControllerPool } from "../client/controller";
|
||||
import { showToast } from "../components/ui-lib";
|
||||
import { ModelConfig, ModelType, useAppConfig } from "./config";
|
||||
import { createEmptyMask, Mask } from "./mask";
|
||||
import {
|
||||
DEFAULT_INPUT_TEMPLATE,
|
||||
DEFAULT_MODELS,
|
||||
@ -16,29 +18,24 @@ import {
|
||||
StoreKey,
|
||||
SUMMARIZE_MODEL,
|
||||
GEMINI_SUMMARIZE_MODEL,
|
||||
MYFILES_BROWSER_TOOLS_SYSTEM_PROMPT,
|
||||
ServiceProvider,
|
||||
} from "../constant";
|
||||
import Locale, { getLang } from "../locales";
|
||||
import { isDalle3, safeLocalStorage } from "../utils";
|
||||
import { getClientApi } from "../client/api";
|
||||
import type {
|
||||
ClientApi,
|
||||
RequestMessage,
|
||||
MultimodalContent,
|
||||
} from "../client/api";
|
||||
import { ChatControllerPool } from "../client/controller";
|
||||
import { prettyObject } from "../utils/format";
|
||||
import { createPersistStore } from "../utils/store";
|
||||
import { estimateTokenLength } from "../utils/token";
|
||||
import { nanoid } from "nanoid";
|
||||
import { Plugin, usePluginStore } from "../store/plugin";
|
||||
import { ModelConfig, ModelType, useAppConfig } from "./config";
|
||||
import { useAccessStore } from "./access";
|
||||
import { collectModelsWithDefaultModel } from "../utils/model";
|
||||
import { createEmptyMask, Mask } from "./mask";
|
||||
import { FileInfo } from "../client/platforms/utils";
|
||||
import { usePluginStore } from "./plugin";
|
||||
|
||||
export interface ChatToolMessage {
|
||||
toolName: string;
|
||||
toolInput?: string;
|
||||
}
|
||||
import { createPersistStore } from "../utils/store";
|
||||
import { FileInfo } from "../client/platforms/utils";
|
||||
import { collectModelsWithDefaultModel } from "../utils/model";
|
||||
import { useAccessStore } from "./access";
|
||||
|
||||
const localStorage = safeLocalStorage();
|
||||
|
||||
@ -52,6 +49,7 @@ export type ChatMessageTool = {
|
||||
};
|
||||
content?: string;
|
||||
isError?: boolean;
|
||||
errorMsg?: string;
|
||||
};
|
||||
|
||||
export type ChatMessage = RequestMessage & {
|
||||
@ -61,6 +59,8 @@ export type ChatMessage = RequestMessage & {
|
||||
isError?: boolean;
|
||||
id: string;
|
||||
model?: ModelType;
|
||||
tools?: ChatMessageTool[];
|
||||
audio_url?: string;
|
||||
};
|
||||
|
||||
export function createMessage(override: Partial<ChatMessage>): ChatMessage {
|
||||
@ -122,9 +122,12 @@ function createEmptySession(): ChatSession {
|
||||
};
|
||||
}
|
||||
|
||||
-function getSummarizeModel(currentModel: string) {
+function getSummarizeModel(
+currentModel: string,
+providerName: string,
+): string[] {
// if it is using gpt-* models, force to use 4o-mini to summarize
-if (currentModel.startsWith("gpt")) {
+if (currentModel.startsWith("gpt") || currentModel.startsWith("chatgpt")) {
|
||||
const configStore = useAppConfig.getState();
|
||||
const accessStore = useAccessStore.getState();
|
||||
const allModel = collectModelsWithDefaultModel(
|
||||
@ -135,12 +138,17 @@ function getSummarizeModel(currentModel: string) {
|
||||
const summarizeModel = allModel.find(
|
||||
(m) => m.name === SUMMARIZE_MODEL && m.available,
|
||||
);
|
||||
-return summarizeModel?.name ?? currentModel;
+if (summarizeModel) {
+return [
+summarizeModel.name,
+summarizeModel.provider?.providerName as string,
+];
+}
|
||||
}
|
||||
if (currentModel.startsWith("gemini")) {
|
||||
-return GEMINI_SUMMARIZE_MODEL;
+return [GEMINI_SUMMARIZE_MODEL, ServiceProvider.Google];
|
||||
}
|
||||
-return currentModel;
+return [currentModel, providerName];
|
||||
}
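getSummarizeModel now returns a [model, providerName] pair instead of a bare model name, so call sites destructure both values. A hedged sketch of the expected call pattern (the surrounding summarizeSession code is not shown in this excerpt; variable names are illustrative):

// Usage sketch only.
const [summarizeModel, summarizeProviderName] = getSummarizeModel(
  session.mask.modelConfig.model,
  session.mask.modelConfig.providerName,
);
// Both values then flow into the summarize request config, e.g.
// { ...modelConfig, model: summarizeModel, providerName: summarizeProviderName }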
|
||||
|
||||
function countMessages(msgs: ChatMessage[]) {
|
||||
@ -197,6 +205,7 @@ function fillTemplateWith(input: string, modelConfig: ModelConfig) {
|
||||
const DEFAULT_CHAT_STATE = {
|
||||
sessions: [createEmptySession()],
|
||||
currentSessionIndex: 0,
|
||||
lastInput: "",
|
||||
};
|
||||
|
||||
export const useChatStore = createPersistStore(
|
||||
@ -210,6 +219,28 @@ export const useChatStore = createPersistStore(
|
||||
}
|
||||
|
||||
const methods = {
|
||||
forkSession() {
|
||||
// get the current session
|
||||
const currentSession = get().currentSession();
|
||||
if (!currentSession) return;
|
||||
|
||||
const newSession = createEmptySession();
|
||||
|
||||
newSession.topic = currentSession.topic;
|
||||
newSession.messages = [...currentSession.messages];
|
||||
newSession.mask = {
|
||||
...currentSession.mask,
|
||||
modelConfig: {
|
||||
...currentSession.mask.modelConfig,
|
||||
},
|
||||
};
|
||||
|
||||
set((state) => ({
|
||||
currentSessionIndex: 0,
|
||||
sessions: [newSession, ...state.sessions],
|
||||
}));
|
||||
},
|
||||
|
||||
clearSessions() {
|
||||
set(() => ({
|
||||
sessions: [createEmptySession()],
|
||||
@ -335,13 +366,13 @@ export const useChatStore = createPersistStore(
|
||||
return session;
|
||||
},
|
||||
|
||||
-onNewMessage(message: ChatMessage) {
-get().updateCurrentSession((session) => {
+onNewMessage(message: ChatMessage, targetSession: ChatSession) {
+get().updateTargetSession(targetSession, (session) => {
|
||||
session.messages = session.messages.concat();
|
||||
session.lastUpdate = Date.now();
|
||||
});
|
||||
-get().updateStat(message);
-get().summarizeSession();
+get().updateStat(message, targetSession);
+get().summarizeSession(false, targetSession);
|
||||
},
|
||||
|
||||
async onUserInput(
|
||||
@ -359,44 +390,39 @@ export const useChatStore = createPersistStore(
|
||||
|
||||
if (attachImages && attachImages.length > 0) {
|
||||
mContent = [
|
||||
-{
-type: "text",
-text: userContent,
-},
+...(userContent
+? [{ type: "text" as const, text: userContent }]
+: []),
+...attachImages.map((url) => ({
+type: "image_url" as const,
+image_url: { url },
+})),
];
-mContent = mContent.concat(
-attachImages.map((url) => {
-return {
-type: "image_url",
-image_url: {
-url: url,
-},
-};
-}),
-);
|
||||
}
|
||||
|
||||
// add file link
|
||||
if (attachFiles && attachFiles.length > 0) {
|
||||
mContent += ` [${attachFiles[0].originalFilename}](${attachFiles[0].filePath})`;
|
||||
}
|
||||
|
||||
let userMessage: ChatMessage = createMessage({
|
||||
role: "user",
|
||||
content: mContent,
|
||||
fileInfos: attachFiles,
|
||||
});
|
||||
|
||||
const botMessage: ChatMessage = createMessage({
|
||||
role: "assistant",
|
||||
streaming: true,
|
||||
model: modelConfig.model,
|
||||
toolMessages: [],
|
||||
});
|
||||
const api: ClientApi = getClientApi(modelConfig.providerName);
|
||||
const isEnableRAG =
|
||||
session.attachFiles && session.attachFiles.length > 0;
|
||||
// get recent messages
|
||||
const recentMessages = get().getMessagesWithMemory();
|
||||
const sendMessages = recentMessages.concat(userMessage);
|
||||
-const messageIndex = get().currentSession().messages.length + 1;
+const messageIndex = session.messages.length + 1;
|
||||
|
||||
const config = useAppConfig.getState();
|
||||
const pluginConfig = useAppConfig.getState().pluginConfig;
|
||||
@ -410,148 +436,86 @@ export const useChatStore = createPersistStore(
m.enable,
);
// save user's and bot's message
get().updateCurrentSession((session) => {
get().updateTargetSession(session, (session) => {
const savedUserMessage = {
...userMessage,
content: mContent,
};
session.messages.push(savedUserMessage);
session.messages.push(botMessage);
session.messages = session.messages.concat([
savedUserMessage,
botMessage,
]);
});
if (
config.pluginConfig.enable &&
session.mask.usePlugins &&
(allPlugins.length > 0 || isEnableRAG) &&
isFunctionCallModel(modelConfig.model)
) {
console.log("[ToolAgent] start");
let pluginToolNames = allPlugins.map((m) => m.toolName);
if (isEnableRAG) {
// other plugins will affect rag
// clear existing plugins here
pluginToolNames = [];
pluginToolNames.push("myfiles_browser");
}
const agentCall = () => {
api.llm.toolAgentChat({
chatSessionId: session.id,
messages: sendMessages,
config: { ...modelConfig, stream: true },
agentConfig: { ...pluginConfig, useTools: pluginToolNames },
onUpdate(message) {
botMessage.streaming = true;
if (message) {
botMessage.content = message;
}
get().updateCurrentSession((session) => {
session.messages = session.messages.concat();
});
},
onToolUpdate(toolName, toolInput) {
botMessage.streaming = true;
if (toolName && toolInput) {
botMessage.toolMessages!.push({
toolName,
toolInput,
});
}
get().updateCurrentSession((session) => {
session.messages = session.messages.concat();
});
},
onFinish(message) {
botMessage.streaming = false;
if (message) {
botMessage.content = message;
get().onNewMessage(botMessage);
}
ChatControllerPool.remove(session.id, botMessage.id);
},
onError(error) {
const isAborted = error.message.includes("aborted");
botMessage.content +=
"\n\n" +
prettyObject({
error: true,
message: error.message,
});
botMessage.streaming = false;
userMessage.isError = !isAborted;
botMessage.isError = !isAborted;
get().updateCurrentSession((session) => {
session.messages = session.messages.concat();
});
ChatControllerPool.remove(
session.id,
botMessage.id ?? messageIndex,
);

console.error("[Chat] failed ", error);
},
onController(controller) {
// collect controller for stop/retry
ChatControllerPool.addController(
session.id,
botMessage.id ?? messageIndex,
controller,
);
},
const api: ClientApi = getClientApi(modelConfig.providerName);
// make request
api.llm.chat({
messages: sendMessages,
config: { ...modelConfig, stream: true },
onUpdate(message) {
botMessage.streaming = true;
if (message) {
botMessage.content = message;
}
get().updateTargetSession(session, (session) => {
session.messages = session.messages.concat();
});
};
agentCall();
} else {
// make request
api.llm.chat({
messages: sendMessages,
config: { ...modelConfig, stream: true },
onUpdate(message) {
botMessage.streaming = true;
if (message) {
botMessage.content = message;
},
onFinish(message) {
botMessage.streaming = false;
if (message) {
botMessage.content = message;
botMessage.date = new Date().toLocaleString();
get().onNewMessage(botMessage, session);
}
ChatControllerPool.remove(session.id, botMessage.id);
},
onBeforeTool(tool: ChatMessageTool) {
(botMessage.tools = botMessage?.tools || []).push(tool);
get().updateTargetSession(session, (session) => {
session.messages = session.messages.concat();
});
},
onAfterTool(tool: ChatMessageTool) {
botMessage?.tools?.forEach((t, i, tools) => {
if (tool.id == t.id) {
tools[i] = { ...tool };
}
get().updateCurrentSession((session) => {
session.messages = session.messages.concat();
});
get().updateTargetSession(session, (session) => {
session.messages = session.messages.concat();
});
},
onError(error) {
const isAborted = error.message?.includes?.("aborted");
botMessage.content +=
"\n\n" +
prettyObject({
error: true,
message: error.message,
});
},
onFinish(message) {
botMessage.streaming = false;
if (message) {
botMessage.content = message;
get().onNewMessage(botMessage);
}
ChatControllerPool.remove(session.id, botMessage.id);
},
onError(error) {
const isAborted = error.message.includes("aborted");
botMessage.content +=
"\n\n" +
prettyObject({
error: true,
message: error.message,
});
botMessage.streaming = false;
userMessage.isError = !isAborted;
botMessage.isError = !isAborted;
get().updateCurrentSession((session) => {
session.messages = session.messages.concat();
});
ChatControllerPool.remove(
session.id,
botMessage.id ?? messageIndex,
);
botMessage.streaming = false;
userMessage.isError = !isAborted;
botMessage.isError = !isAborted;
get().updateTargetSession(session, (session) => {
session.messages = session.messages.concat();
});
ChatControllerPool.remove(
session.id,
botMessage.id ?? messageIndex,
);

console.error("[Chat] failed ", error);
},
onController(controller) {
// collect controller for stop/retry
ChatControllerPool.addController(
session.id,
botMessage.id ?? messageIndex,
controller,
);
},
});
}
console.error("[Chat] failed ", error);
},
onController(controller) {
// collect controller for stop/retry
ChatControllerPool.addController(
session.id,
botMessage.id ?? messageIndex,
controller,
);
},
});
},
getMemoryPrompt() {
@ -579,26 +543,17 @@ export const useChatStore = createPersistStore(
// system prompts, to get close to OpenAI Web ChatGPT
const shouldInjectSystemPrompts =
modelConfig.enableInjectSystemPrompts &&
session.mask.modelConfig.model.startsWith("gpt-");
(session.mask.modelConfig.model.startsWith("gpt-") ||
session.mask.modelConfig.model.startsWith("chatgpt-"));

var systemPrompts: ChatMessage[] = [];
var template = DEFAULT_SYSTEM_TEMPLATE;
if (session.attachFiles && session.attachFiles.length > 0) {
template += MYFILES_BROWSER_TOOLS_SYSTEM_PROMPT;
session.attachFiles.forEach((file) => {
template += `filename: \`${file.originalFilename}\`
partialDocument: \`\`\`
${file.partial}
\`\`\``;
});
}
systemPrompts = shouldInjectSystemPrompts
? [
createMessage({
role: "system",
content: fillTemplateWith("", {
...modelConfig,
template: template,
template: DEFAULT_SYSTEM_TEMPLATE,
}),
}),
]
@ -674,23 +629,33 @@ ${file.partial}
set(() => ({ sessions }));
},

resetSession() {
get().updateCurrentSession((session) => {
resetSession(session: ChatSession) {
get().updateTargetSession(session, (session) => {
session.messages = [];
session.memoryPrompt = "";
});
},

summarizeSession() {
summarizeSession(
refreshTitle: boolean = false,
targetSession: ChatSession,
) {
const config = useAppConfig.getState();
const session = get().currentSession();
const session = targetSession;
const modelConfig = session.mask.modelConfig;
// skip summarize when using dalle3?
if (isDalle3(modelConfig.model)) {
return;
}

const api: ClientApi = getClientApi(modelConfig.providerName);
// if not config compressModel, then using getSummarizeModel
const [model, providerName] = modelConfig.compressModel
? [modelConfig.compressModel, modelConfig.compressProviderName]
: getSummarizeModel(
session.mask.modelConfig.model,
session.mask.modelConfig.providerName,
);
const api: ClientApi = getClientApi(providerName as ServiceProvider);
// remove error messages if any
const messages = session.messages;
@ -698,29 +663,43 @@ ${file.partial}
// should summarize topic after chating more than 50 words
const SUMMARIZE_MIN_LEN = 50;
if (
!process.env.NEXT_PUBLIC_DISABLE_AUTOGENERATETITLE &&
config.enableAutoGenerateTitle &&
session.topic === DEFAULT_TOPIC &&
countMessages(messages) >= SUMMARIZE_MIN_LEN
(!process.env.NEXT_PUBLIC_DISABLE_AUTOGENERATETITLE &&
config.enableAutoGenerateTitle &&
session.topic === DEFAULT_TOPIC &&
countMessages(messages) >= SUMMARIZE_MIN_LEN) ||
refreshTitle
) {
const topicMessages = messages.concat(
createMessage({
role: "user",
content: Locale.Store.Prompt.Topic,
}),
const startIndex = Math.max(
0,
messages.length - modelConfig.historyMessageCount,
);
const topicMessages = messages
.slice(
startIndex < messages.length ? startIndex : messages.length - 1,
messages.length,
)
.concat(
createMessage({
role: "user",
content: Locale.Store.Prompt.Topic,
}),
);
api.llm.chat({
messages: topicMessages,
config: {
model: getSummarizeModel(session.mask.modelConfig.model),
model,
stream: false,
providerName,
},
onFinish(message) {
get().updateCurrentSession(
(session) =>
(session.topic =
message.length > 0 ? trimTopic(message) : DEFAULT_TOPIC),
);
onFinish(message, responseRes) {
if (responseRes?.status === 200) {
get().updateTargetSession(
session,
(session) =>
(session.topic =
message.length > 0 ? trimTopic(message) : DEFAULT_TOPIC),
);
}
},
});
}
@ -734,7 +713,7 @@ ${file.partial}

const historyMsgLength = countMessages(toBeSummarizedMsgs);

if (historyMsgLength > modelConfig?.max_tokens ?? 4000) {
if (historyMsgLength > (modelConfig?.max_tokens || 4000)) {
const n = toBeSummarizedMsgs.length;
toBeSummarizedMsgs = toBeSummarizedMsgs.slice(
Math.max(0, n - modelConfig.historyMessageCount),
@ -775,17 +754,20 @@ ${file.partial}
config: {
...modelcfg,
stream: true,
model: getSummarizeModel(session.mask.modelConfig.model),
model,
providerName,
},
onUpdate(message) {
session.memoryPrompt = message;
},
onFinish(message) {
// console.log("[Memory] ", message);
get().updateCurrentSession((session) => {
session.lastSummarizeIndex = lastSummarizeIndex;
session.memoryPrompt = message; // Update the memory prompt for stored it in local storage
});
onFinish(message, responseRes) {
if (responseRes?.status === 200) {
console.log("[Memory] ", message);
get().updateTargetSession(session, (session) => {
session.lastSummarizeIndex = lastSummarizeIndex;
session.memoryPrompt = message; // Update the memory prompt for stored it in local storage
});
}
},
onError(err) {
console.error("[Summarize] ", err);
@ -794,31 +776,39 @@ ${file.partial}
}
},

updateStat(message: ChatMessage) {
get().updateCurrentSession((session) => {
updateStat(message: ChatMessage, session: ChatSession) {
get().updateTargetSession(session, (session) => {
session.stat.charCount += message.content.length;
// TODO: should update chat count and word count
});
},

updateCurrentSession(updater: (session: ChatSession) => void) {
updateTargetSession(
targetSession: ChatSession,
updater: (session: ChatSession) => void,
) {
const sessions = get().sessions;
const index = get().currentSessionIndex;
const index = sessions.findIndex((s) => s.id === targetSession.id);
if (index < 0) return;
updater(sessions[index]);
set(() => ({ sessions }));
},

clearAllData() {
async clearAllData() {
await indexedDBStorage.clear();
localStorage.clear();
location.reload();
},
setLastInput(lastInput: string) {
set({
lastInput,
});
},
};
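The switch from updateCurrentSession to updateTargetSession matters for streaming callbacks: they now write into the session they started in, found by id, rather than whatever session happens to be selected when the callback fires. A toy sketch of that lookup-by-id pattern, with an illustrative SessionLike type standing in for ChatSession:

```ts
interface SessionLike {
  id: string;
  lastUpdate: number;
}

// Apply an update to the matching session by id; no-op if it was deleted.
function updateById<T extends SessionLike>(
  sessions: T[],
  target: T,
  updater: (s: T) => void,
): void {
  const index = sessions.findIndex((s) => s.id === target.id);
  if (index < 0) return;
  updater(sessions[index]);
}
```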
return methods;
},
{
name: StoreKey.Chat,
version: 3.1,
version: 3.3,
migrate(persistedState, version) {
const state = persistedState as any;
const newState = JSON.parse(
@ -865,6 +855,24 @@ ${file.partial}
});
}

// add default summarize model for every session
if (version < 3.2) {
newState.sessions.forEach((s) => {
const config = useAppConfig.getState();
s.mask.modelConfig.compressModel = config.modelConfig.compressModel;
s.mask.modelConfig.compressProviderName =
config.modelConfig.compressProviderName;
});
}
// revert default summarize model for every session
if (version < 3.3) {
newState.sessions.forEach((s) => {
const config = useAppConfig.getState();
s.mask.modelConfig.compressModel = "";
s.mask.modelConfig.compressProviderName = "";
});
}

return newState as any;
},
},
@ -17,6 +17,7 @@ import {
ServiceProvider,
} from "../constant";
import { createPersistStore } from "../utils/store";
import type { Voice } from "rt-client";

export type ModelType = (typeof DEFAULT_MODELS)[number]["name"];
export type TTSModelType = (typeof DEFAULT_TTS_MODELS)[number];
@ -105,6 +106,19 @@ export const DEFAULT_CONFIG = {
enable: false,
engine: DEFAULT_STT_ENGINE,
},

realtimeConfig: {
enable: false,
provider: "OpenAI" as ServiceProvider,
model: "gpt-4o-realtime-preview-2024-10-01",
apiKey: "",
azure: {
endpoint: "",
deployment: "",
},
temperature: 0.9,
voice: "alloy" as Voice,
},
};

export type ChatConfig = typeof DEFAULT_CONFIG;
@ -113,6 +127,7 @@ export type ModelConfig = ChatConfig["modelConfig"];
export type PluginConfig = ChatConfig["pluginConfig"];
export type TTSConfig = ChatConfig["ttsConfig"];
export type STTConfig = ChatConfig["sttConfig"];
export type RealtimeConfig = ChatConfig["realtimeConfig"];

export function limitNumber(
x: number,
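The new realtimeConfig block defaults to the OpenAI realtime preview model with an optional Azure endpoint/deployment pair. A hedged sketch of how calling code might branch on those fields; the resolver helper and import paths are illustrative assumptions, while useAppConfig, ServiceProvider, and the config fields come from this diff:

```ts
import { ServiceProvider } from "../constant";
import { useAppConfig } from "../store/config"; // assumed path

// Decide which realtime endpoint settings apply to the session.
export function resolveRealtimeTarget() {
  const { realtimeConfig } = useAppConfig.getState();
  if (realtimeConfig.provider === ServiceProvider.Azure) {
    return {
      kind: "azure" as const,
      endpoint: realtimeConfig.azure.endpoint,
      deployment: realtimeConfig.azure.deployment,
      voice: realtimeConfig.voice,
    };
  }
  return {
    kind: "openai" as const,
    model: realtimeConfig.model,
    voice: realtimeConfig.voice,
  };
}
```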
@ -17,6 +17,14 @@ export type Plugin = {
builtin: boolean;
enable: boolean;
onlyNodeRuntime: boolean;

title: string;
version: string;
content: string;
authType?: string;
authLocation?: string;
authHeader?: string;
authToken?: string;
};

export const DEFAULT_PLUGIN_STATE = {

app/utils.ts
@ -278,6 +278,24 @@ export function isDalle3(model: string) {
return "dall-e-3" === model;
}

export function showPlugins(provider: ServiceProvider, model: string) {
if (
provider == ServiceProvider.OpenAI ||
provider == ServiceProvider.Azure ||
provider == ServiceProvider.Moonshot ||
provider == ServiceProvider.ChatGLM
) {
return true;
}
if (provider == ServiceProvider.Anthropic && !model.includes("claude-2")) {
return true;
}
if (provider == ServiceProvider.Google && !model.includes("vision")) {
return true;
}
return false;
}

export function isSupportRAGModel(modelName: string) {
const specialModels = [
"gpt-4-turbo",
@ -328,24 +346,6 @@ export function isFunctionCallModel(modelName: string) {
).some((model) => model.name === modelName);
}

export function showPlugins(provider: ServiceProvider, model: string) {
if (
provider == ServiceProvider.OpenAI ||
provider == ServiceProvider.Azure ||
provider == ServiceProvider.Moonshot ||
provider == ServiceProvider.ChatGLM
) {
return true;
}
if (provider == ServiceProvider.Anthropic && !model.includes("claude-2")) {
return true;
}
if (provider == ServiceProvider.Google && !model.includes("vision")) {
return true;
}
return false;
}

export function fetch(
url: string,
options?: Record<string, unknown>,
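showPlugins, moved earlier in app/utils.ts by this hunk, allows OpenAI, Azure, Moonshot, and ChatGLM outright and gates Anthropic and Google by model. A small usage sketch combining it with isFunctionCallModel from the same file to decide whether a plugin toggle should be offered; the helper name and import paths are assumptions:

```ts
import { ServiceProvider } from "./constant"; // assumed sibling module
import { isFunctionCallModel, showPlugins } from "./utils";

// Offer plugins only when the provider exposes them for this model and
// the model itself can do function calling.
export function canOfferPlugins(
  provider: ServiceProvider,
  model: string,
): boolean {
  return showPlugins(provider, model) && isFunctionCallModel(model);
}
```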
@ -37,7 +37,8 @@
"@svgr/webpack": "^6.5.1",
"@vercel/analytics": "^0.1.11",
"@vercel/speed-insights": "^1.0.2",
"axios": "^0.26.0",
"axios": "^1.7.5",
"clsx": "^2.1.1",
"cheerio": "^1.0.0-rc.12",
"d3-dsv": "2",
"duck-duck-scrape": "^2.2.4",
@ -76,7 +77,8 @@
"spark-md5": "^3.0.2",
"srt-parser-2": "^1.2.3",
"use-debounce": "^9.0.4",
"zustand": "^4.3.8"
"zustand": "^4.3.8",
"rt-client": "https://github.com/Azure-Samples/aoai-realtime-audio-sdk/releases/download/js/v0.5.0/rt-client-0.5.0.tgz"
},
"devDependencies": {
"@tauri-apps/api": "^1.6.0",

public/audio-processor.js
Normal file
@ -0,0 +1,48 @@
// @ts-nocheck
class AudioRecorderProcessor extends AudioWorkletProcessor {
constructor() {
super();
this.isRecording = false;
this.bufferSize = 2400; // 100ms at 24kHz
this.currentBuffer = [];

this.port.onmessage = (event) => {
if (event.data.command === "START_RECORDING") {
this.isRecording = true;
} else if (event.data.command === "STOP_RECORDING") {
this.isRecording = false;

if (this.currentBuffer.length > 0) {
this.sendBuffer();
}
}
};
}

sendBuffer() {
if (this.currentBuffer.length > 0) {
const audioData = new Float32Array(this.currentBuffer);
this.port.postMessage({
eventType: "audio",
audioData: audioData,
});
this.currentBuffer = [];
}
}

process(inputs) {
const input = inputs[0];
if (input.length > 0 && this.isRecording) {
const audioData = input[0];

this.currentBuffer.push(...audioData);

if (this.currentBuffer.length >= this.bufferSize) {
this.sendBuffer();
}
}
return true;
}
}

registerProcessor("audio-recorder-processor", AudioRecorderProcessor);
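The new file above only defines the processor side of the recorder. A hedged sketch of the caller-side wiring with standard Web Audio APIs; the 24 kHz sample rate matches the buffer comment, and the command and event names mirror the processor's message protocol, but the startRecording helper itself is an assumption, not part of this diff:

```ts
// Hypothetical caller for public/audio-processor.js.
export async function startRecording(onChunk: (pcm: Float32Array) => void) {
  const context = new AudioContext({ sampleRate: 24000 });
  await context.audioWorklet.addModule("/audio-processor.js");

  const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
  const source = context.createMediaStreamSource(stream);
  const recorder = new AudioWorkletNode(context, "audio-recorder-processor");

  // Receive the ~100 ms Float32 chunks emitted by sendBuffer().
  recorder.port.onmessage = (event) => {
    if (event.data.eventType === "audio") {
      onChunk(event.data.audioData as Float32Array);
    }
  };

  // Keep the node in the rendering graph; it writes no output, so this stays silent.
  source.connect(recorder);
  recorder.connect(context.destination);
  recorder.port.postMessage({ command: "START_RECORDING" });

  // Returned cleanup stops recording and releases the microphone.
  return () => {
    recorder.port.postMessage({ command: "STOP_RECORDING" });
    source.disconnect();
    stream.getTracks().forEach((t) => t.stop());
    void context.close();
  };
}
```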
public/plugins.json
Normal file
@ -0,0 +1,17 @@
[
{
"id": "dalle3",
"name": "Dalle3",
"schema": "https://ghp.ci/https://raw.githubusercontent.com/ChatGPTNextWeb/NextChat-Awesome-Plugins/main/plugins/dalle/openapi.json"
},
{
"id": "arxivsearch",
"name": "ArxivSearch",
"schema": "https://ghp.ci/https://raw.githubusercontent.com/ChatGPTNextWeb/NextChat-Awesome-Plugins/main/plugins/arxivsearch/openapi.json"
},
{
"id": "duckduckgolite",
"name": "DuckDuckGoLiteSearch",
"schema": "https://ghp.ci/https://raw.githubusercontent.com/ChatGPTNextWeb/NextChat-Awesome-Plugins/main/plugins/duckduckgolite/openapi.json"
}
]
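plugins.json is a static manifest of built-in plugins, each pointing at an OpenAPI schema behind a ghp.ci mirror. A speculative sketch of how such a manifest could be fetched and its schemas loaded at runtime; the loader is an assumption and not something this change adds:

```ts
interface PluginManifestEntry {
  id: string;
  name: string;
  schema: string; // URL of the plugin's OpenAPI document
}

// Fetch the manifest, then each plugin's OpenAPI schema, keyed by id.
export async function loadPluginSchemas(): Promise<Record<string, unknown>> {
  const res = await fetch("/plugins.json");
  const entries: PluginManifestEntry[] = await res.json();
  const schemas: Record<string, unknown> = {};
  for (const entry of entries) {
    schemas[entry.id] = await (await fetch(entry.schema)).json();
  }
  return schemas;
}
```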
@ -15,6 +15,10 @@ self.addEventListener("install", function (event) {
);
});

function jsonify(data) {
return new Response(JSON.stringify(data), { headers: { 'content-type': 'application/json' } })
}

async function upload(request, url) {
const formData = await request.formData()
const file = formData.getAll('file')[0]
@ -33,13 +37,13 @@ async function upload(request, url) {
'server': 'ServiceWorker',
}
}))
return Response.json({ code: 0, data: fileUrl })
return jsonify({ code: 0, data: fileUrl })
}

async function remove(request, url) {
const cache = await caches.open(CHATGPT_NEXT_WEB_FILE_CACHE)
const res = await cache.delete(request.url)
return Response.json({ code: 0 })
return jsonify({ code: 0 })
}

self.addEventListener("fetch", (e) => {
@ -56,4 +60,3 @@ self.addEventListener("fetch", (e) => {
}
}
});
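The service-worker hunks replace the static Response.json() helper with a local jsonify(); Response.json() is a relatively recent addition to the platform, so building the Response by hand keeps the worker usable on older runtimes. For reference, a typed restatement of the same helper (the TypeScript signature is illustrative; the worker itself stays plain JavaScript):

```ts
// Same behaviour as the worker's jsonify(): serialize and label as JSON.
function jsonify(data: unknown): Response {
  return new Response(JSON.stringify(data), {
    headers: { "content-type": "application/json" },
  });
}
```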
yarn.lock
@ -4323,12 +4323,14 @@ axe-core@^4.9.1:
resolved "https://registry.yarnpkg.com/axe-core/-/axe-core-4.9.1.tgz#fcd0f4496dad09e0c899b44f6c4bb7848da912ae"
integrity sha512-QbUdXJVTpvUTHU7871ppZkdOLBeGUKBQWHkHrvN2V9IQWGMt61zf3B45BtzjxEJzYuj0JBjBZP/hmYS/R9pmAw==

axios@^0.26.0:
version "0.26.1"
resolved "https://registry.yarnpkg.com/axios/-/axios-0.26.1.tgz#1ede41c51fcf51bbbd6fd43669caaa4f0495aaa9"
integrity sha512-fPwcX4EvnSHuInCMItEhAGnaSEXRBjtzh9fOtsE6E1G6p7vl7edEeZe11QHf18+6+9gR5PbKV/sGKNaD8YaMeA==
axios@^1.7.5:
version "1.7.9"
resolved "https://registry.yarnpkg.com/axios/-/axios-1.7.9.tgz#d7d071380c132a24accda1b2cfc1535b79ec650a"
integrity sha512-LhLcE7Hbiryz8oMDdDptSrWowmB4Bl6RCt6sIJKpRB4XtVf0iEgewX3au/pJqm+Py1kCASkb/FFKjxQaLtxJvw==
dependencies:
follow-redirects "^1.14.8"
follow-redirects "^1.15.6"
form-data "^4.0.0"
proxy-from-env "^1.1.0"

axobject-query@~3.1.1:
version "3.1.1"
@ -4744,6 +4746,11 @@ cliui@^8.0.1:
strip-ansi "^6.0.1"
wrap-ansi "^7.0.0"

clsx@^2.1.1:
version "2.1.1"
resolved "https://registry.yarnpkg.com/clsx/-/clsx-2.1.1.tgz#eed397c9fd8bd882bfb18deab7102049a2f32999"
integrity sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==

co@^4.6.0:
version "4.6.0"
resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184"
@ -6495,10 +6502,10 @@ flatted@^3.2.9:
resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.3.1.tgz#21db470729a6734d4997002f439cb308987f567a"
integrity sha512-X8cqMLLie7KsNUDSdzeN8FYK9rEt4Dt67OsG/DNGnYTSDBG4uFAJFBnUeiV+zCVAvwFy56IjM9sH51jVaEhNxw==

follow-redirects@^1.14.8:
version "1.15.6"
resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.6.tgz#7f815c0cda4249c74ff09e95ef97c23b5fd0399b"
integrity sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==
follow-redirects@^1.15.6:
version "1.15.9"
resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.9.tgz#a604fa10e443bf98ca94228d9eebcc2e8a2c8ee1"
integrity sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==

for-each@^0.3.3:
version "0.3.3"
@ -9493,6 +9500,11 @@ property-information@^6.0.0:
resolved "https://registry.yarnpkg.com/property-information/-/property-information-6.5.0.tgz#6212fbb52ba757e92ef4fb9d657563b933b7ffec"
integrity sha512-PgTgs/BlvHxOu8QuEN7wi5A0OmXaBcHpmCSTehcs6Uuu9IkDIEo13Hy7n898RHfrQ49vKCoGeWZSaAK01nwVig==

proxy-from-env@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/proxy-from-env/-/proxy-from-env-1.1.0.tgz#e102f16ca355424865755d2c9e8ea4f24d58c3e2"
integrity sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==

psl@^1.1.33:
version "1.9.0"
resolved "https://registry.yarnpkg.com/psl/-/psl-1.9.0.tgz#d0df2a137f00794565fcaf3b2c00cd09f8d5a5a7"
@ -9898,6 +9910,12 @@ robust-predicates@^3.0.2:
resolved "https://registry.yarnpkg.com/robust-predicates/-/robust-predicates-3.0.2.tgz#d5b28528c4824d20fc48df1928d41d9efa1ad771"
integrity sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg==

"rt-client@https://github.com/Azure-Samples/aoai-realtime-audio-sdk/releases/download/js/v0.5.0/rt-client-0.5.0.tgz":
version "0.5.0"
resolved "https://github.com/Azure-Samples/aoai-realtime-audio-sdk/releases/download/js/v0.5.0/rt-client-0.5.0.tgz#abf2e9a850201e3571b8d36830f77bc52af3de9b"
dependencies:
ws "^8.18.0"

run-parallel@^1.1.9:
version "1.2.0"
resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee"
@ -11219,7 +11237,7 @@ write-file-atomic@^4.0.2:
imurmurhash "^0.1.4"
signal-exit "^3.0.7"

ws@^8.11.0, ws@^8.14.2:
ws@^8.11.0, ws@^8.14.2, ws@^8.18.0:
version "8.18.0"
resolved "https://registry.yarnpkg.com/ws/-/ws-8.18.0.tgz#0d7505a6eafe2b0e712d232b42279f53bc289bbc"
integrity sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw==