feat: support global plugin configuration

Hk-Gosuto 2023-08-16 13:38:15 +08:00
parent dd0b451a7c
commit 76eb2afd06
11 changed files with 147 additions and 8 deletions

View File

@@ -59,7 +59,7 @@
 - [ ] Develop the plugin list page
 - [ ] Support toggling individual plugins
 - [ ] Support adding custom plugins
-- [ ] Support Agent parameter configuration (agentType, maxIterations, returnIntermediateSteps, etc.)
+- [x] Support Agent parameter configuration (~~agentType~~, maxIterations, returnIntermediateSteps, etc.)
 - [x] Support ChatSession-level plugin toggles
       The plugin toggle only appears when using `0613` models; other models default to off and the toggle is not shown.
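For reference, that visibility rule reduces to one predicate over the new global plugin switch and the model name. A minimal sketch; `showPluginToggle` is a hypothetical helper, not part of this commit (the real check appears in the chat.tsx hunk further down):

function showPluginToggle(pluginEnabled: boolean, currentModel: string): boolean {
  // The toggle renders only when plugins are globally enabled AND the model
  // supports OpenAI function calling (the "0613" snapshots).
  return pluginEnabled && currentModel.endsWith("0613");
}

showPluginToggle(true, "gpt-3.5-turbo-0613"); // true: toggle is shown
showPluginToggle(true, "gpt-4");              // false: toggle hidden, plugins stay off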

View File

@@ -35,6 +35,8 @@ interface RequestBody {
   presence_penalty?: number;
   frequency_penalty?: number;
   top_p?: number;
+  maxIterations: number;
+  returnIntermediateSteps: boolean;
 }

 class ResponseBody {
@@ -120,6 +122,7 @@ async function handle(req: NextRequest) {
         `tool: ${action.tool} toolInput: ${action.toolInput}`,
         { action },
       );
+      if (!reqBody.returnIntermediateSteps) return;
       var response = new ResponseBody();
       response.isToolMessage = true;
       let toolInput = <ToolInput>(<unknown>action.toolInput);
@@ -202,8 +205,8 @@ async function handle(req: NextRequest) {
     });
     const executor = await initializeAgentExecutorWithOptions(tools, llm, {
       agentType: "openai-functions",
-      returnIntermediateSteps: true,
-      maxIterations: 3,
+      returnIntermediateSteps: reqBody.returnIntermediateSteps,
+      maxIterations: reqBody.maxIterations,
       memory: memory,
     });
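For context, roughly how these two request-body fields drive the LangChain executor end to end. A self-contained sketch against the LangChain JS API used here; the tool list and model name are placeholders, not the app's actual configuration:

import { initializeAgentExecutorWithOptions } from "langchain/agents";
import { ChatOpenAI } from "langchain/chat_models/openai";
import { Calculator } from "langchain/tools/calculator";

async function runAgent(maxIterations: number, returnIntermediateSteps: boolean) {
  const llm = new ChatOpenAI({ modelName: "gpt-3.5-turbo-0613", temperature: 0 });
  // The client-supplied values replace the previously hard-coded
  // returnIntermediateSteps: true / maxIterations: 3.
  const executor = await initializeAgentExecutorWithOptions([new Calculator()], llm, {
    agentType: "openai-functions",
    returnIntermediateSteps,
    maxIterations,
  });
  const result = await executor.call({ input: "What is 7 * 6?" });
  // intermediateSteps is only populated when returnIntermediateSteps is true.
  console.log(result.output, result.intermediateSteps);
}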

View File

@@ -23,6 +23,11 @@ export interface LLMConfig {
   frequency_penalty?: number;
 }

+export interface LLMAgentConfig {
+  maxIterations: number;
+  returnIntermediateSteps: boolean;
+}
+
 export interface ChatOptions {
   messages: RequestMessage[];
   config: LLMConfig;
@@ -33,6 +38,17 @@ export interface ChatOptions {
   onController?: (controller: AbortController) => void;
 }

+export interface AgentChatOptions {
+  messages: RequestMessage[];
+  config: LLMConfig;
+  agentConfig: LLMAgentConfig;
+  onToolUpdate?: (toolName: string, toolInput: string) => void;
+  onUpdate?: (message: string, chunk: string) => void;
+  onFinish: (message: string) => void;
+  onError?: (err: Error) => void;
+  onController?: (controller: AbortController) => void;
+}
+
 export interface LLMUsage {
   used: number;
   total: number;
@@ -45,7 +61,7 @@ export interface LLMModel {
 export abstract class LLMApi {
   abstract chat(options: ChatOptions): Promise<void>;
-  abstract toolAgentChat(options: ChatOptions): Promise<void>;
+  abstract toolAgentChat(options: AgentChatOptions): Promise<void>;
   abstract usage(): Promise<LLMUsage>;
   abstract models(): Promise<LLMModel[]>;
 }
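Callers of the abstract method now supply the extra agentConfig field. A minimal sketch of invoking the new signature; callback bodies just log, `api` stands in for the app's client instance, and the config object is abbreviated to fields visible in this diff:

const options: AgentChatOptions = {
  messages: [{ role: "user", content: "What's the weather in Berlin?" }],
  config: { model: "gpt-3.5-turbo-0613", stream: true },
  agentConfig: { maxIterations: 3, returnIntermediateSteps: true },
  // Fires once per intermediate tool call (only if returnIntermediateSteps is on).
  onToolUpdate: (toolName, toolInput) => console.log("[tool]", toolName, toolInput),
  onUpdate: (message) => console.log("[partial]", message),
  onFinish: (message) => console.log("[done]", message),
  onError: (err) => console.error(err),
};
await api.llm.toolAgentChat(options);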

View File

@@ -6,7 +6,14 @@ import {
 } from "@/app/constant";
 import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
-import { ChatOptions, getHeaders, LLMApi, LLMModel, LLMUsage } from "../api";
+import {
+  AgentChatOptions,
+  ChatOptions,
+  getHeaders,
+  LLMApi,
+  LLMModel,
+  LLMUsage,
+} from "../api";
 import Locale from "../../locales";
 import {
   EventStreamContentType,
@@ -188,7 +195,7 @@ export class ChatGPTApi implements LLMApi {
     }
   }

-  async toolAgentChat(options: ChatOptions) {
+  async toolAgentChat(options: AgentChatOptions) {
     const messages = options.messages.map((v) => ({
       role: v.role,
       content: v.content,
@@ -210,6 +217,8 @@ export class ChatGPTApi implements LLMApi {
       presence_penalty: modelConfig.presence_penalty,
       frequency_penalty: modelConfig.frequency_penalty,
       top_p: modelConfig.top_p,
+      maxIterations: options.agentConfig.maxIterations,
+      returnIntermediateSteps: options.agentConfig.returnIntermediateSteps,
     };

     console.log("[Request] openai payload: ", requestPayload);

View File

@@ -511,7 +511,7 @@ export function ChatActions(props: {
         icon={<RobotIcon />}
       />
-      {currentModel.endsWith("0613") && (
+      {config.pluginConfig.enable && currentModel.endsWith("0613") && (
         <ChatAction
           onClick={switchUsePlugins}
           text={

View File

@@ -0,0 +1,60 @@
+import { PluginConfig } from "../store";
+import Locale from "../locales";
+import { ListItem } from "./ui-lib";
+
+export function PluginConfigList(props: {
+  pluginConfig: PluginConfig;
+  updateConfig: (updater: (config: PluginConfig) => void) => void;
+}) {
+  return (
+    <>
+      <ListItem
+        title={Locale.Settings.Plugin.Enable.Title}
+        subTitle={Locale.Settings.Plugin.Enable.SubTitle}
+      >
+        <input
+          type="checkbox"
+          checked={props.pluginConfig.enable}
+          onChange={(e) =>
+            props.updateConfig(
+              (config) => (config.enable = e.currentTarget.checked),
+            )
+          }
+        ></input>
+      </ListItem>
+      <ListItem
+        title={Locale.Settings.Plugin.MaxIteration.Title}
+        subTitle={Locale.Settings.Plugin.MaxIteration.SubTitle}
+      >
+        <input
+          type="number"
+          min={1}
+          max={10}
+          value={props.pluginConfig.maxIterations}
+          onChange={(e) =>
+            props.updateConfig(
+              (config) =>
+                (config.maxIterations = e.currentTarget.valueAsNumber),
+            )
+          }
+        ></input>
+      </ListItem>
+      <ListItem
+        title={Locale.Settings.Plugin.ReturnIntermediateStep.Title}
+        subTitle={Locale.Settings.Plugin.ReturnIntermediateStep.SubTitle}
+      >
+        <input
+          type="checkbox"
+          checked={props.pluginConfig.returnIntermediateSteps}
+          onChange={(e) =>
+            props.updateConfig(
+              (config) =>
+                (config.returnIntermediateSteps = e.currentTarget.checked),
+            )
+          }
+        ></input>
+      </ListItem>
+    </>
+  );
+}

View File

@@ -49,6 +49,7 @@ import { Avatar, AvatarPicker } from "./emoji";
 import { getClientConfig } from "../config/client";
 import { useSyncStore } from "../store/sync";
 import { nanoid } from "nanoid";
+import { PluginConfigList } from "./plugin-config";

 function EditPromptModal(props: { id: string; onClose: () => void }) {
   const promptStore = usePromptStore();
@@ -739,6 +740,17 @@ export function Settings() {
         <UserPromptModal onClose={() => setShowPromptModal(false)} />
       )}
+      <List>
+        <PluginConfigList
+          pluginConfig={config.pluginConfig}
+          updateConfig={(updater) => {
+            const pluginConfig = { ...config.pluginConfig };
+            updater(pluginConfig);
+            config.update((config) => (config.pluginConfig = pluginConfig));
+          }}
+        />
+      </List>
+
       <DangerItems />
     </div>
   </ErrorBoundary>
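The updateConfig bridge above follows a copy-mutate-commit pattern: clone the pluginConfig slice, let the child component mutate the clone, then write the clone back in one store update so persisted state is replaced rather than mutated in place. The same pattern in isolation, as a generic sketch independent of this codebase:

// Generic copy-mutate-commit updater: clone the slice, apply the caller's
// mutation to the clone, then commit the clone back in a single write.
function makeUpdater<T extends object>(
  read: () => T,
  write: (next: T) => void,
) {
  return (mutate: (draft: T) => void) => {
    const draft = { ...read() }; // shallow clone keeps the old object intact
    mutate(draft);
    write(draft);
  };
}

// Usage mirroring the Settings wiring above:
let state = { enable: true, maxIterations: 3, returnIntermediateSteps: true };
const update = makeUpdater(() => state, (next) => (state = next));
update((cfg) => (cfg.maxIterations = 5));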

View File

@@ -261,6 +261,20 @@ const cn = {
       Title: "频率惩罚度 (frequency_penalty)",
       SubTitle: "值越大,越有可能降低重复字词",
     },
+    Plugin: {
+      Enable: {
+        Title: "启用插件",
+        SubTitle: "启用插件调用功能",
+      },
+      MaxIteration: {
+        Title: "最大迭代数",
+        SubTitle: "插件调用最大迭代数",
+      },
+      ReturnIntermediateStep: {
+        Title: "返回中间步骤",
+        SubTitle: "是否返回插件调用的中间步骤",
+      },
+    },
   },
   Store: {
     DefaultTopic: "新的聊天",

View File

@@ -265,6 +265,20 @@ const en: LocaleType = {
       SubTitle:
         "A larger value decreasing the likelihood to repeat the same line",
     },
+    Plugin: {
+      Enable: {
+        Title: "Enable Plugin",
+        SubTitle: "Enable plugin invocation",
+      },
+      MaxIteration: {
+        Title: "Max Iterations",
+        SubTitle: "Max of plugin iterations",
+      },
+      ReturnIntermediateStep: {
+        Title: "Return Intermediate Steps",
+        SubTitle: "Return Intermediate Steps",
+      },
+    },
   },
   Store: {
     DefaultTopic: "New Conversation",

View File

@@ -305,6 +305,9 @@ export const useChatStore = create<ChatStore>()(
       const sendMessages = recentMessages.concat(userMessage);
       const messageIndex = get().currentSession().messages.length + 1;
       const config = useAppConfig.getState();
+      const pluginConfig = useAppConfig.getState().pluginConfig;
+
       // save user's and bot's message
       get().updateCurrentSession((session) => {
         const savedUserMessage = {
@@ -315,11 +318,12 @@ export const useChatStore = create<ChatStore>()(
         session.messages.push(botMessage);
       });

-      if (session.mask.usePlugins) {
+      if (config.pluginConfig.enable && session.mask.usePlugins) {
         console.log("[ToolAgent] start");
         api.llm.toolAgentChat({
           messages: sendMessages,
           config: { ...modelConfig, stream: true },
+          agentConfig: { ...pluginConfig },
           onUpdate(message) {
             botMessage.streaming = true;
             if (message) {
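Tying the pieces together: the send path forwards the global pluginConfig as agentConfig, and the API route shown earlier returns early from the tool-action handler when returnIntermediateSteps is off, so no intermediate tool messages ever reach the client. A trimmed sketch of a caller that surfaces those steps; the onToolUpdate body is illustrative, not the commit's actual handler:

api.llm.toolAgentChat({
  messages: sendMessages,
  config: { ...modelConfig, stream: true },
  agentConfig: { ...pluginConfig },
  // Only invoked when returnIntermediateSteps is true; otherwise the
  // route skips emitting tool messages entirely.
  onToolUpdate(toolName, toolInput) {
    botMessage.content += `\n> ${toolName}: ${toolInput}`;
  },
  onFinish(message) {
    botMessage.content = message;
    botMessage.streaming = false;
  },
});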

View File

@@ -51,6 +51,12 @@ export const DEFAULT_CONFIG = {
     enableInjectSystemPrompts: true,
     template: DEFAULT_INPUT_TEMPLATE,
   },
+  pluginConfig: {
+    enable: true,
+    maxIterations: 3,
+    returnIntermediateSteps: true,
+  },
 };

 export type ChatConfig = typeof DEFAULT_CONFIG;
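The defaults mirror what the route used to hard-code (maxIterations: 3, returnIntermediateSteps: true), so behavior is unchanged until a user touches the new settings. Since the settings input constrains iterations to 1-10, persisted values could be clamped on load; a sketch with a hypothetical helper, not part of this commit:

import { PluginConfig } from "../store"; // hypothetical import for this sketch

// Clamp a persisted config back into the range the settings UI allows
// (min 1, max 10), falling back to the default of 3 on bad input.
function sanitizePluginConfig(cfg: PluginConfig): PluginConfig {
  const n = Number.isFinite(cfg.maxIterations) ? cfg.maxIterations : 3;
  return { ...cfg, maxIterations: Math.min(10, Math.max(1, n)) };
}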
@@ -63,6 +69,7 @@ export type ChatConfigStore = ChatConfig & {
 };
 export type ModelConfig = ChatConfig["modelConfig"];
+export type PluginConfig = ChatConfig["pluginConfig"];

 export function limitNumber(
   x: number,