mirror of https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web.git (synced 2025-05-24 22:50:22 +09:00)

feat: agent supports baseUrl configuration

commit 20366fee35 (parent bfc3e90580)
@@ -69,7 +69,7 @@
 
 Using plugins with `agent` types such as `chat-conversational-react-description` did not work well, so support for other model variants is no longer being considered.
 - [x] `SERPAPI_API_KEY` is currently required; replacing the search plugin with DuckDuckGo will be supported later
-- [ ] Agent does not support a custom API endpoint
+- [x] Agent does not support a custom API endpoint
 - [x] ~~Plugin calls fail in some scenarios~~
 
   The problem is a parameter error when [Calculator](https://js.langchain.com/docs/api/tools_calculator/classes/Calculator) performs a calculation; for now there is no way to intervene.
@@ -35,6 +35,7 @@ interface RequestBody {
   presence_penalty?: number;
   frequency_penalty?: number;
   top_p?: number;
+  baseUrl?: string;
   maxIterations: number;
   returnIntermediateSteps: boolean;
 }
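With the new field in place, an agent request can carry its own endpoint. A minimal sketch of such a payload, assuming the fields visible in this diff (the example values and the proxy URL are illustrative, not taken from the commit):

```ts
// Illustrative request body exercising the new optional baseUrl field.
// Only baseUrl and the fields shown in this diff are confirmed; the
// concrete values and the proxy hostname are assumptions.
const body = {
  model: "gpt-3.5-turbo",
  stream: true,
  temperature: 0.5,
  presence_penalty: 0,
  frequency_penalty: 0,
  top_p: 1,
  baseUrl: "https://my-openai-proxy.example.com", // new in this commit
  maxIterations: 10,
  returnIntermediateSteps: true,
};
```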
@@ -194,15 +195,29 @@ async function handle(req: NextRequest) {
       outputKey: "output",
       chatHistory: new ChatMessageHistory(pastMessages),
     });
-    const llm = new ChatOpenAI({
-      modelName: reqBody.model,
-      openAIApiKey: serverConfig.apiKey,
-      temperature: reqBody.temperature,
-      streaming: reqBody.stream,
-      topP: reqBody.top_p,
-      presencePenalty: reqBody.presence_penalty,
-      frequencyPenalty: reqBody.frequency_penalty,
-    });
+    // support base url
+    let baseUrl = "https://api.openai.com/v1";
+    if (serverConfig.baseUrl) baseUrl = serverConfig.baseUrl;
+    if (
+      reqBody.baseUrl?.startsWith("http://") ||
+      reqBody.baseUrl?.startsWith("https://")
+    )
+      baseUrl = reqBody.baseUrl;
+    if (!baseUrl.endsWith("/v1"))
+      baseUrl = baseUrl.endsWith("/") ? `${baseUrl}v1` : `${baseUrl}/v1`;
+    console.log("[baseUrl]", baseUrl);
+    const llm = new ChatOpenAI(
+      {
+        modelName: reqBody.model,
+        openAIApiKey: serverConfig.apiKey,
+        temperature: reqBody.temperature,
+        streaming: reqBody.stream,
+        topP: reqBody.top_p,
+        presencePenalty: reqBody.presence_penalty,
+        frequencyPenalty: reqBody.frequency_penalty,
+      },
+      { basePath: baseUrl },
+    );
     const executor = await initializeAgentExecutorWithOptions(tools, llm, {
       agentType: "openai-functions",
       returnIntermediateSteps: reqBody.returnIntermediateSteps,
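The new block resolves the effective endpoint in order: start from the OpenAI default, let `serverConfig.baseUrl` override it, and let an absolute `reqBody.baseUrl` (one starting with `http://` or `https://`) override both; a trailing `/v1` is then appended if missing, and the result is handed to `ChatOpenAI` via its second constructor argument as `{ basePath: baseUrl }`. Pulled out as a standalone helper, a minimal sketch of that logic (the name `resolveBaseUrl` and its parameters are ours, not the commit's):

```ts
// Minimal sketch of the base-URL resolution added in this commit; the helper
// name and parameter names are assumptions for illustration only.
function resolveBaseUrl(serverBaseUrl?: string, requestBaseUrl?: string): string {
  // 1. Start from the OpenAI default.
  let baseUrl = "https://api.openai.com/v1";

  // 2. A server-side configured base URL overrides the default.
  if (serverBaseUrl) baseUrl = serverBaseUrl;

  // 3. An absolute URL from the request body overrides both.
  if (
    requestBaseUrl?.startsWith("http://") ||
    requestBaseUrl?.startsWith("https://")
  )
    baseUrl = requestBaseUrl;

  // 4. Normalize so the path always ends with /v1.
  if (!baseUrl.endsWith("/v1"))
    baseUrl = baseUrl.endsWith("/") ? `${baseUrl}v1` : `${baseUrl}/v1`;

  return baseUrl;
}

// Examples (inputs are illustrative):
resolveBaseUrl();                                // "https://api.openai.com/v1"
resolveBaseUrl("https://proxy.internal");        // "https://proxy.internal/v1"
resolveBaseUrl(undefined, "/api/openai");        // relative URL ignored -> default
resolveBaseUrl(undefined, "https://my.host/v1"); // "https://my.host/v1"
```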
@@ -217,6 +217,7 @@ export class ChatGPTApi implements LLMApi {
       presence_penalty: modelConfig.presence_penalty,
       frequency_penalty: modelConfig.frequency_penalty,
       top_p: modelConfig.top_p,
+      baseUrl: useAccessStore.getState().openaiUrl,
       maxIterations: options.agentConfig.maxIterations,
       returnIntermediateSteps: options.agentConfig.returnIntermediateSteps,
     };
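On the client side the agent request now forwards whatever URL is stored in the access settings (`useAccessStore.getState().openaiUrl`). Combined with the server-side check above, an absolute user-configured URL takes precedence over `serverConfig.baseUrl`, while a relative value (for example a proxy path such as `/api/openai`) does not start with `http://` or `https://` and is therefore ignored in favor of the server configuration or the default. Using the `resolveBaseUrl` sketch from above (inputs are hypothetical):

```ts
// Assuming the resolveBaseUrl sketch above; inputs are illustrative only.
resolveBaseUrl("https://internal-gateway.example", "https://user-proxy.example.com");
// -> "https://user-proxy.example.com/v1"   (absolute client URL wins)

resolveBaseUrl("https://internal-gateway.example", "/api/openai");
// -> "https://internal-gateway.example/v1" (relative client value ignored)
```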