From 1998cf5ced81c8313541c01a408594eef564bbc5 Mon Sep 17 00:00:00 2001
From: glay
Date: Tue, 5 Nov 2024 20:20:20 +0800
Subject: [PATCH] Merge feature/update-bedrock-api into main

---
 app/api/bedrock.ts              |  5 +----
 app/client/platforms/bedrock.ts | 14 ++++++--------
 2 files changed, 7 insertions(+), 12 deletions(-)

diff --git a/app/api/bedrock.ts b/app/api/bedrock.ts
index 4cad53da1..5465477c8 100644
--- a/app/api/bedrock.ts
+++ b/app/api/bedrock.ts
@@ -41,10 +41,7 @@ export interface ConverseRequest {
 
 function supportsToolUse(modelId: string): boolean {
   // llama and mistral models do not support tool use
-  return (
-    modelId.toLowerCase().includes("claude-3") &&
-    modelId.toLowerCase().includes("claude-3-5")
-  );
+  return modelId.toLowerCase().includes("claude-3");
 }
 
 function formatRequestBody(
diff --git a/app/client/platforms/bedrock.ts b/app/client/platforms/bedrock.ts
index 130b30d31..6d236344e 100644
--- a/app/client/platforms/bedrock.ts
+++ b/app/client/platforms/bedrock.ts
@@ -38,11 +38,9 @@ export class BedrockApi implements LLMApi {
   }
 
   extractMessage(res: any) {
-    console.log("[Response] bedrock response: ", res);
-    if (Array.isArray(res?.content)) {
-      return res.content;
-    }
-    return res;
+    console.log("[Response] claude response: ", res);
+
+    return res?.content?.[0]?.text;
   }
 
   async chat(options: ChatOptions): Promise<void> {
@@ -173,7 +171,6 @@ export class BedrockApi implements LLMApi {
         funcs,
         controller,
         // parseSSE
-        // parseSSE
         (text: string, runTools: ChatMessageTool[]) => {
           // console.log("parseSSE", text, runTools);
           let chunkJson:
@@ -269,13 +266,14 @@ export class BedrockApi implements LLMApi {
       };
 
       try {
-        controller.signal.onabort = () => options.onFinish("");
+        controller.signal.onabort = () =>
+          options.onFinish("", new Response(null, { status: 400 }));
 
         const res = await fetch(conversePath, payload);
         const resJson = await res.json();
         const message = this.extractMessage(resJson);
-        options.onFinish(message);
+        options.onFinish(message, res);
       } catch (e) {
         console.error("failed to chat", e);
         options.onError?.(e as Error);