import { ApiPath } from "../../constant";
import { ChatOptions, getHeaders, LLMApi, SpeechOptions } from "../api";
import {
  useAppConfig,
  usePluginStore,
  useChatStore,
  ChatMessageTool,
} from "../../store";
import { getMessageTextContent, isVisionModel } from "../../utils";
import { fetch } from "../../utils/stream";
import { preProcessImageContent, stream } from "../../utils/chat";
import { RequestPayload } from "./openai";

export type MultiBlockContent = {
  type: "image" | "text";
  source?: {
    type: string;
    media_type: string;
    data: string;
  };
  text?: string;
};

export type AnthropicMessage = {
  role: (typeof ClaudeMapper)[keyof typeof ClaudeMapper];
  content: string | MultiBlockContent[];
};

const ClaudeMapper = {
  assistant: "assistant",
  user: "user",
  system: "user",
} as const;

export class BedrockApi implements LLMApi {
  speech(options: SpeechOptions): Promise<ArrayBuffer> {
    throw new Error("Speech not implemented for Bedrock.");
  }

  extractMessage(res: any) {
    console.log("[Response] Bedrock non-stream response: ", res);
    if (res.error) {
      return "```\n" + JSON.stringify(res, null, 4) + "\n```";
    }
    return res?.content ?? res;
  }

  async chat(options: ChatOptions): Promise<void> {
    const visionModel = isVisionModel(options.config.model);
    const shouldStream = !!options.config.stream;
    const modelConfig = {
      ...useAppConfig.getState().modelConfig,
      ...useChatStore.getState().currentSession().mask.modelConfig,
      ...{
        model: options.config.model,
      },
    };

    // try to get base64 image data from the local cache for image_url content
    const messages: ChatOptions["messages"] = [];
    for (const v of options.messages) {
      const content = await preProcessImageContent(v.content);
      messages.push({ role: v.role, content });
    }

    const keys = ["system", "user"];

    // roles must alternate between "user" and "assistant" in Claude, so add a fake assistant message between two user messages
    for (let i = 0; i < messages.length - 1; i++) {
      const message = messages[i];
      const nextMessage = messages[i + 1];

      if (keys.includes(message.role) && keys.includes(nextMessage.role)) {
        messages[i] = [
          message,
          {
            role: "assistant",
            content: ";",
          },
        ] as any;
      }
    }

    const prompt = messages
      .flat()
      .filter((v) => {
        if (!v.content) return false;
        if (typeof v.content === "string" && !v.content.trim()) return false;
        return true;
      })
      .map((v) => {
        const { role, content } = v;
        const insideRole = ClaudeMapper[role] ?? "user";

        if (!visionModel || typeof content === "string") {
          return {
            role: insideRole,
            content: getMessageTextContent(v),
          };
        }
        return {
          role: insideRole,
          content: content
            .filter((v) => v.image_url || v.text)
            .map(({ type, text, image_url }) => {
              if (type === "text") {
                return {
                  type,
                  text: text!,
                };
              }
              // split a data URL ("data:<mime>;<encoding>,<data>") into its parts
              const { url = "" } = image_url || {};
              const colonIndex = url.indexOf(":");
              const semicolonIndex = url.indexOf(";");
              const comma = url.indexOf(",");

              const mimeType = url.slice(colonIndex + 1, semicolonIndex);
              const encodeType = url.slice(semicolonIndex + 1, comma);
              const data = url.slice(comma + 1);

              return {
                type: "image" as const,
                source: {
                  type: encodeType,
                  media_type: mimeType,
                  data,
                },
              };
            }),
        };
      });

    if (prompt[0]?.role === "assistant") {
      prompt.unshift({
        role: "user",
        content: ";",
      });
    }

    const requestBody = {
      modelId: options.config.model,
      messages: prompt,
      inferenceConfig: {
        maxTokens: modelConfig.max_tokens,
        temperature: modelConfig.temperature,
        topP: modelConfig.top_p,
        stopSequences: [],
      },
      stream: shouldStream,
    };

    const conversePath = `${ApiPath.Bedrock}/converse`;

    const controller = new AbortController();
    options.onController?.(controller);

    if (shouldStream) {
      let currentToolUse: ChatMessageTool | null = null;
      let index = -1;
      const [tools, funcs] = usePluginStore
        .getState()
        .getAsTools(
          useChatStore.getState().currentSession().mask?.plugin || [],
        );
      return stream(
        conversePath,
        requestBody,
        getHeaders(),
        // @ts-ignore
        tools.map((tool) => ({
          name: tool?.function?.name,
          description: tool?.function?.description,
          input_schema: tool?.function?.parameters,
        })),
        funcs,
        controller,
        // parseSSE
        (text: string, runTools: ChatMessageTool[]) => {
          // console.log("parseSSE", text, runTools);
          let chunkJson:
            | undefined
            | {
                type: "content_block_delta" | "content_block_stop";
                content_block?: {
                  type: "tool_use";
                  id: string;
                  name: string;
                };
                delta?: {
                  type: "text_delta" | "input_json_delta";
                  text?: string;
                  partial_json?: string;
                };
                index: number;
              };
          chunkJson = JSON.parse(text);

          if (chunkJson?.content_block?.type == "tool_use") {
            index += 1;
            const id = chunkJson?.content_block.id;
            const name = chunkJson?.content_block.name;
            runTools.push({
              id,
              type: "function",
              function: {
                name,
                arguments: "",
              },
            });
          }

          if (
            chunkJson?.delta?.type == "input_json_delta" &&
            chunkJson?.delta?.partial_json
          ) {
            // @ts-ignore
            runTools[index]["function"]["arguments"] +=
              chunkJson?.delta?.partial_json;
          }
          return chunkJson?.delta?.text;
        },
        // processToolMessage, include tool_calls message and tool call results
        (
          requestPayload: RequestPayload,
          toolCallMessage: any,
          toolCallResult: any[],
        ) => {
          // reset index value
          index = -1;
          // @ts-ignore
          requestPayload?.messages?.splice(
            // @ts-ignore
            requestPayload?.messages?.length,
            0,
            {
              role: "assistant",
              content: toolCallMessage.tool_calls.map(
                (tool: ChatMessageTool) => ({
                  type: "tool_use",
                  id: tool.id,
                  name: tool?.function?.name,
                  input: tool?.function?.arguments
                    ? JSON.parse(tool?.function?.arguments)
                    : {},
                }),
              ),
            },
            // @ts-ignore
            ...toolCallResult.map((result) => ({
              role: "user",
              content: [
                {
                  type: "tool_result",
                  tool_use_id: result.tool_call_id,
                  content: result.content,
                },
              ],
            })),
          );
        },
        options,
      );
    } else {
      const payload = {
        method: "POST",
        body: JSON.stringify(requestBody),
        signal: controller.signal,
        headers: {
          ...getHeaders(), // get common headers
        },
      };

      try {
        controller.signal.onabort = () =>
          options.onFinish("", new Response(null, { status: 400 }));

        const res = await fetch(conversePath, payload);
        const resJson = await res.json();
        const message = this.extractMessage(resJson);
        options.onFinish(message, res);
      } catch (e) {
        console.error("failed to chat", e);
        options.onError?.(e as Error);
      }
    }
  }

  async usage() {
    return {
      used: 0,
      total: 0,
    };
  }

  async models() {
    return [];
  }
}