mirror of https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web.git (synced 2025-05-29 00:50:22 +09:00)

alpha version

This commit is contained in:
  parent ac57b2c770
  commit 958ab02d1e
@@ -23,15 +23,13 @@ async function handle(req: NextRequest) {
  const file = formData.get("file") as File;
  const originalFileName = file?.name;

  const fileReader = file.stream().getReader();
  const fileData: number[] = [];

  while (true) {
    const { done, value } = await fileReader.read();
    if (done) break;
    fileData.push(...value);
  }

  let fileData: ArrayBuffer | undefined;
  for (const [key, value] of formData.entries()) {
    if (value instanceof File) {
      fileData = await value.arrayBuffer();
    }
  }

  if (!fileData) throw new Error("Get file buffer error");
  const buffer = Buffer.from(fileData);
  const fileType = path.extname(originalFileName).slice(1);
  var fileName = `${Date.now()}.${fileType}`;
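Aside: the hunk above replaces a chunk-by-chunk ReadableStream loop with a single File.arrayBuffer() call. A minimal standalone sketch of the new path, with an illustrative helper name that is not part of the commit:

    // Sketch: pull the first uploaded File out of a multipart form and
    // materialize it as a Node.js Buffer (Web FormData, Node 18+ / Next.js).
    async function firstFileBuffer(formData: FormData): Promise<Buffer> {
      for (const [, value] of formData.entries()) {
        if (value instanceof File) {
          // arrayBuffer() reads the whole file in one call, replacing the
          // incremental getReader() loop from the old code.
          return Buffer.from(await value.arrayBuffer());
        }
      }
      throw new Error("Get file buffer error");
    }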
@@ -7,16 +7,15 @@ import { StableDiffusionNodeWrapper } from "@/app/api/langchain-tools/stable_dif
import { Calculator } from "langchain/tools/calculator";
import { WebBrowser } from "langchain/tools/webbrowser";
import { WolframAlphaTool } from "@/app/api/langchain-tools/wolframalpha";
import { RAGSearch } from "./rag_search";

export class NodeJSTool {
  private apiKey: string | undefined;
  private baseUrl: string;
  private model: BaseLanguageModel;
  private embeddings: Embeddings;
  private sessionId: string;
  private ragEmbeddings: Embeddings;
  private callback?: (data: string) => Promise<void>;

  constructor(
@@ -24,12 +23,16 @@ export class NodeJSTool {
    baseUrl: string,
    model: BaseLanguageModel,
    embeddings: Embeddings,
    sessionId: string,
    ragEmbeddings: Embeddings,
    callback?: (data: string) => Promise<void>,
  ) {
    this.apiKey = apiKey;
    this.baseUrl = baseUrl;
    this.model = model;
    this.embeddings = embeddings;
    this.sessionId = sessionId;
    this.ragEmbeddings = ragEmbeddings;
    this.callback = callback;
  }

@@ -57,6 +60,9 @@ export class NodeJSTool {
      wolframAlphaTool,
      pdfBrowserTool,
    ];
    if (!!process.env.NEXT_PUBLIC_ENABLE_RAG) {
      tools.push(new RAGSearch(this.sessionId, this.model, this.ragEmbeddings));
    }
    return tools;
  }
}
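For context, a hedged sketch of wiring the widened constructor; the values below are placeholders, but getCustomTools() and the parameter order mirror the code above and the agent route further down:

    // Sketch: constructing NodeJSTool with the new ragEmbeddings parameter.
    import { ChatOpenAI, OpenAIEmbeddings } from "@langchain/openai";

    const model = new ChatOpenAI({ modelName: "gpt-3.5-turbo" });
    const embeddings = new OpenAIEmbeddings();
    const ragEmbeddings = new OpenAIEmbeddings({
      modelName: "text-embedding-3-large", // must match the Pinecone index dimension
    });

    const nodejsTool = new NodeJSTool(
      process.env.OPENAI_API_KEY,  // apiKey: string | undefined
      "https://api.openai.com/v1", // baseUrl
      model,
      embeddings,
      "session-123",               // hypothetical sessionId, forwarded to RAGSearch
      ragEmbeddings,
      async (data) => console.log(data), // optional streaming callback
    );
    const tools = await nodejsTool.getCustomTools();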
app/api/langchain-tools/rag_search.ts (new file, 64 lines)
@@ -0,0 +1,64 @@
import { Tool } from "@langchain/core/tools";
import { CallbackManagerForToolRun } from "@langchain/core/callbacks/manager";
import { BaseLanguageModel } from "langchain/dist/base_language";
import { formatDocumentsAsString } from "langchain/util/document";
import { Embeddings } from "langchain/dist/embeddings/base.js";
import { RunnableSequence } from "@langchain/core/runnables";
import { StringOutputParser } from "@langchain/core/output_parsers";
import { Pinecone } from "@pinecone-database/pinecone";
import { PineconeStore } from "@langchain/pinecone";

export class RAGSearch extends Tool {
  static lc_name() {
    return "RAGSearch";
  }

  get lc_namespace() {
    return [...super.lc_namespace, "ragsearch"];
  }

  private sessionId: string;
  private model: BaseLanguageModel;
  private embeddings: Embeddings;

  constructor(
    sessionId: string,
    model: BaseLanguageModel,
    embeddings: Embeddings,
  ) {
    super();
    this.sessionId = sessionId;
    this.model = model;
    this.embeddings = embeddings;
  }

  /** @ignore */
  async _call(inputs: string, runManager?: CallbackManagerForToolRun) {
    const pinecone = new Pinecone();
    const pineconeIndex = pinecone.Index(process.env.PINECONE_INDEX!);
    const vectorStore = await PineconeStore.fromExistingIndex(this.embeddings, {
      pineconeIndex,
    });

    let context;
    const returnCount = process.env.RAG_RETURN_COUNT
      ? parseInt(process.env.RAG_RETURN_COUNT, 10)
      : 4;
    const results = await vectorStore.similaritySearch(inputs, returnCount, {
      sessionId: this.sessionId,
    });
    context = formatDocumentsAsString(results);
    console.log("[rag-search]", context);
    return context;
    // const input = `Text:${context}\n\nQuestion:${inputs}\n\nI need you to answer the question based on the text.`;

    // console.log("[rag-search]", input);

    // const chain = RunnableSequence.from([this.model, new StringOutputParser()]);
    // return chain.invoke(input, runManager?.getChild());
  }

  name = "rag-search";

  description = `It is used to query documents entered by the user. The input is the keywords extracted from the user's question; multiple keywords are separated by spaces.`;
}
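A minimal usage sketch for the new tool; it assumes PINECONE_API_KEY and PINECONE_INDEX are set, and the session id is hypothetical:

    // Sketch: invoking RAGSearch directly. In the app it is registered by
    // NodeJSTool.getCustomTools() when NEXT_PUBLIC_ENABLE_RAG is set.
    import { ChatOpenAI, OpenAIEmbeddings } from "@langchain/openai";

    const ragSearch = new RAGSearch(
      "session-123", // hypothetical session id
      new ChatOpenAI({ modelName: "gpt-3.5-turbo" }),
      new OpenAIEmbeddings({ modelName: "text-embedding-3-large" }),
    );
    // Tool.call() runs _call() under the hood and returns the concatenated
    // document text stored under this sessionId.
    const context = await ragSearch.call("pinecone vector db");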
app/api/langchain/rag/search/route.ts (new file, 105 lines)
@@ -0,0 +1,105 @@
import { NextRequest, NextResponse } from "next/server";
import { auth } from "@/app/api/auth";
import { ACCESS_CODE_PREFIX, ModelProvider } from "@/app/constant";
import { OpenAIEmbeddings } from "@langchain/openai";
import { Pinecone } from "@pinecone-database/pinecone";
import { PineconeStore } from "@langchain/pinecone";
import { getServerSideConfig } from "@/app/config/server";

interface RequestBody {
  sessionId: string;
  query: string;
  baseUrl?: string;
}

async function handle(req: NextRequest) {
  if (req.method === "OPTIONS") {
    return NextResponse.json({ body: "OK" }, { status: 200 });
  }
  try {
    const authResult = auth(req, ModelProvider.GPT);
    if (authResult.error) {
      return NextResponse.json(authResult, {
        status: 401,
      });
    }

    const reqBody: RequestBody = await req.json();
    const authToken = req.headers.get("Authorization") ?? "";
    const token = authToken.trim().replaceAll("Bearer ", "").trim();

    const pinecone = new Pinecone();
    const pineconeIndex = pinecone.Index(process.env.PINECONE_INDEX!);
    const apiKey = getOpenAIApiKey(token);
    const baseUrl = getOpenAIBaseUrl(reqBody.baseUrl);
    const embeddings = new OpenAIEmbeddings(
      {
        modelName: process.env.RAG_EMBEDDING_MODEL ?? "text-embedding-3-large",
        openAIApiKey: apiKey,
      },
      { basePath: baseUrl },
    );
    const vectorStore = await PineconeStore.fromExistingIndex(embeddings, {
      pineconeIndex,
    });
    const results = await vectorStore.similaritySearch(reqBody.query, 1, {
      sessionId: reqBody.sessionId,
    });
    console.log(results);
    return NextResponse.json(results, {
      status: 200,
    });
  } catch (e) {
    console.error(e);
    return new Response(JSON.stringify({ error: (e as any).message }), {
      status: 500,
      headers: { "Content-Type": "application/json" },
    });
  }
}

function getOpenAIApiKey(token: string) {
  const serverConfig = getServerSideConfig();
  const isApiKey = !token.startsWith(ACCESS_CODE_PREFIX);

  let apiKey = serverConfig.apiKey;
  if (isApiKey && token) {
    apiKey = token;
  }
  return apiKey;
}

function getOpenAIBaseUrl(reqBaseUrl: string | undefined) {
  const serverConfig = getServerSideConfig();
  let baseUrl = "https://api.openai.com/v1";
  if (serverConfig.baseUrl) baseUrl = serverConfig.baseUrl;
  if (reqBaseUrl?.startsWith("http://") || reqBaseUrl?.startsWith("https://"))
    baseUrl = reqBaseUrl;
  if (!baseUrl.endsWith("/v1"))
    baseUrl = baseUrl.endsWith("/") ? `${baseUrl}v1` : `${baseUrl}/v1`;
  console.log("[baseUrl]", baseUrl);
  return baseUrl;
}

export const POST = handle;

export const runtime = "nodejs";
export const preferredRegion = [
  "arn1",
  "bom1",
  "cdg1",
  "cle1",
  "cpt1",
  "dub1",
  "fra1",
  "gru1",
  "hnd1",
  "iad1",
  "icn1",
  "kix1",
  "lhr1",
  "pdx1",
  "sfo1",
  "sin1",
  "syd1",
];
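For reference, a hedged sketch of querying the endpoint from a client; the token and session id are placeholders:

    // Sketch: POST to the RAG search route. The handler authenticates the
    // Authorization header and filters matches by sessionId metadata.
    const res = await fetch("/api/langchain/rag/search", {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        Authorization: "Bearer sk-...", // or an access-code token
      },
      body: JSON.stringify({
        sessionId: "session-123", // hypothetical
        query: "what does the uploaded document say about pricing?",
      }),
    });
    const documents = await res.json(); // matched Documents, most similar first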
@@ -1,16 +1,66 @@
import { NextRequest, NextResponse } from "next/server";
import { auth } from "@/app/api/auth";
import { NodeJSTool } from "@/app/api/langchain-tools/nodejs_tools";
import { ACCESS_CODE_PREFIX, ModelProvider } from "@/app/constant";
import { OpenAI, OpenAIEmbeddings } from "@langchain/openai";
import path from "path";
import { PDFLoader } from "langchain/document_loaders/fs/pdf";
import { TextLoader } from "langchain/document_loaders/fs/text";
import { CSVLoader } from "langchain/document_loaders/fs/csv";
import { DocxLoader } from "langchain/document_loaders/fs/docx";
import { EPubLoader } from "langchain/document_loaders/fs/epub";
import { JSONLoader } from "langchain/document_loaders/fs/json";
import { JSONLinesLoader } from "langchain/document_loaders/fs/json";
import { OpenAIWhisperAudio } from "langchain/document_loaders/fs/openai_whisper_audio";
// import { PPTXLoader } from "langchain/document_loaders/fs/pptx";
import { SRTLoader } from "langchain/document_loaders/fs/srt";
import { RecursiveCharacterTextSplitter } from "langchain/text_splitter";
import { Pinecone } from "@pinecone-database/pinecone";
import { Document } from "@langchain/core/documents";
import { PineconeStore } from "@langchain/pinecone";
import { getServerSideConfig } from "@/app/config/server";
import { RequestBody } from "../../tool/agent/agentapi";
import { FileInfo } from "@/app/client/platforms/utils";
import mime from "mime";
import LocalFileStorage from "@/app/utils/local_file_storage";
import S3FileStorage from "@/app/utils/s3_file_storage";

interface RequestBody {
  sessionId: string;
  fileInfos: FileInfo[];
  baseUrl?: string;
}

function getLoader(
  fileName: string,
  fileBlob: Blob,
  openaiApiKey: string,
  openaiBaseUrl: string,
) {
  const extension = fileName.split(".").pop();
  switch (extension) {
    case "txt":
    case "md":
      return new TextLoader(fileBlob);
    case "pdf":
      return new PDFLoader(fileBlob);
    case "docx":
      return new DocxLoader(fileBlob);
    case "csv":
      return new CSVLoader(fileBlob);
    case "json":
      return new JSONLoader(fileBlob);
    // case 'pptx':
    //   return new PPTXLoader(fileBlob);
    case "srt":
      return new SRTLoader(fileBlob);
    case "mp3":
      return new OpenAIWhisperAudio(fileBlob, {
        clientOptions: {
          apiKey: openaiApiKey,
          baseURL: openaiBaseUrl,
        },
      });
    default:
      throw new Error(`Unsupported file type: ${extension}`);
  }
}
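A usage sketch for the loader factory; the stored file name, content type, and Blob conversion are illustrative:

    // Sketch: turn a stored file into LangChain Documents via getLoader().
    const apiKey = process.env.OPENAI_API_KEY!; // only needed for the mp3/Whisper case
    const baseUrl = "https://api.openai.com/v1";
    const fileBuffer = await LocalFileStorage.get("1712345678.pdf"); // hypothetical stored name
    const fileBlob = new Blob([fileBuffer!], { type: "application/pdf" });
    const loader = getLoader("report.pdf", fileBlob, apiKey, baseUrl);
    const docs = await loader.load(); // one Document per page/section, loader-dependent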

async function handle(req: NextRequest) {
  if (req.method === "OPTIONS") {
@@ -27,88 +77,70 @@ async function handle(req: NextRequest) {
    const reqBody: RequestBody = await req.json();
    const authToken = req.headers.get("Authorization") ?? "";
    const token = authToken.trim().replaceAll("Bearer ", "").trim();

    //https://js.langchain.com/docs/integrations/vectorstores/pinecone
    // const formData = await req.formData();
    // const file = formData.get("file") as File;
    // const originalFileName = file?.name;

    // const fileReader = file.stream().getReader();
    // const fileData: number[] = [];

    // while (true) {
    //   const { done, value } = await fileReader.read();
    //   if (done) break;
    //   fileData.push(...value);
    // }

    // const buffer = Buffer.from(fileData);
    // const fileType = path.extname(originalFileName).slice(1);
    // const fileBlob = bufferToBlob(buffer, "application/pdf")

    // const loader = new PDFLoader(fileBlob);
    // const docs = await loader.load();
    // const textSplitter = new RecursiveCharacterTextSplitter({
    //   chunkSize: 1000,
    //   chunkOverlap: 200,
    // });
    // const splits = await textSplitter.splitDocuments(docs);
    const pinecone = new Pinecone();
    // await pinecone.createIndex({
    //   name: 'example-index',
    //   dimension: 1536,
    //   metric: 'cosine',
    //   spec: {
    //     pod: {
    //       environment: 'gcp-starter',
    //       podType: 'p1.x1',
    //       pods: 1
    //     }
    //   }
    // });
    const pineconeIndex = pinecone.Index("example-index");
    const docs = [
      new Document({
        metadata: { foo: "bar" },
        pageContent: "pinecone is a vector db",
      }),
      new Document({
        metadata: { foo: "bar" },
        pageContent: "the quick brown fox jumped over the lazy dog",
      }),
      new Document({
        metadata: { baz: "qux" },
        pageContent: "lorem ipsum dolor sit amet",
      }),
      new Document({
        metadata: { baz: "qux" },
        pageContent: "pinecones are the woody fruiting body and of a pine tree",
      }),
    ];
    const apiKey = getOpenAIApiKey(token);
    const baseUrl = getOpenAIBaseUrl(reqBody.baseUrl);
    console.log(baseUrl);
    const serverConfig = getServerSideConfig();
    const pinecone = new Pinecone();
    const pineconeIndex = pinecone.Index(process.env.PINECONE_INDEX!);
    const embeddings = new OpenAIEmbeddings(
      {
        modelName: "text-embedding-ada-002",
        modelName: process.env.RAG_EMBEDDING_MODEL ?? "text-embedding-3-large",
        openAIApiKey: apiKey,
      },
      { basePath: baseUrl },
    );
    await PineconeStore.fromDocuments(docs, embeddings, {
      pineconeIndex,
      maxConcurrency: 5,
    });
    const vectorStore = await PineconeStore.fromExistingIndex(embeddings, {
      pineconeIndex,
    });
    const results = await vectorStore.similaritySearch("pinecone", 1, {
      foo: "bar",
    });
    console.log(results);
    //https://js.langchain.com/docs/integrations/vectorstores/pinecone
    // process files
    for (let i = 0; i < reqBody.fileInfos.length; i++) {
      const fileInfo = reqBody.fileInfos[i];
      const contentType = mime.getType(fileInfo.fileName);
      // get file buffer
      var fileBuffer: Buffer | undefined;
      if (serverConfig.isStoreFileToLocal) {
        fileBuffer = await LocalFileStorage.get(fileInfo.fileName);
      } else {
        var file = await S3FileStorage.get(fileInfo.fileName);
        var fileByteArray = await file?.transformToByteArray();
        if (fileByteArray) fileBuffer = Buffer.from(fileByteArray);
      }
      if (!fileBuffer || !contentType) {
        console.error(`get ${fileInfo.fileName} buffer fail`);
        continue;
      }
      // load file to docs
      const fileBlob = bufferToBlob(fileBuffer, contentType);
      const loader = getLoader(fileInfo.fileName, fileBlob, apiKey, baseUrl);
      const docs = await loader.load();
      // modify doc meta
      docs.forEach((doc) => {
        doc.metadata = {
          ...doc.metadata,
          sessionId: reqBody.sessionId,
          sourceFileName: fileInfo.originalFilename,
          fileName: fileInfo.fileName,
        };
      });
      // split
      const chunkSize = process.env.RAG_CHUNK_SIZE
        ? parseInt(process.env.RAG_CHUNK_SIZE, 10)
        : 2000;
      const chunkOverlap = process.env.RAG_CHUNK_OVERLAP
        ? parseInt(process.env.RAG_CHUNK_OVERLAP, 10)
        : 200;
      const textSplitter = new RecursiveCharacterTextSplitter({
        chunkSize: chunkSize,
        chunkOverlap: chunkOverlap,
      });
      const splits = await textSplitter.splitDocuments(docs);
      // remove history
      await PineconeStore.fromDocuments(splits, embeddings, {
        pineconeIndex,
        maxConcurrency: 5,
      });
    }
    return NextResponse.json(
      {
        storeId: "",
        sessionId: reqBody.sessionId,
      },
      {
        status: 200,
@@ -140,7 +172,6 @@ function getOpenAIApiKey(token: string) {
  }
  return apiKey;
}

function getOpenAIBaseUrl(reqBaseUrl: string | undefined) {
  const serverConfig = getServerSideConfig();
  let baseUrl = "https://api.openai.com/v1";
@@ -153,7 +184,25 @@ function getOpenAIBaseUrl(reqBaseUrl: string | undefined) {
  return baseUrl;
}

export const GET = handle;
export const POST = handle;

export const runtime = "nodejs";
export const preferredRegion = [
  "arn1",
  "bom1",
  "cdg1",
  "cle1",
  "cpt1",
  "dub1",
  "fra1",
  "gru1",
  "hnd1",
  "iad1",
  "icn1",
  "kix1",
  "lhr1",
  "pdx1",
  "sfo1",
  "sin1",
  "syd1",
];
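The store and search routes read their tuning from environment variables. A sketch collecting the effective defaults in one place; every name and fallback below mirrors the code above:

    // Effective RAG configuration as read by the routes in this commit.
    const ragConfig = {
      pineconeIndex: process.env.PINECONE_INDEX, // required, no fallback
      embeddingModel: process.env.RAG_EMBEDDING_MODEL ?? "text-embedding-3-large",
      chunkSize: parseInt(process.env.RAG_CHUNK_SIZE ?? "2000", 10),
      chunkOverlap: parseInt(process.env.RAG_CHUNK_OVERLAP ?? "200", 10),
      returnCount: parseInt(process.env.RAG_RETURN_COUNT ?? "4", 10),
      ragEnabled: !!process.env.NEXT_PUBLIC_ENABLE_RAG, // client-visible flag
    };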
@@ -44,6 +44,7 @@ export interface RequestMessage {
}

export interface RequestBody {
  chatSessionId: string;
  messages: RequestMessage[];
  isAzure: boolean;
  azureApiVersion?: string;
@@ -44,6 +44,13 @@ async function handle(req: NextRequest) {
    },
    { basePath: baseUrl },
  );
  const ragEmbeddings = new OpenAIEmbeddings(
    {
      modelName: process.env.RAG_EMBEDDING_MODEL ?? "text-embedding-3-large",
      openAIApiKey: apiKey,
    },
    { basePath: baseUrl },
  );

  var dalleCallback = async (data: string) => {
    var response = new ResponseBody();
@@ -62,6 +69,8 @@ async function handle(req: NextRequest) {
    baseUrl,
    model,
    embeddings,
    reqBody.chatSessionId,
    ragEmbeddings,
    dalleCallback,
  );
  var nodejsTools = await nodejsTool.getCustomTools();
@@ -7,7 +7,7 @@ import {
} from "../constant";
import { ChatMessage, ModelType, useAccessStore, useChatStore } from "../store";
import { ChatGPTApi } from "./platforms/openai";
import { FileApi } from "./platforms/utils";
import { FileApi, FileInfo } from "./platforms/utils";
import { GeminiProApi } from "./platforms/google";
export const ROLES = ["system", "user", "assistant"] as const;
export type MessageRole = (typeof ROLES)[number];
@@ -27,6 +27,7 @@ export interface MultimodalContent {
export interface RequestMessage {
  role: MessageRole;
  content: string | MultimodalContent[];
  fileInfos?: FileInfo[];
}

export interface LLMConfig {
@@ -74,6 +75,7 @@ export interface ChatOptions {
}

export interface AgentChatOptions {
  chatSessionId?: string;
  messages: RequestMessage[];
  config: LLMConfig;
  agentConfig: LLMAgentConfig;
@@ -84,6 +86,13 @@ export interface AgentChatOptions {
  onController?: (controller: AbortController) => void;
}

export interface CreateRAGStoreOptions {
  chatSessionId: string;
  fileInfos: FileInfo[];
  onError?: (err: Error) => void;
  onController?: (controller: AbortController) => void;
}

export interface LLMUsage {
  used: number;
  total: number;
@@ -106,6 +115,7 @@ export abstract class LLMApi {
  abstract speech(options: SpeechOptions): Promise<ArrayBuffer>;
  abstract transcription(options: TranscriptionOptions): Promise<string>;
  abstract toolAgentChat(options: AgentChatOptions): Promise<void>;
  abstract createRAGStore(options: CreateRAGStoreOptions): Promise<void>;
  abstract usage(): Promise<LLMUsage>;
  abstract models(): Promise<LLMModel[]>;
}
@@ -213,8 +223,8 @@ export function getHeaders(ignoreHeaders?: boolean) {
  const apiKey = isGoogle
    ? accessStore.googleApiKey
    : isAzure
    ? accessStore.azureApiKey
    : accessStore.openaiApiKey;
      ? accessStore.azureApiKey
      : accessStore.openaiApiKey;

  const makeBearer = (s: string) =>
    `${isGoogle || isAzure ? "" : "Bearer "}${s.trim()}`;
@@ -2,6 +2,7 @@ import { Google, REQUEST_TIMEOUT_MS } from "@/app/constant";
import {
  AgentChatOptions,
  ChatOptions,
  CreateRAGStoreOptions,
  getHeaders,
  LLMApi,
  LLMModel,
@@ -19,6 +20,9 @@ import {
} from "@/app/utils";

export class GeminiProApi implements LLMApi {
  createRAGStore(options: CreateRAGStoreOptions): Promise<void> {
    throw new Error("Method not implemented.");
  }
  transcription(options: TranscriptionOptions): Promise<string> {
    throw new Error("Method not implemented.");
  }
@@ -12,6 +12,7 @@ import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
import {
  AgentChatOptions,
  ChatOptions,
  CreateRAGStoreOptions,
  getHeaders,
  LLMApi,
  LLMModel,
@@ -362,6 +363,34 @@ export class ChatGPTApi implements LLMApi {
    }
  }

  async createRAGStore(options: CreateRAGStoreOptions): Promise<void> {
    try {
      const accessStore = useAccessStore.getState();
      const isAzure = accessStore.provider === ServiceProvider.Azure;
      let baseUrl = isAzure ? accessStore.azureUrl : accessStore.openaiUrl;
      const requestPayload = {
        sessionId: options.chatSessionId,
        fileInfos: options.fileInfos,
        baseUrl: baseUrl,
      };
      console.log("[Request] openai payload: ", requestPayload);
      const controller = new AbortController();
      options.onController?.(controller);
      let path = "/api/langchain/rag/store";
      const chatPayload = {
        method: "POST",
        body: JSON.stringify(requestPayload),
        signal: controller.signal,
        headers: getHeaders(),
      };
      const res = await fetch(path, chatPayload);
      if (res.status !== 200) throw new Error(await res.text());
    } catch (e) {
      console.log("[Request] failed to make a chat request", e);
      options.onError?.(e as Error);
    }
  }

  async toolAgentChat(options: AgentChatOptions) {
    const messages = options.messages.map((v) => ({
      role: v.role,
@@ -379,6 +408,7 @@ export class ChatGPTApi implements LLMApi {
    const isAzure = accessStore.provider === ServiceProvider.Azure;
    let baseUrl = isAzure ? accessStore.azureUrl : accessStore.openaiUrl;
    const requestPayload = {
      chatSessionId: options.chatSessionId,
      messages,
      isAzure,
      azureApiVersion: accessStore.azureApiVersion,
@@ -18,21 +18,25 @@
  background-position: center;
  background-color: var(--second);
  display: flex;
  position: relative;
  justify-content: center;
  align-items: center;

  span {
  .attach-file-info {
    top: 5px;
    width: 100%;
    position: absolute;
    font-size: 12px;
    overflow: hidden;
    text-overflow: ellipsis;
    display: -webkit-box;
    font-weight: bolder;
    text-align: center;
    word-wrap: break-word;
    word-break: break-all;
    -webkit-line-clamp: 3;
    -webkit-box-orient: vertical;
    line-height: 1.5;
    top: 8px;
    bottom: 8px;
    left: 5px;
    right: 10px;
    pointer-events: none;
    overflow: hidden;
    text-overflow: ellipsis;
    display: -webkit-box;
  }

  .attach-file-mask {
@@ -516,6 +520,17 @@
  transition: all ease 0.3s;
}

.chat-message-item-files {
  display: grid;
  grid-template-columns: repeat(var(--file-count), auto);
  grid-gap: 5px;
}

.chat-message-item-file {
  text-decoration: none;
  color: #aaa;
}

.chat-message-item-image {
  width: 100%;
  margin-top: 10px;
@@ -876,7 +876,7 @@ function _Chat() {
    }
    setIsLoading(true);
    chatStore
      .onUserInput(userInput, attachImages)
      .onUserInput(userInput, attachImages, attachFiles)
      .then(() => setIsLoading(false));
    setAttachImages([]);
    setAttachFiles([]);
@@ -1106,34 +1106,36 @@ function _Chat() {

  // preview messages
  const renderMessages = useMemo(() => {
    return context
      .concat(session.messages as RenderMessage[])
      .concat(
        isLoading
          ? [
              {
                ...createMessage({
                  role: "assistant",
                  content: "……",
                }),
                preview: true,
              },
            ]
          : [],
      )
      .concat(
        userInput.length > 0 && config.sendPreviewBubble
          ? [
              {
                ...createMessage({
                  role: "user",
                  content: userInput,
                }),
                preview: true,
              },
            ]
          : [],
      );
    return (
      context
        .concat(session.messages as RenderMessage[])
        // .concat(
        //   isLoading
        //     ? [
        //         {
        //           ...createMessage({
        //             role: "assistant",
        //             content: "……",
        //           }),
        //           preview: true,
        //         },
        //       ]
        //     : [],
        // )
        .concat(
          userInput.length > 0 && config.sendPreviewBubble
            ? [
                {
                  ...createMessage({
                    role: "user",
                    content: userInput,
                  }),
                  preview: true,
                },
              ]
            : [],
        )
    );
  }, [
    config.sendPreviewBubble,
    context,
@@ -1658,6 +1660,29 @@ function _Chat() {
                parentRef={scrollRef}
                defaultShow={i >= messages.length - 6}
              />
              {message.fileInfos && message.fileInfos.length > 0 && (
                <nav
                  className={styles["chat-message-item-files"]}
                  style={
                    {
                      "--file-count": message.fileInfos.length,
                    } as React.CSSProperties
                  }
                >
                  {message.fileInfos.map((fileInfo, index) => {
                    return (
                      <a
                        key={index}
                        href={fileInfo.filePath}
                        className={styles["chat-message-item-file"]}
                        target="_blank"
                      >
                        {fileInfo.originalFilename}
                      </a>
                    );
                  })}
                </nav>
              )}
              {getMessageImages(message).length == 1 && (
                <img
                  className={styles["chat-message-item-image"]}
@@ -1784,7 +1809,9 @@ function _Chat() {
                className={styles["attach-file"]}
                title={file.originalFilename}
              >
                <span>{file.originalFilename}</span>
                <div className={styles["attach-file-info"]}>
                  {file.originalFilename}
                </div>
                <div className={styles["attach-file-mask"]}>
                  <DeleteFileButton
                    deleteFile={() => {
@@ -70,6 +70,7 @@ const en: LocaleType = {
      EnablePlugins: "Enable Plugins",
      DisablePlugins: "Disable Plugins",
      UploadImage: "Upload Images",
      UploadFle: "Upload Files",
    },
    Rename: "Rename Chat",
    Typing: "Typing…",
@@ -26,6 +26,7 @@ export interface ChatToolMessage {
  toolInput?: string;
}
import { createPersistStore } from "../utils/store";
import { FileInfo } from "../client/platforms/utils";

export type ChatMessage = RequestMessage & {
  date: string;
@@ -304,7 +305,11 @@ export const useChatStore = createPersistStore(
      get().summarizeSession();
    },

    async onUserInput(content: string, attachImages?: string[]) {
    async onUserInput(
      content: string,
      attachImages?: string[],
      attachFiles?: FileInfo[],
    ) {
      const session = get().currentSession();
      const modelConfig = session.mask.modelConfig;

@@ -335,6 +340,7 @@ export const useChatStore = createPersistStore(
      let userMessage: ChatMessage = createMessage({
        role: "user",
        content: mContent,
        fileInfos: attachFiles,
      });
      const botMessage: ChatMessage = createMessage({
        role: "assistant",
@@ -359,7 +365,6 @@ export const useChatStore = createPersistStore(
            m.lang === (getLang() == "cn" ? getLang() : "en")) &&
          m.enable,
      );

      // save user's and bot's message
      get().updateCurrentSession((session) => {
        const savedUserMessage = {
@@ -374,13 +379,23 @@ export const useChatStore = createPersistStore(
      if (
        config.pluginConfig.enable &&
        session.mask.usePlugins &&
        allPlugins.length > 0 &&
        (allPlugins.length > 0 || !!process.env.NEXT_PUBLIC_ENABLE_RAG) &&
        modelConfig.model.startsWith("gpt") &&
        modelConfig.model != "gpt-4-vision-preview"
      ) {
        console.log("[ToolAgent] start");
        const pluginToolNames = allPlugins.map((m) => m.toolName);
        if (!!process.env.NEXT_PUBLIC_ENABLE_RAG)
          pluginToolNames.push("rag-search");
        if (attachFiles && attachFiles.length > 0) {
          console.log("create rag store");
          await api.llm.createRAGStore({
            chatSessionId: session.id,
            fileInfos: attachFiles,
          });
        }
        api.llm.toolAgentChat({
          chatSessionId: session.id,
          messages: sendMessages,
          config: { ...modelConfig, stream: true },
          agentConfig: { ...pluginConfig, useTools: pluginToolNames },
package.json (10 lines changed)
@@ -21,6 +21,7 @@
    "@aws-sdk/s3-request-presigner": "^3.414.0",
    "@fortaine/fetch-event-source": "^3.0.6",
    "@hello-pangea/dnd": "^16.5.0",
    "@langchain/cohere": "^0.0.6",
    "@langchain/community": "0.0.30",
    "@langchain/openai": "0.0.14",
    "@langchain/pinecone": "^0.0.4",
@@ -31,20 +32,24 @@
    "@vercel/speed-insights": "^1.0.2",
    "axios": "^0.26.0",
    "cheerio": "^1.0.0-rc.12",
    "d3-dsv": "2",
    "duck-duck-scrape": "^2.2.4",
    "emoji-picker-react": "^4.9.2",
    "encoding": "^0.1.13",
    "epub2": "^3.0.2",
    "fuse.js": "^7.0.0",
    "html-entities": "^2.4.0",
    "html-to-image": "^1.11.11",
    "html-to-text": "^9.0.5",
    "https-proxy-agent": "^7.0.2",
    "langchain": "0.1.20",
    "langchain": "0.1.30",
    "mammoth": "^1.7.1",
    "mermaid": "^10.6.1",
    "mime": "^4.0.1",
    "nanoid": "^5.0.3",
    "next": "^13.4.9",
    "node-fetch": "^3.3.1",
    "officeparser": "^4.0.8",
    "openai": "^4.28.4",
    "pdf-parse": "^1.1.1",
    "react": "^18.2.0",
@@ -58,6 +63,7 @@
    "remark-math": "^5.1.1",
    "sass": "^1.59.2",
    "spark-md5": "^3.0.2",
    "srt-parser-2": "^1.2.3",
    "use-debounce": "^9.0.4",
    "zustand": "^4.3.8"
  },
@@ -82,7 +88,7 @@
  },
  "resolutions": {
    "lint-staged/yaml": "^2.2.2",
    "@langchain/core": "0.1.30",
    "@langchain/core": "0.1.53",
    "openai": "4.28.4"
  },
  "packageManager": "yarn@1.22.19"