Mirror of https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web.git (synced 2025-05-25 15:10:17 +09:00)

Commit 668ee60e53: Merge remote-tracking branch 'upstream/main'
@@ -78,3 +78,17 @@ BILIBILI_COOKIES=
 # Default: Empty
 # Address of the metaprocess server for advanced video processing features (currently music recognition). Leaving this empty will disable them.
 BILIVID_METAPROCESS_SERVER_ADDRESS=
+
+# anthropic claude Api Key.(optional)
+ANTHROPIC_API_KEY=
+
+### anthropic claude Api version. (optional)
+ANTHROPIC_API_VERSION=
+
+### anthropic claude Api url (optional)
+ANTHROPIC_URL=
+
+### (optional)
+WHITE_WEBDEV_ENDPOINTS=
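Combined with the defaults introduced elsewhere in this commit (the `2023-06-01` version in the access store and `ANTHROPIC_BASE_URL` in app/constant.ts), a minimal filled-in configuration for the new Claude support might look like the sketch below. The key is a hypothetical placeholder, and the version and URL lines can be left empty to fall back to those built-in defaults.

# hypothetical example values, not real credentials
ANTHROPIC_API_KEY=sk-ant-xxxxxxxxxxxxxxxx
ANTHROPIC_API_VERSION=2023-06-01
ANTHROPIC_URL=https://api.anthropic.com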
README.md (23 changed lines)
@@ -265,6 +265,29 @@ Azure API version; you can find it here: [Azure documentation](https://learn.micro
 If you do not want users to use the history summarization feature, set this environment variable to 1.

+### `ANTHROPIC_API_KEY` (optional)
+
+Anthropic Claude API key.
+
+### `ANTHROPIC_API_VERSION` (optional)
+
+Anthropic Claude API version.
+
+### `ANTHROPIC_URL` (optional)
+
+Anthropic Claude API URL.
+
 ### `DISABLE_FAST_LINK` (optional)

 If you want to disable parsing preset settings from links, set this environment variable to 1.

+### `WHITE_WEBDEV_ENDPOINTS` (optional)
+
+Use this option if you want to allow additional WebDAV service addresses to be accessed. Format requirements:
+
+- Each address must be a complete endpoint, as in the sketch after this list
+  > `https://xxxx/xxx`
+- Multiple addresses are joined with `,`
+
 ## Deployment

 ### Container Deployment (Recommended)
README_CN.md (19 changed lines)
@@ -114,6 +114,18 @@ Google Gemini Pro key.
 Google Gemini Pro Api Url.

+### `ANTHROPIC_API_KEY` (optional)
+
+Anthropic Claude API key.
+
+### `ANTHROPIC_API_VERSION` (optional)
+
+Anthropic Claude API version.
+
+### `ANTHROPIC_URL` (optional)
+
+Anthropic Claude API URL.
+
 ### `HIDE_USER_API_KEY` (optional)

 If you do not want users to enter their own API Key, set this environment variable to 1.
@@ -130,6 +142,13 @@ Google Gemini Pro Api Url.
 If you want to disable parsing preset settings from links, set this environment variable to 1.

+### `WHITE_WEBDEV_ENDPOINTS` (optional)
+
+Use this option if you want to allow additional WebDAV service addresses to be accessed. Format requirements:
+
+- Each address must be a complete endpoint
+  > `https://xxxx/xxx`
+- Multiple addresses are joined with `,`
+
 ### `CUSTOM_MODELS` (optional)

 > Example: `+qwen-7b-chat,+glm-6b,-gpt-3.5-turbo,gpt-4-1106-preview=gpt-4-turbo` adds `qwen-7b-chat` and `glm-6b` to the model list, removes `gpt-3.5-turbo` from the list, and displays the `gpt-4-1106-preview` model under the name `gpt-4-turbo`.
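Written as an environment variable, the `CUSTOM_MODELS` example above becomes the single line below (the value is copied verbatim from that example).

CUSTOM_MODELS=+qwen-7b-chat,+glm-6b,-gpt-3.5-turbo,gpt-4-1106-preview=gpt-4-turbo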
app/api/anthropic/[...path]/route.ts (new file, 189 lines)
@@ -0,0 +1,189 @@
import { getServerSideConfig } from "@/app/config/server";
import {
  ANTHROPIC_BASE_URL,
  Anthropic,
  ApiPath,
  DEFAULT_MODELS,
  ModelProvider,
} from "@/app/constant";
import { prettyObject } from "@/app/utils/format";
import { NextRequest, NextResponse } from "next/server";
import { auth } from "../../auth";
import { collectModelTable } from "@/app/utils/model";

const ALLOWD_PATH = new Set([Anthropic.ChatPath, Anthropic.ChatPath1]);

async function handle(
  req: NextRequest,
  { params }: { params: { path: string[] } },
) {
  console.log("[Anthropic Route] params ", params);

  if (req.method === "OPTIONS") {
    return NextResponse.json({ body: "OK" }, { status: 200 });
  }

  const subpath = params.path.join("/");

  if (!ALLOWD_PATH.has(subpath)) {
    console.log("[Anthropic Route] forbidden path ", subpath);
    return NextResponse.json(
      {
        error: true,
        msg: "you are not allowed to request " + subpath,
      },
      {
        status: 403,
      },
    );
  }

  const authResult = auth(req, ModelProvider.Claude);
  if (authResult.error) {
    return NextResponse.json(authResult, {
      status: 401,
    });
  }

  try {
    const response = await request(req);
    return response;
  } catch (e) {
    console.error("[Anthropic] ", e);
    return NextResponse.json(prettyObject(e));
  }
}

export const GET = handle;
export const POST = handle;

export const runtime = "edge";
export const preferredRegion = [
  "arn1",
  "bom1",
  "cdg1",
  "cle1",
  "cpt1",
  "dub1",
  "fra1",
  "gru1",
  "hnd1",
  "iad1",
  "icn1",
  "kix1",
  "lhr1",
  "pdx1",
  "sfo1",
  "sin1",
  "syd1",
];

const serverConfig = getServerSideConfig();

async function request(req: NextRequest) {
  const controller = new AbortController();

  let authHeaderName = "x-api-key";
  let authValue =
    req.headers.get(authHeaderName) ||
    req.headers.get("Authorization")?.replaceAll("Bearer ", "").trim() ||
    serverConfig.anthropicApiKey ||
    "";

  let path = `${req.nextUrl.pathname}`.replaceAll(ApiPath.Anthropic, "");

  let baseUrl =
    serverConfig.anthropicUrl || serverConfig.baseUrl || ANTHROPIC_BASE_URL;

  if (!baseUrl.startsWith("http")) {
    baseUrl = `https://${baseUrl}`;
  }

  if (baseUrl.endsWith("/")) {
    baseUrl = baseUrl.slice(0, -1);
  }

  console.log("[Proxy] ", path);
  console.log("[Base Url]", baseUrl);

  const timeoutId = setTimeout(
    () => {
      controller.abort();
    },
    10 * 60 * 1000,
  );

  const fetchUrl = `${baseUrl}${path}`;

  const fetchOptions: RequestInit = {
    headers: {
      "Content-Type": "application/json",
      "Cache-Control": "no-store",
      [authHeaderName]: authValue,
      "anthropic-version":
        req.headers.get("anthropic-version") ||
        serverConfig.anthropicApiVersion ||
        Anthropic.Vision,
    },
    method: req.method,
    body: req.body,
    redirect: "manual",
    // @ts-ignore
    duplex: "half",
    signal: controller.signal,
  };

  // #1815 try to refuse some request to some models
  if (serverConfig.customModels && req.body) {
    try {
      const modelTable = collectModelTable(
        DEFAULT_MODELS,
        serverConfig.customModels,
      );
      const clonedBody = await req.text();
      fetchOptions.body = clonedBody;

      const jsonBody = JSON.parse(clonedBody) as { model?: string };

      // not undefined and is false
      if (modelTable[jsonBody?.model ?? ""].available === false) {
        return NextResponse.json(
          {
            error: true,
            message: `you are not allowed to use ${jsonBody?.model} model`,
          },
          {
            status: 403,
          },
        );
      }
    } catch (e) {
      console.error(`[Anthropic] filter`, e);
    }
  }
  console.log("[Anthropic request]", fetchOptions.headers, req.method);
  try {
    const res = await fetch(fetchUrl, fetchOptions);

    console.log(
      "[Anthropic response]",
      res.status,
      " ",
      res.headers,
      res.url,
    );
    // to prevent browser prompt for credentials
    const newHeaders = new Headers(res.headers);
    newHeaders.delete("www-authenticate");
    // to disable nginx buffering
    newHeaders.set("X-Accel-Buffering", "no");

    return new Response(res.body, {
      status: res.status,
      statusText: res.statusText,
      headers: newHeaders,
    });
  } finally {
    clearTimeout(timeoutId);
  }
}
@@ -62,12 +62,31 @@ export function auth(req: NextRequest, modelProvider: ModelProvider) {
   if (!apiKey) {
     const serverConfig = getServerSideConfig();

-    const systemApiKey =
-      modelProvider === ModelProvider.GeminiPro
-        ? serverConfig.googleApiKey
-        : serverConfig.isAzure
-          ? serverConfig.azureApiKey
-          : serverConfig.apiKey;
+    // const systemApiKey =
+    //   modelProvider === ModelProvider.GeminiPro
+    //     ? serverConfig.googleApiKey
+    //     : serverConfig.isAzure
+    //       ? serverConfig.azureApiKey
+    //       : serverConfig.apiKey;
+
+    let systemApiKey: string | undefined;
+
+    switch (modelProvider) {
+      case ModelProvider.GeminiPro:
+        systemApiKey = serverConfig.googleApiKey;
+        break;
+      case ModelProvider.Claude:
+        systemApiKey = serverConfig.anthropicApiKey;
+        break;
+      case ModelProvider.GPT:
+      default:
+        if (serverConfig.isAzure) {
+          systemApiKey = serverConfig.azureApiKey;
+        } else {
+          systemApiKey = serverConfig.apiKey;
+        }
+    }
+
     if (systemApiKey) {
       console.log("[Auth] use system api key");
       req.headers.set("Authorization", `Bearer ${systemApiKey}`);
@@ -1,5 +1,14 @@
 import { NextRequest, NextResponse } from "next/server";
-import { STORAGE_KEY } from "../../../constant";
+import { STORAGE_KEY, internalWhiteWebDavEndpoints } from "../../../constant";
+import { getServerSideConfig } from "@/app/config/server";
+
+const config = getServerSideConfig();
+
+const mergedWhiteWebDavEndpoints = [
+  ...internalWhiteWebDavEndpoints,
+  ...config.whiteWebDevEndpoints,
+].filter((domain) => Boolean(domain.trim()));
+
 async function handle(
   req: NextRequest,
   { params }: { params: { path: string[] } },
@@ -14,7 +23,9 @@ async function handle(
   let endpoint = requestUrl.searchParams.get("endpoint");

   // Validate the endpoint to prevent potential SSRF attacks
-  if (!endpoint || !endpoint.startsWith("/")) {
+  if (
+    !mergedWhiteWebDavEndpoints.some((white) => endpoint?.startsWith(white))
+  ) {
     return NextResponse.json(
       {
         error: true,
@@ -25,6 +36,11 @@ async function handle(
       },
     );
   }
+
+  if (!endpoint?.endsWith("/")) {
+    endpoint += "/";
+  }
+
   const endpointPath = params.path.join("/");
   const targetPath = `${endpoint}/${endpointPath}`;

@@ -108,7 +124,7 @@ async function handle(
   return fetchResult;
 }

-export const POST = handle;
+export const PUT = handle;
 export const GET = handle;
 export const OPTIONS = handle;
@@ -9,6 +9,7 @@ import { ChatMessage, ModelType, useAccessStore, useChatStore } from "../store";
 import { ChatGPTApi } from "./platforms/openai";
 import { FileApi, FileInfo } from "./platforms/utils";
 import { GeminiProApi } from "./platforms/google";
+import { ClaudeApi } from "./platforms/anthropic";
 export const ROLES = ["system", "user", "assistant"] as const;
 export type MessageRole = (typeof ROLES)[number];

@@ -152,9 +153,14 @@ export class ClientApi {
   public file: FileApi;

   constructor(provider: ModelProvider = ModelProvider.GPT) {
-    if (provider === ModelProvider.GeminiPro) {
-      this.llm = new GeminiProApi();
-    } else {
+    switch (provider) {
+      case ModelProvider.GeminiPro:
+        this.llm = new GeminiProApi();
+        break;
+      case ModelProvider.Claude:
+        this.llm = new ClaudeApi();
+        break;
+      default:
        this.llm = new ChatGPTApi();
     }
     this.file = new FileApi();
app/client/platforms/anthropic.ts (new file, 424 lines)
@@ -0,0 +1,424 @@
import { ACCESS_CODE_PREFIX, Anthropic, ApiPath } from "@/app/constant";
import {
  AgentChatOptions,
  ChatOptions,
  CreateRAGStoreOptions,
  LLMApi,
  MultimodalContent,
  SpeechOptions,
  TranscriptionOptions,
} from "../api";
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
import { getClientConfig } from "@/app/config/client";
import { DEFAULT_API_HOST } from "@/app/constant";
import { RequestMessage } from "@/app/typing";
import {
  EventStreamContentType,
  fetchEventSource,
} from "@fortaine/fetch-event-source";

import Locale from "../../locales";
import { prettyObject } from "@/app/utils/format";
import { getMessageTextContent, isVisionModel } from "@/app/utils";

export type MultiBlockContent = {
  type: "image" | "text";
  source?: {
    type: string;
    media_type: string;
    data: string;
  };
  text?: string;
};

export type AnthropicMessage = {
  role: (typeof ClaudeMapper)[keyof typeof ClaudeMapper];
  content: string | MultiBlockContent[];
};

export interface AnthropicChatRequest {
  model: string; // The model that will complete your prompt.
  messages: AnthropicMessage[]; // The prompt that you want Claude to complete.
  max_tokens: number; // The maximum number of tokens to generate before stopping.
  stop_sequences?: string[]; // Sequences that will cause the model to stop generating completion text.
  temperature?: number; // Amount of randomness injected into the response.
  top_p?: number; // Use nucleus sampling.
  top_k?: number; // Only sample from the top K options for each subsequent token.
  metadata?: object; // An object describing metadata about the request.
  stream?: boolean; // Whether to incrementally stream the response using server-sent events.
}

export interface ChatRequest {
  model: string; // The model that will complete your prompt.
  prompt: string; // The prompt that you want Claude to complete.
  max_tokens_to_sample: number; // The maximum number of tokens to generate before stopping.
  stop_sequences?: string[]; // Sequences that will cause the model to stop generating completion text.
  temperature?: number; // Amount of randomness injected into the response.
  top_p?: number; // Use nucleus sampling.
  top_k?: number; // Only sample from the top K options for each subsequent token.
  metadata?: object; // An object describing metadata about the request.
  stream?: boolean; // Whether to incrementally stream the response using server-sent events.
}

export interface ChatResponse {
  completion: string;
  stop_reason: "stop_sequence" | "max_tokens";
  model: string;
}

export type ChatStreamResponse = ChatResponse & {
  stop?: string;
  log_id: string;
};

const ClaudeMapper = {
  assistant: "assistant",
  user: "user",
  system: "user",
} as const;

const keys = ["claude-2, claude-instant-1"];

export class ClaudeApi implements LLMApi {
  speech(options: SpeechOptions): Promise<ArrayBuffer> {
    throw new Error("Method not implemented.");
  }
  transcription(options: TranscriptionOptions): Promise<string> {
    throw new Error("Method not implemented.");
  }
  toolAgentChat(options: AgentChatOptions): Promise<void> {
    throw new Error("Method not implemented.");
  }
  createRAGStore(options: CreateRAGStoreOptions): Promise<void> {
    throw new Error("Method not implemented.");
  }
  extractMessage(res: any) {
    console.log("[Response] claude response: ", res);

    return res?.content?.[0]?.text;
  }
  async chat(options: ChatOptions): Promise<void> {
    const visionModel = isVisionModel(options.config.model);

    const accessStore = useAccessStore.getState();

    const shouldStream = !!options.config.stream;

    const modelConfig = {
      ...useAppConfig.getState().modelConfig,
      ...useChatStore.getState().currentSession().mask.modelConfig,
      ...{
        model: options.config.model,
      },
    };

    const messages = [...options.messages];

    const keys = ["system", "user"];

    // roles must alternate between "user" and "assistant" in claude, so add a fake assistant message between two user messages
    for (let i = 0; i < messages.length - 1; i++) {
      const message = messages[i];
      const nextMessage = messages[i + 1];

      if (keys.includes(message.role) && keys.includes(nextMessage.role)) {
        messages[i] = [
          message,
          {
            role: "assistant",
            content: ";",
          },
        ] as any;
      }
    }

    const prompt = messages
      .flat()
      .filter((v) => {
        if (!v.content) return false;
        if (typeof v.content === "string" && !v.content.trim()) return false;
        return true;
      })
      .map((v) => {
        const { role, content } = v;
        const insideRole = ClaudeMapper[role] ?? "user";

        if (!visionModel || typeof content === "string") {
          return {
            role: insideRole,
            content: getMessageTextContent(v),
          };
        }
        return {
          role: insideRole,
          content: content
            .filter((v) => v.image_url || v.text)
            .map(({ type, text, image_url }) => {
              if (type === "text") {
                return {
                  type,
                  text: text!,
                };
              }
              const { url = "" } = image_url || {};
              const colonIndex = url.indexOf(":");
              const semicolonIndex = url.indexOf(";");
              const comma = url.indexOf(",");

              const mimeType = url.slice(colonIndex + 1, semicolonIndex);
              const encodeType = url.slice(semicolonIndex + 1, comma);
              const data = url.slice(comma + 1);

              return {
                type: "image" as const,
                source: {
                  type: encodeType,
                  media_type: mimeType,
                  data,
                },
              };
            }),
        };
      });

    const requestBody: AnthropicChatRequest = {
      messages: prompt,
      stream: shouldStream,

      model: modelConfig.model,
      max_tokens: modelConfig.max_tokens,
      temperature: modelConfig.temperature,
      top_p: modelConfig.top_p,
      // top_k: modelConfig.top_k,
      top_k: 5,
    };

    const path = this.path(Anthropic.ChatPath);

    const controller = new AbortController();
    options.onController?.(controller);

    const payload = {
      method: "POST",
      body: JSON.stringify(requestBody),
      signal: controller.signal,
      headers: {
        "Content-Type": "application/json",
        Accept: "application/json",
        "x-api-key": accessStore.anthropicApiKey,
        "anthropic-version": accessStore.anthropicApiVersion,
        Authorization: getAuthKey(accessStore.anthropicApiKey),
      },
    };

    if (shouldStream) {
      try {
        const context = {
          text: "",
          finished: false,
        };

        const finish = () => {
          if (!context.finished) {
            options.onFinish(context.text);
            context.finished = true;
          }
        };

        controller.signal.onabort = finish;
        fetchEventSource(path, {
          ...payload,
          async onopen(res) {
            const contentType = res.headers.get("content-type");
            console.log("response content type: ", contentType);

            if (contentType?.startsWith("text/plain")) {
              context.text = await res.clone().text();
              return finish();
            }

            if (
              !res.ok ||
              !res.headers
                .get("content-type")
                ?.startsWith(EventStreamContentType) ||
              res.status !== 200
            ) {
              const responseTexts = [context.text];
              let extraInfo = await res.clone().text();
              try {
                const resJson = await res.clone().json();
                extraInfo = prettyObject(resJson);
              } catch {}

              if (res.status === 401) {
                responseTexts.push(Locale.Error.Unauthorized);
              }

              if (extraInfo) {
                responseTexts.push(extraInfo);
              }

              context.text = responseTexts.join("\n\n");

              return finish();
            }
          },
          onmessage(msg) {
            let chunkJson:
              | undefined
              | {
                  type: "content_block_delta" | "content_block_stop";
                  delta?: {
                    type: "text_delta";
                    text: string;
                  };
                  index: number;
                };
            try {
              chunkJson = JSON.parse(msg.data);
            } catch (e) {
              console.error("[Response] parse error", msg.data);
            }

            if (!chunkJson || chunkJson.type === "content_block_stop") {
              return finish();
            }

            const { delta } = chunkJson;
            if (delta?.text) {
              context.text += delta.text;
              options.onUpdate?.(context.text, delta.text);
            }
          },
          onclose() {
            finish();
          },
          onerror(e) {
            options.onError?.(e);
            throw e;
          },
          openWhenHidden: true,
        });
      } catch (e) {
        console.error("failed to chat", e);
        options.onError?.(e as Error);
      }
    } else {
      try {
        controller.signal.onabort = () => options.onFinish("");

        const res = await fetch(path, payload);
        const resJson = await res.json();

        const message = this.extractMessage(resJson);
        options.onFinish(message);
      } catch (e) {
        console.error("failed to chat", e);
        options.onError?.(e as Error);
      }
    }
  }
  async usage() {
    return {
      used: 0,
      total: 0,
    };
  }
  async models() {
    // const provider = {
    //   id: "anthropic",
    //   providerName: "Anthropic",
    //   providerType: "anthropic",
    // };

    return [
      // {
      //   name: "claude-instant-1.2",
      //   available: true,
      //   provider,
      // },
      // {
      //   name: "claude-2.0",
      //   available: true,
      //   provider,
      // },
      // {
      //   name: "claude-2.1",
      //   available: true,
      //   provider,
      // },
      // {
      //   name: "claude-3-opus-20240229",
      //   available: true,
      //   provider,
      // },
      // {
      //   name: "claude-3-sonnet-20240229",
      //   available: true,
      //   provider,
      // },
      // {
      //   name: "claude-3-haiku-20240307",
      //   available: true,
      //   provider,
      // },
    ];
  }
  path(path: string): string {
    const accessStore = useAccessStore.getState();

    let baseUrl: string = accessStore.anthropicUrl;

    // if endpoint is empty, use default endpoint
    if (baseUrl.trim().length === 0) {
      const isApp = !!getClientConfig()?.isApp;

      baseUrl = isApp
        ? DEFAULT_API_HOST + "/api/proxy/anthropic"
        : ApiPath.Anthropic;
    }

    if (!baseUrl.startsWith("http") && !baseUrl.startsWith("/api")) {
      baseUrl = "https://" + baseUrl;
    }

    baseUrl = trimEnd(baseUrl, "/");

    return `${baseUrl}/${path}`;
  }
}

function trimEnd(s: string, end = " ") {
  if (end.length === 0) return s;

  while (s.endsWith(end)) {
    s = s.slice(0, -end.length);
  }

  return s;
}

function bearer(value: string) {
  return `Bearer ${value.trim()}`;
}

function getAuthKey(apiKey = "") {
  const accessStore = useAccessStore.getState();
  const isApp = !!getClientConfig()?.isApp;
  let authKey = "";

  if (apiKey) {
    // use user's api key first
    authKey = bearer(apiKey);
  } else if (
    accessStore.enabledAccessControl() &&
    !isApp &&
    !!accessStore.accessCode
  ) {
    // or use access code
    authKey = bearer(ACCESS_CODE_PREFIX + accessStore.accessCode);
  }

  return authKey;
}
@@ -44,6 +44,20 @@ export interface OpenAIListModelResponse {
   }>;
 }

+interface RequestPayload {
+  messages: {
+    role: "system" | "user" | "assistant";
+    content: string | MultimodalContent[];
+  }[];
+  stream?: boolean;
+  model: string;
+  temperature: number;
+  presence_penalty: number;
+  frequency_penalty: number;
+  top_p: number;
+  max_tokens?: number;
+}
+
 export class ChatGPTApi implements LLMApi {
   private disableListModels = true;

@@ -181,7 +195,8 @@ export class ChatGPTApi implements LLMApi {
         model: options.config.model,
       },
     };
-    const requestPayload = {
+
+    const requestPayload: RequestPayload = {
       messages,
       stream: options.config.stream,
       model: modelConfig.model,
@@ -189,21 +204,13 @@ export class ChatGPTApi implements LLMApi {
       presence_penalty: modelConfig.presence_penalty,
       frequency_penalty: modelConfig.frequency_penalty,
       top_p: modelConfig.top_p,
-      max_tokens: modelConfig.model.includes("vision")
-        ? modelConfig.max_tokens
-        : null,
       // max_tokens: Math.max(modelConfig.max_tokens, 1024),
       // Please do not ask me why not send max_tokens, no reason, this param is just shit, I dont want to explain anymore.
     };

     // add max_tokens to vision model
     if (visionModel) {
-      Object.defineProperty(requestPayload, "max_tokens", {
-        enumerable: true,
-        configurable: true,
-        writable: true,
-        value: modelConfig.max_tokens,
-      });
+      requestPayload["max_tokens"] = Math.max(modelConfig.max_tokens, 4000);
     }

     console.log("[Request] openai payload: ", requestPayload);
@@ -40,6 +40,7 @@ import { EXPORT_MESSAGE_CLASS_NAME, ModelProvider } from "../constant";
 import { getClientConfig } from "../config/client";
 import { ClientApi } from "../client/api";
 import { getMessageTextContent } from "../utils";
+import { identifyDefaultClaudeModel } from "../utils/checkers";

 const Markdown = dynamic(async () => (await import("./markdown")).Markdown, {
   loading: () => <LoadingIcon />,
@@ -315,6 +316,8 @@ export function PreviewActions(props: {
   var api: ClientApi;
   if (config.modelConfig.model.startsWith("gemini")) {
     api = new ClientApi(ModelProvider.GeminiPro);
+  } else if (identifyDefaultClaudeModel(config.modelConfig.model)) {
+    api = new ClientApi(ModelProvider.Claude);
   } else {
     api = new ClientApi(ModelProvider.GPT);
   }
@@ -29,6 +29,7 @@ import { AuthPage } from "./auth";
 import { getClientConfig } from "../config/client";
 import { ClientApi } from "../client/api";
 import { useAccessStore } from "../store";
+import { identifyDefaultClaudeModel } from "../utils/checkers";

 export function Loading(props: { noLogo?: boolean }) {
   return (
@@ -180,6 +181,8 @@ export function useLoadData() {
   var api: ClientApi;
   if (config.modelConfig.model.startsWith("gemini")) {
     api = new ClientApi(ModelProvider.GeminiPro);
+  } else if (identifyDefaultClaudeModel(config.modelConfig.model)) {
+    api = new ClientApi(ModelProvider.Claude);
   } else {
     api = new ClientApi(ModelProvider.GPT);
   }
@@ -135,10 +135,9 @@ function escapeBrackets(text: string) {
 }

 function _MarkDownContent(props: { content: string }) {
-  const escapedContent = useMemo(
-    () => escapeBrackets(escapeDollarNumber(props.content)),
-    [props.content],
-  );
+  const escapedContent = useMemo(() => {
+    return escapeBrackets(escapeDollarNumber(props.content));
+  }, [props.content]);

   return (
     <ReactMarkdown
@@ -51,6 +51,7 @@ import Locale, {
 import { copyToClipboard } from "../utils";
 import Link from "next/link";
 import {
+  Anthropic,
   Azure,
   Google,
   OPENAI_BASE_URL,
@@ -970,7 +971,7 @@ export function Settings() {
             </Select>
           </ListItem>

-          {accessStore.provider === "OpenAI" ? (
+          {accessStore.provider === ServiceProvider.OpenAI && (
             <>
               <ListItem
                 title={Locale.Settings.Access.OpenAI.Endpoint.Title}
@@ -1009,7 +1010,8 @@ export function Settings() {
                 />
               </ListItem>
             </>
-          ) : accessStore.provider === "Azure" ? (
+          )}
+          {accessStore.provider === ServiceProvider.Azure && (
             <>
               <ListItem
                 title={Locale.Settings.Access.Azure.Endpoint.Title}
@@ -1068,7 +1070,8 @@ export function Settings() {
                 ></input>
               </ListItem>
             </>
-          ) : accessStore.provider === "Google" ? (
+          )}
+          {accessStore.provider === ServiceProvider.Google && (
             <>
               <ListItem
                 title={Locale.Settings.Access.Google.Endpoint.Title}
@@ -1127,7 +1130,70 @@ export function Settings() {
                 ></input>
               </ListItem>
             </>
-          ) : null}
+          )}
+          {accessStore.provider === ServiceProvider.Anthropic && (
+            <>
+              <ListItem
+                title={Locale.Settings.Access.Anthropic.Endpoint.Title}
+                subTitle={
+                  Locale.Settings.Access.Anthropic.Endpoint.SubTitle +
+                  Anthropic.ExampleEndpoint
+                }
+              >
+                <input
+                  type="text"
+                  value={accessStore.anthropicUrl}
+                  placeholder={Anthropic.ExampleEndpoint}
+                  onChange={(e) =>
+                    accessStore.update(
+                      (access) =>
+                        (access.anthropicUrl = e.currentTarget.value),
+                    )
+                  }
+                ></input>
+              </ListItem>
+              <ListItem
+                title={Locale.Settings.Access.Anthropic.ApiKey.Title}
+                subTitle={
+                  Locale.Settings.Access.Anthropic.ApiKey.SubTitle
+                }
+              >
+                <PasswordInput
+                  value={accessStore.anthropicApiKey}
+                  type="text"
+                  placeholder={
+                    Locale.Settings.Access.Anthropic.ApiKey.Placeholder
+                  }
+                  onChange={(e) => {
+                    accessStore.update(
+                      (access) =>
+                        (access.anthropicApiKey =
+                          e.currentTarget.value),
+                    );
+                  }}
+                />
+              </ListItem>
+              <ListItem
+                title={Locale.Settings.Access.Anthropic.ApiVerion.Title}
+                subTitle={
+                  Locale.Settings.Access.Anthropic.ApiVerion.SubTitle
+                }
+              >
+                <input
+                  type="text"
+                  value={accessStore.anthropicApiVersion}
+                  placeholder={Anthropic.Vision}
+                  onChange={(e) =>
+                    accessStore.update(
+                      (access) =>
+                        (access.anthropicApiVersion =
+                          e.currentTarget.value),
+                    )
+                  }
+                ></input>
+              </ListItem>
+            </>
+          )}
         </>
       )}
     </>
@@ -69,6 +69,7 @@ export const getServerSideConfig = () => {

   const isAzure = !!process.env.AZURE_URL;
   const isGoogle = !!process.env.GOOGLE_API_KEY;
+  const isAnthropic = !!process.env.ANTHROPIC_API_KEY;

   const apiKeyEnvVar = process.env.OPENAI_API_KEY ?? "";
   const apiKeys = apiKeyEnvVar.split(",").map((v) => v.trim());
@@ -78,6 +79,10 @@ export const getServerSideConfig = () => {
     `[Server Config] using ${randomIndex + 1} of ${apiKeys.length} api key`,
   );

+  const whiteWebDevEndpoints = (process.env.WHITE_WEBDEV_ENDPOINTS ?? "").split(
+    ",",
+  );
+
   return {
     baseUrl: process.env.BASE_URL,
     apiKey,
@@ -92,6 +97,11 @@ export const getServerSideConfig = () => {
     googleApiKey: process.env.GOOGLE_API_KEY,
     googleUrl: process.env.GEMINI_BASE_URL ?? process.env.GOOGLE_URL,

+    isAnthropic,
+    anthropicApiKey: process.env.ANTHROPIC_API_KEY,
+    anthropicApiVersion: process.env.ANTHROPIC_API_VERSION,
+    anthropicUrl: process.env.ANTHROPIC_URL,
+
     gtmId: process.env.GTM_ID,

     needCode: ACCESS_CODES.size > 0,
@@ -107,6 +117,8 @@ export const getServerSideConfig = () => {
     disableFastLink: !!process.env.DISABLE_FAST_LINK,
     customModels,

+    whiteWebDevEndpoints,
+
     isStoreFileToLocal:
       !!process.env.NEXT_PUBLIC_ENABLE_NODEJS_PLUGIN &&
       !process.env.R2_ACCOUNT_ID &&
app/constant.ts (107 changed lines)
@@ -11,6 +11,8 @@ export const RUNTIME_CONFIG_DOM = "danger-runtime-config";
 export const DEFAULT_API_HOST = "https://api.nextchat.dev";
 export const OPENAI_BASE_URL = "https://api.openai.com";
 export const GOOGLE_BASE_URL = "https://generativelanguage.googleapis.com";
+export const GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/";
+export const ANTHROPIC_BASE_URL = "https://api.anthropic.com";

 export enum Path {
   Home = "/",
@@ -26,6 +28,7 @@ export enum ApiPath {
   Cors = "",
   OpenAI = "/api/openai",
   GoogleAI = "/api/google",
+  Anthropic = "/api/anthropic",
 }

 export enum SlotID {
@@ -70,13 +73,22 @@ export enum ServiceProvider {
   OpenAI = "OpenAI",
   Azure = "Azure",
   Google = "Google",
+  Anthropic = "Anthropic",
 }

 export enum ModelProvider {
   GPT = "GPT",
   GeminiPro = "GeminiPro",
+  Claude = "Claude",
 }

+export const Anthropic = {
+  ChatPath: "v1/messages",
+  ChatPath1: "v1/complete",
+  ExampleEndpoint: "https://api.anthropic.com",
+  Vision: "2023-06-01",
+};
+
 export const OpenaiPath = {
   ChatPath: "v1/chat/completions",
   SpeechPath: "v1/audio/speech",
@@ -99,12 +111,20 @@ export const Google = {
 };

 export const DEFAULT_INPUT_TEMPLATE = `{{input}}`; // input / time / model / lang
+// export const DEFAULT_SYSTEM_TEMPLATE = `
+// You are ChatGPT, a large language model trained by {{ServiceProvider}}.
+// Knowledge cutoff: {{cutoff}}
+// Current model: {{model}}
+// Current time: {{time}}
+// Latex inline: $x^2$
+// Latex block: $$e=mc^2$$
+// `;
 export const DEFAULT_SYSTEM_TEMPLATE = `
 You are ChatGPT, a large language model trained by {{ServiceProvider}}.
 Knowledge cutoff: {{cutoff}}
 Current model: {{model}}
 Current time: {{time}}
-Latex inline: $x^2$
+Latex inline: \\(x^2\\)
 Latex block: $$e=mc^2$$
 `;

@@ -113,6 +133,7 @@ export const GEMINI_SUMMARIZE_MODEL = "gemini-pro";

 export const KnowledgeCutOffDate: Record<string, string> = {
   default: "2021-09",
+  "gpt-4-turbo": "2023-12",
   "gpt-4-turbo-preview": "2023-12",
   "gpt-4-1106-preview": "2023-04",
   "gpt-4-0125-preview": "2023-12",
@@ -175,6 +196,24 @@ export const DEFAULT_MODELS = [
       providerType: "openai",
     },
   },
+  {
+    name: "gpt-4-turbo",
+    available: true,
+    provider: {
+      id: "openai",
+      providerName: "OpenAI",
+      providerType: "openai",
+    },
+  },
+  {
+    name: "gpt-4-turbo-2024-04-09",
+    available: true,
+    provider: {
+      id: "openai",
+      providerName: "OpenAI",
+      providerType: "openai",
+    },
+  },
   {
     name: "gpt-4-turbo-preview",
     available: true,
@@ -283,7 +322,73 @@ export const DEFAULT_MODELS = [
       providerType: "google",
     },
   },
+  {
+    name: "claude-instant-1.2",
+    available: true,
+    provider: {
+      id: "anthropic",
+      providerName: "Anthropic",
+      providerType: "anthropic",
+    },
+  },
+  {
+    name: "claude-2.0",
+    available: true,
+    provider: {
+      id: "anthropic",
+      providerName: "Anthropic",
+      providerType: "anthropic",
+    },
+  },
+  {
+    name: "claude-2.1",
+    available: true,
+    provider: {
+      id: "anthropic",
+      providerName: "Anthropic",
+      providerType: "anthropic",
+    },
+  },
+  {
+    name: "claude-3-opus-20240229",
+    available: true,
+    provider: {
+      id: "anthropic",
+      providerName: "Anthropic",
+      providerType: "anthropic",
+    },
+  },
+  {
+    name: "claude-3-sonnet-20240229",
+    available: true,
+    provider: {
+      id: "anthropic",
+      providerName: "Anthropic",
+      providerType: "anthropic",
+    },
+  },
+  {
+    name: "claude-3-haiku-20240307",
+    available: true,
+    provider: {
+      id: "anthropic",
+      providerName: "Anthropic",
+      providerType: "anthropic",
+    },
+  },
 ] as const;

 export const CHAT_PAGE_SIZE = 15;
 export const MAX_RENDER_MSG_COUNT = 45;
+
+// some famous webdav endpoints
+export const internalWhiteWebDavEndpoints = [
+  "https://dav.jianguoyun.com/dav/",
+  "https://dav.dropdav.com/",
+  "https://dav.box.com/dav",
+  "https://nanao.teracloud.jp/dav/",
+  "https://webdav.4shared.com/",
+  "https://dav.idrivesync.com",
+  "https://webdav.yandex.com",
+  "https://app.koofr.net/dav/Koofr",
+];
|
@ -36,6 +36,10 @@ export default function RootLayout({
|
|||||||
<html lang="en">
|
<html lang="en">
|
||||||
<head>
|
<head>
|
||||||
<meta name="config" content={JSON.stringify(getClientConfig())} />
|
<meta name="config" content={JSON.stringify(getClientConfig())} />
|
||||||
|
<meta
|
||||||
|
name="viewport"
|
||||||
|
content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no"
|
||||||
|
/>
|
||||||
<link rel="manifest" href="/site.webmanifest"></link>
|
<link rel="manifest" href="/site.webmanifest"></link>
|
||||||
<script src="/serviceWorkerRegister.js" defer></script>
|
<script src="/serviceWorkerRegister.js" defer></script>
|
||||||
</head>
|
</head>
|
||||||
|
@@ -321,6 +321,23 @@ const cn = {
         SubTitle: "选择指定的部分版本",
       },
     },
+    Anthropic: {
+      ApiKey: {
+        Title: "接口密钥",
+        SubTitle: "使用自定义 Anthropic Key 绕过密码访问限制",
+        Placeholder: "Anthropic API Key",
+      },
+
+      Endpoint: {
+        Title: "接口地址",
+        SubTitle: "样例:",
+      },
+
+      ApiVerion: {
+        Title: "接口版本 (claude api version)",
+        SubTitle: "选择一个特定的 API 版本输入",
+      },
+    },
     Google: {
       ApiKey: {
         Title: "API 密钥",
@@ -324,6 +324,24 @@ const en: LocaleType = {
         SubTitle: "Check your api version from azure console",
       },
     },
+    Anthropic: {
+      ApiKey: {
+        Title: "Anthropic API Key",
+        SubTitle:
+          "Use a custom Anthropic Key to bypass password access restrictions",
+        Placeholder: "Anthropic API Key",
+      },
+
+      Endpoint: {
+        Title: "Endpoint Address",
+        SubTitle: "Example:",
+      },
+
+      ApiVerion: {
+        Title: "API Version (claude api version)",
+        SubTitle: "Select and input a specific API version",
+      },
+    },
     CustomModel: {
       Title: "Custom Models",
       SubTitle: "Custom model options, seperated by comma",
@@ -316,6 +316,23 @@ const pt: PartialLocaleType = {
         SubTitle: "Verifique sua versão API do console Azure",
       },
     },
+    Anthropic: {
+      ApiKey: {
+        Title: "Chave API Anthropic",
+        SubTitle: "Verifique sua chave API do console Anthropic",
+        Placeholder: "Chave API Anthropic",
+      },
+
+      Endpoint: {
+        Title: "Endpoint Address",
+        SubTitle: "Exemplo: ",
+      },
+
+      ApiVerion: {
+        Title: "Versão API (Versão api claude)",
+        SubTitle: "Verifique sua versão API do console Anthropic",
+      },
+    },
     CustomModel: {
       Title: "Modelos Personalizados",
       SubTitle: "Opções de modelo personalizado, separados por vírgula",
@@ -317,6 +317,23 @@ const sk: PartialLocaleType = {
         SubTitle: "Skontrolujte svoju verziu API v Azure konzole",
       },
     },
+    Anthropic: {
+      ApiKey: {
+        Title: "API kľúč Anthropic",
+        SubTitle: "Skontrolujte svoj API kľúč v Anthropic konzole",
+        Placeholder: "API kľúč Anthropic",
+      },
+
+      Endpoint: {
+        Title: "Adresa koncového bodu",
+        SubTitle: "Príklad:",
+      },
+
+      ApiVerion: {
+        Title: "Verzia API (claude verzia API)",
+        SubTitle: "Vyberte špecifickú verziu časti",
+      },
+    },
     CustomModel: {
       Title: "Vlastné modely",
       SubTitle: "Možnosti vlastného modelu, oddelené čiarkou",
@@ -314,6 +314,23 @@ const tw = {
         SubTitle: "選擇指定的部分版本",
       },
     },
+    Anthropic: {
+      ApiKey: {
+        Title: "API 密鑰",
+        SubTitle: "從 Anthropic AI 獲取您的 API 密鑰",
+        Placeholder: "Anthropic API Key",
+      },
+
+      Endpoint: {
+        Title: "終端地址",
+        SubTitle: "示例:",
+      },
+
+      ApiVerion: {
+        Title: "API 版本 (claude api version)",
+        SubTitle: "選擇一個特定的 API 版本输入",
+      },
+    },
     Google: {
       ApiKey: {
         Title: "API 密鑰",
@@ -36,6 +36,11 @@ const DEFAULT_ACCESS_STATE = {
   googleApiKey: "",
   googleApiVersion: "v1",

+  // anthropic
+  anthropicApiKey: "",
+  anthropicApiVersion: "2023-06-01",
+  anthropicUrl: "",
+
   // server config
   needCode: true,
   hideUserApiKey: false,
@@ -74,6 +79,10 @@ export const useAccessStore = createPersistStore(
       return ensure(get(), ["googleApiKey"]);
     },

+    isValidAnthropic() {
+      return ensure(get(), ["anthropicApiKey"]);
+    },
+
     isAuthorized() {
       this.fetch();

@@ -82,6 +91,7 @@ export const useAccessStore = createPersistStore(
         this.isValidOpenAI() ||
         this.isValidAzure() ||
         this.isValidGoogle() ||
+        this.isValidAnthropic() ||
         !this.enabledAccessControl() ||
         (this.enabledAccessControl() && ensure(get(), ["accessCode"]))
       );
@@ -27,6 +27,7 @@ export interface ChatToolMessage {
 }
 import { createPersistStore } from "../utils/store";
 import { FileInfo } from "../client/platforms/utils";
+import { identifyDefaultClaudeModel } from "../utils/checkers";

 export type ChatMessage = RequestMessage & {
   date: string;
@@ -141,6 +142,11 @@ function fillTemplateWith(input: string, modelConfig: ModelConfig) {

   let output = modelConfig.template ?? DEFAULT_INPUT_TEMPLATE;

+  // remove duplicate
+  if (input.startsWith(output)) {
+    output = "";
+  }
+
   // must contains {{input}}
   const inputVar = "{{input}}";
   if (!output.includes(inputVar)) {
@@ -469,6 +475,10 @@ export const useChatStore = createPersistStore(
       } else {
         if (modelConfig.model.startsWith("gemini")) {
           api = new ClientApi(ModelProvider.GeminiPro);
+        } else if (identifyDefaultClaudeModel(modelConfig.model)) {
+          api = new ClientApi(ModelProvider.Claude);
+        } else {
+          api = new ClientApi(ModelProvider.GPT);
         }
         // make request
         api.llm.chat({
@@ -613,7 +623,6 @@ export const useChatStore = createPersistStore(
         tokenCount += estimateTokenLength(getMessageTextContent(msg));
         reversedRecentMessages.push(msg);
       }
-
       // concat all messages
       const recentMessages = [
         ...systemPrompts,
@@ -652,6 +661,8 @@ export const useChatStore = createPersistStore(
       var api: ClientApi;
       if (modelConfig.model.startsWith("gemini")) {
         api = new ClientApi(ModelProvider.GeminiPro);
+      } else if (identifyDefaultClaudeModel(modelConfig.model)) {
+        api = new ClientApi(ModelProvider.Claude);
       } else {
         api = new ClientApi(ModelProvider.GPT);
       }
|
|||||||
export type Updater<T> = (updater: (value: T) => void) => void;
|
export type Updater<T> = (updater: (value: T) => void) => void;
|
||||||
|
|
||||||
|
export const ROLES = ["system", "user", "assistant"] as const;
|
||||||
|
export type MessageRole = (typeof ROLES)[number];
|
||||||
|
|
||||||
|
export interface RequestMessage {
|
||||||
|
role: MessageRole;
|
||||||
|
content: string;
|
||||||
|
}
|
||||||
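
A literal that satisfies the new shared types (illustration only):

    const msg: RequestMessage = { role: "user", content: "Hello" };
    // ROLES narrows `role` to "system" | "user" | "assistant".
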
@@ -3,7 +3,6 @@ import { showToast } from "./components/ui-lib";
 import Locale from "./locales";
 import { RequestMessage } from "./client/api";
 import { DEFAULT_MODELS } from "./constant";
-import { useAccessStore } from "./store";
 
 export function trimTopic(topic: string) {
   // Fix an issue where double quotes still show in the Indonesian language
@@ -292,10 +291,13 @@ export function getMessageImages(message: RequestMessage): string[] {
 }
 
 export function isVisionModel(model: string) {
-  // Note: This is a better way using the TypeScript feature instead of `&&` or `||` (ts v5.5.0-dev.20240314 I've been using)
   const visionKeywords = ["vision", "claude-3"];
+  const isGpt4Turbo =
+    model.includes("gpt-4-turbo") && !model.includes("preview");
 
-  return visionKeywords.some((keyword) => model.includes(keyword));
+  return (
+    visionKeywords.some((keyword) => model.includes(keyword)) || isGpt4Turbo
+  );
 }
 
 export function isSupportRAGModel(modelName: string) {
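
Expected results of the updated check, derived from the keywords and the gpt-4-turbo clause above (illustration only):

    isVisionModel("gpt-4-vision-preview");   // true  ("vision" keyword)
    isVisionModel("claude-3-opus-20240229"); // true  ("claude-3" keyword)
    isVisionModel("gpt-4-turbo");            // true  (gpt-4-turbo without "preview")
    isVisionModel("gpt-4-turbo-preview");    // false ("preview" is excluded)
    isVisionModel("gpt-3.5-turbo");          // false
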
app/utils/checkers.ts  (new file, 21 lines)
@@ -0,0 +1,21 @@
+import { useAccessStore } from "../store/access";
+import { useAppConfig } from "../store/config";
+import { collectModels } from "./model";
+
+export function identifyDefaultClaudeModel(modelName: string) {
+  const accessStore = useAccessStore.getState();
+  const configStore = useAppConfig.getState();
+
+  const allModals = collectModels(
+    configStore.models,
+    [configStore.customModels, accessStore.customModels].join(","),
+  );
+
+  const modelMeta = allModals.find((m) => m.name === modelName);
+
+  return (
+    modelName.startsWith("claude") &&
+    modelMeta &&
+    modelMeta.provider?.providerType === "anthropic"
+  );
+}
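
A hedged usage sketch of the new checker; the outcome depends on which models collectModels returns and on their provider metadata, so the names below are examples, not guarantees:

    // Assuming the model list contains a "claude-3-..." entry whose
    // provider.providerType is "anthropic":
    identifyDefaultClaudeModel("claude-3-opus-20240229"); // likely true
    identifyDefaultClaudeModel("gpt-4-turbo");            // false – does not start with "claude"
    identifyDefaultClaudeModel("claude-custom");          // false unless listed with an anthropic provider
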
@@ -22,6 +22,12 @@ export function collectModelTable(
       };
     });
 
+  const customProvider = (modelName: string) => ({
+    id: modelName,
+    providerName: "",
+    providerType: "custom",
+  });
+
   // server custom models
   customModels
     .split(",")
@@ -42,7 +48,7 @@ export function collectModelTable(
         name,
         displayName: displayName || name,
         available,
-        provider: modelTable[name]?.provider, // Use optional chaining
+        provider: modelTable[name]?.provider ?? customProvider(name), // Use optional chaining
       };
     }
   });
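
Effect of the fallback (illustration with a hypothetical custom model name): a model added via CUSTOM_MODELS that has no entry in the built-in table now receives a synthetic provider instead of `undefined`.

    // CUSTOM_MODELS="+my-local-model"   (hypothetical name)
    // before: provider === undefined
    // after:  provider === { id: "my-local-model", providerName: "", providerType: "custom" }
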
app/utils/object.ts  (new file, 17 lines)
@@ -0,0 +1,17 @@
+export function omit<T extends object, U extends (keyof T)[]>(
+  obj: T,
+  ...keys: U
+): Omit<T, U[number]> {
+  const ret: any = { ...obj };
+  keys.forEach((key) => delete ret[key]);
+  return ret;
+}
+
+export function pick<T extends object, U extends (keyof T)[]>(
+  obj: T,
+  ...keys: U
+): Pick<T, U[number]> {
+  const ret: any = {};
+  keys.forEach((key) => (ret[key] = obj[key]));
+  return ret;
+}
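
Usage sketch for the new helpers (illustration only):

    const cfg = { model: "gpt-4-turbo", temperature: 0.7, apiKey: "..." };
    omit(cfg, "apiKey");               // { model: "gpt-4-turbo", temperature: 0.7 }
    pick(cfg, "model", "temperature"); // { model: "gpt-4-turbo", temperature: 0.7 }
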
@@ -77,6 +77,10 @@ if (mode !== "export") {
       source: "/api/proxy/openai/:path*",
       destination: "https://api.openai.com/:path*",
     },
+    {
+      source: "/api/proxy/anthropic/:path*",
+      destination: "https://api.anthropic.com/:path*",
+    },
     {
       source: "/google-fonts/:path*",
       destination: "https://fonts.googleapis.com/:path*",
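
With this rewrite in place, a browser request to the proxy path is forwarded to the Anthropic origin; the path below only illustrates the mapping and is not a prescribed call:

    // e.g. /api/proxy/anthropic/v1/messages  ->  https://api.anthropic.com/v1/messages
    fetch("/api/proxy/anthropic/v1/messages", { method: "POST" });
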
@@ -23,7 +23,7 @@
     "@hello-pangea/dnd": "^16.5.0",
     "@langchain/cohere": "^0.0.6",
     "@langchain/community": "0.0.30",
-    "@langchain/openai": "0.0.14",
+    "@langchain/openai": "0.0.26",
     "@langchain/pinecone": "^0.0.4",
     "@next/third-parties": "^14.1.0",
     "@pinecone-database/pinecone": "^2.2.0",
@@ -44,7 +44,7 @@
     "html-to-text": "^9.0.5",
     "https-proxy-agent": "^7.0.2",
     "md5": "^2.3.0",
-    "langchain": "0.1.30",
+    "langchain": "0.1.33",
     "mammoth": "^1.7.1",
     "mermaid": "^10.6.1",
     "mime": "^4.0.1",
@@ -92,7 +92,7 @@
   },
   "resolutions": {
     "lint-staged/yaml": "^2.2.2",
-    "@langchain/core": "0.1.53",
+    "@langchain/core": "0.1.57",
     "openai": "4.28.4"
   },
   "packageManager": "yarn@1.22.19"
yarn.lock  (49 changes)
@@ -1939,13 +1939,13 @@
     uuid "^9.0.0"
     zod "^3.22.3"
 
-"@langchain/community@~0.0.41":
-  version "0.0.44"
-  resolved "https://registry.yarnpkg.com/@langchain/community/-/community-0.0.44.tgz#b4f3453e3fd0b7a8c704fc35b004d7d738bd3416"
-  integrity sha512-II9Hz90jJmfWRICtxTg1auQWzFw0npqacWiiOpaxNhzs6rptdf56gyfC48Z6n1ii4R8FfAlfX6YxhOE7lGGKXg==
+"@langchain/community@~0.0.47":
+  version "0.0.47"
+  resolved "https://registry.yarnpkg.com/@langchain/community/-/community-0.0.47.tgz#e7b00b3f98e3217932785c70e9926abf750f8cfc"
+  integrity sha512-j20KhnWs16K/qxaY3QccOFMfcEmiTXN00NB1IPeO1pexRsvHL7GfqZ8rR5cOSSBfft2iuoGxaj1EZIyxh5ftgA==
   dependencies:
-    "@langchain/core" "~0.1.44"
-    "@langchain/openai" "~0.0.19"
+    "@langchain/core" "~0.1.56"
+    "@langchain/openai" "~0.0.28"
     expr-eval "^2.0.2"
     flat "^5.0.2"
     langsmith "~0.1.1"
@@ -1953,10 +1953,10 @@
     zod "^3.22.3"
     zod-to-json-schema "^3.22.5"
 
-"@langchain/core@0.1.53", "@langchain/core@~0.1", "@langchain/core@~0.1.13", "@langchain/core@~0.1.29", "@langchain/core@~0.1.44", "@langchain/core@~0.1.45":
-  version "0.1.53"
-  resolved "https://registry.yarnpkg.com/@langchain/core/-/core-0.1.53.tgz#40bf273b6d5e1426c60ce9cc259562fe656573f1"
-  integrity sha512-khfRTu2DSCNMPUmnKx7iH0TpEaunW/4BgR6STTteRRDd0NFtXGfAwUuY9sm0+EKi/XKhdAmpGnfLwSfNg5F0Qw==
+"@langchain/core@0.1.57", "@langchain/core@~0.1", "@langchain/core@~0.1.13", "@langchain/core@~0.1.29", "@langchain/core@~0.1.45", "@langchain/core@~0.1.56":
+  version "0.1.57"
+  resolved "https://registry.yarnpkg.com/@langchain/core/-/core-0.1.57.tgz#c592ad6715e373e4203f590f20b8b999e64cfb16"
+  integrity sha512-6wOwidPkkRcANrOKl88+YYpm3jHfpg6W8EqZHQCImSAlxdEhyDSq2eeQKHOPCFCrfNWkClaNn+Wlzzz4Qwf9Tg==
   dependencies:
     ansi-styles "^5.0.0"
     camelcase "6"
@@ -1981,7 +1981,7 @@
     zod "^3.22.4"
     zod-to-json-schema "^3.22.3"
 
-"@langchain/openai@~0.0.14", "@langchain/openai@~0.0.19":
+"@langchain/openai@~0.0.14":
   version "0.0.23"
   resolved "https://registry.npmjs.org/@langchain/openai/-/openai-0.0.23.tgz"
   integrity sha512-H5yv2hKQ5JVa6jC1wQxiN2299lJbPc5JUv93c6IUw+0jr0kFqH48NWbcythz1UFj2rOpZdaFJSYJs2nr9bhVLg==
@@ -1992,6 +1992,17 @@
     zod "^3.22.4"
     zod-to-json-schema "^3.22.3"
 
+"@langchain/openai@~0.0.28":
+  version "0.0.28"
+  resolved "https://registry.yarnpkg.com/@langchain/openai/-/openai-0.0.28.tgz#afaeec61b44816935db9ae937496c964c81ab571"
+  integrity sha512-2s1RA3/eAnz4ahdzsMPBna9hfAqpFNlWdHiPxVGZ5yrhXsbLWWoPcF+22LCk9t0HJKtazi2GCIWc0HVXH9Abig==
+  dependencies:
+    "@langchain/core" "~0.1.56"
+    js-tiktoken "^1.0.7"
+    openai "^4.32.1"
+    zod "^3.22.4"
+    zod-to-json-schema "^3.22.3"
+
 "@langchain/pinecone@^0.0.4":
   version "0.0.4"
   resolved "https://registry.yarnpkg.com/@langchain/pinecone/-/pinecone-0.0.4.tgz#312f3ff4286b1278c47c676d7be5a4f0f5c1409c"
@@ -6091,15 +6102,15 @@ kleur@^4.0.3:
   resolved "https://registry.npmjs.org/kleur/-/kleur-4.1.5.tgz"
   integrity sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==
 
-langchain@0.1.30:
-  version "0.1.30"
-  resolved "https://registry.yarnpkg.com/langchain/-/langchain-0.1.30.tgz#e1adb3f1849fcd5c596c668300afd5dc8cb37a97"
-  integrity sha512-5h/vNMmutQ98tbB0sPDlAileZVca6A2McFgGa3+D56Dm8mSSCzTQL2DngPA6h09DlKDpSr7+6PdFw5Hoj0ZDSw==
+langchain@0.1.33:
+  version "0.1.33"
+  resolved "https://registry.yarnpkg.com/langchain/-/langchain-0.1.33.tgz#959f0f023975092569b49f1f07fe60c11c643530"
+  integrity sha512-IrRd839x8eAmDutHNDMHGzIjufyWt/ckJrnRB2WifIJmtLWNRNQo5jZd7toeBU0UOVOQxoPtQGYf8lR0ar7vIQ==
   dependencies:
     "@anthropic-ai/sdk" "^0.9.1"
-    "@langchain/community" "~0.0.41"
-    "@langchain/core" "~0.1.44"
-    "@langchain/openai" "~0.0.19"
+    "@langchain/community" "~0.0.47"
+    "@langchain/core" "~0.1.56"
+    "@langchain/openai" "~0.0.28"
     binary-extensions "^2.2.0"
     js-tiktoken "^1.0.7"
     js-yaml "^4.1.0"
@@ -7135,7 +7146,7 @@ onetime@^6.0.0:
   dependencies:
     mimic-fn "^4.0.0"
 
-openai@4.28.4, openai@^4.26.0:
+openai@4.28.4, openai@^4.26.0, openai@^4.32.1:
   version "4.28.4"
   resolved "https://registry.yarnpkg.com/openai/-/openai-4.28.4.tgz#d4bf1f53a89ef151bf066ef284489e12e7dd1657"
   integrity sha512-RNIwx4MT/F0zyizGcwS+bXKLzJ8QE9IOyigDG/ttnwB220d58bYjYFp0qjvGwEFBO6+pvFVIDABZPGDl46RFsg==