hycqwq 2025-05-11 15:01:44 +00:00 committed by GitHub
commit 492fd4b336
16 changed files with 608 additions and 6 deletions

View File

@ -81,3 +81,9 @@ SILICONFLOW_API_KEY=
### siliconflow Api url (optional)
SILICONFLOW_URL=
### openrouter Api key (optional)
OPENROUTER_API_KEY=
### openrouter Api url (optional)
OPENROUTER_URL=

View File

@ -311,6 +311,9 @@ To control custom models, use `+` to add a custom model, use `-` to hide a model
Use `-all` to disable all default models, `+all` to enable all default models.
Models from OpenRouter (except `openrouter/auto`) must be configured manually; use `+provider/model@OpenRouter`.
> Example: `+qwen/qwen3-32b:free@OpenRouter` will show `qwen/qwen3-32b:free(OpenRouter)` in the model list.
For Azure: use `modelName@Azure=deploymentName` to customize the model name and deployment name.
> Example: `+gpt-3.5-turbo@Azure=gpt35` will show `gpt35(Azure)` in the model list.
> If you can only use Azure models, `-all,+gpt-3.5-turbo@Azure=gpt35` will make `gpt35(Azure)` the only option in the model list.
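A combined setting might look like this (illustrative values only; the deployment name `gpt35` and the OpenRouter model are just examples):

CUSTOM_MODELS=-all,+gpt-4o,+qwen/qwen3-32b:free@OpenRouter,+gpt-3.5-turbo@Azure=gpt35

This hides every default model, then exposes `gpt-4o`, `qwen/qwen3-32b:free(OpenRouter)`, and `gpt35(Azure)`.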
@ -361,6 +364,14 @@ SiliconFlow API Key.
SiliconFlow API URL.
### `OPENROUTER_API_KEY` (optional)
OpenRouter API Key.
### `OPENROUTER_URL` (optional)
OpenRouter API URL.
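For example, a minimal OpenRouter setup in `.env` (the key value is a placeholder):

OPENROUTER_API_KEY=sk-or-v1-xxxxxxxx
OPENROUTER_URL=https://openrouter.ai/api

`OPENROUTER_URL` can usually be left unset; the server falls back to the default base URL `https://openrouter.ai/api`.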
## Requirements
NodeJS >= 18, Docker >= 20

View File

@ -232,6 +232,9 @@ DeepSeek Api Url.
Controls the model list: use `+` to add a model, `-` to hide a model, and `modelName=displayName` to customize a model's display name, separated by commas.
Models from OpenRouter (except `openrouter/auto`) must be configured manually, using `+provider/model@OpenRouter`.
> Example: the setting `+qwen/qwen3-32b:free@OpenRouter` adds a `qwen/qwen3-32b:free(OpenRouter)` option to the model list.
In Azure mode, use `modelName@Azure=deploymentName` to configure the model name and deployment name (deploy-name).
> Example: the setting `+gpt-3.5-turbo@Azure=gpt35` adds a `gpt35(Azure)` option to the model list.
> If you can only use Azure mode, the setting `-all,+gpt-3.5-turbo@Azure=gpt35` makes conversations default to `gpt35(Azure)`.
@ -275,6 +278,14 @@ SiliconFlow API Key.
SiliconFlow API URL.
### `OPENROUTER_API_KEY` (optional)
OpenRouter API Key.
### `OPENROUTER_URL` (optional)
OpenRouter API URL.
## Development
Click the button below to start secondary development:

View File

@ -15,6 +15,7 @@ import { handle as siliconflowHandler } from "../../siliconflow";
import { handle as xaiHandler } from "../../xai";
import { handle as chatglmHandler } from "../../glm";
import { handle as proxyHandler } from "../../proxy";
import { handle as openrouterHandler } from "../../openrouter";
async function handle(
req: NextRequest,
@ -50,6 +51,8 @@ async function handle(
return chatglmHandler(req, { params });
case ApiPath.SiliconFlow:
return siliconflowHandler(req, { params });
case ApiPath.OpenRouter:
return openrouterHandler(req, { params });
case ApiPath.OpenAI:
return openaiHandler(req, { params });
default:

View File

@ -104,6 +104,9 @@ export function auth(req: NextRequest, modelProvider: ModelProvider) {
case ModelProvider.SiliconFlow:
systemApiKey = serverConfig.siliconFlowApiKey;
break;
case ModelProvider.OpenRouter:
systemApiKey = serverConfig.openrouterApiKey;
break;
case ModelProvider.GPT:
default:
if (req.nextUrl.pathname.includes("azure/deployments")) {

app/api/openrouter.ts Normal file
View File

@ -0,0 +1,128 @@
import { getServerSideConfig } from "@/app/config/server";
import {
OPENROUTER_BASE_URL,
ApiPath,
ModelProvider,
ServiceProvider,
} from "@/app/constant";
import { prettyObject } from "@/app/utils/format";
import { NextRequest, NextResponse } from "next/server";
import { auth } from "@/app/api/auth";
import { isModelNotavailableInServer } from "@/app/utils/model";
const serverConfig = getServerSideConfig();
export async function handle(
req: NextRequest,
{ params }: { params: { path: string[] } },
) {
console.log("[OpenRouter Route] params ", params);
if (req.method === "OPTIONS") {
return NextResponse.json({ body: "OK" }, { status: 200 });
}
const authResult = auth(req, ModelProvider.OpenRouter);
if (authResult.error) {
return NextResponse.json(authResult, {
status: 401,
});
}
try {
const response = await request(req);
return response;
} catch (e) {
console.error("[OpenRouter] ", e);
return NextResponse.json(prettyObject(e));
}
}
async function request(req: NextRequest) {
const controller = new AbortController();
// strip the ApiPath.OpenRouter prefix; the upstream base url is prepended below
let path = `${req.nextUrl.pathname}`.replaceAll(ApiPath.OpenRouter, "");
let baseUrl = serverConfig.openrouterUrl || OPENROUTER_BASE_URL;
if (!baseUrl.startsWith("http")) {
baseUrl = `https://${baseUrl}`;
}
if (baseUrl.endsWith("/")) {
baseUrl = baseUrl.slice(0, -1);
}
console.log("[Proxy] ", path);
console.log("[Base Url]", baseUrl);
const timeoutId = setTimeout(
() => {
controller.abort();
},
10 * 60 * 1000,
);
const fetchUrl = `${baseUrl}${path}`;
const fetchOptions: RequestInit = {
headers: {
"Content-Type": "application/json",
Authorization: req.headers.get("Authorization") ?? "",
},
method: req.method,
body: req.body,
redirect: "manual",
// @ts-ignore
duplex: "half",
signal: controller.signal,
};
// #1815: refuse requests for models that are disabled via CUSTOM_MODELS
if (serverConfig.customModels && req.body) {
try {
const clonedBody = await req.text();
fetchOptions.body = clonedBody;
const jsonBody = JSON.parse(clonedBody) as { model?: string };
// reject the request if the model is not available on this server
if (
isModelNotavailableInServer(
serverConfig.customModels,
jsonBody?.model as string,
ServiceProvider.OpenRouter as string,
)
) {
return NextResponse.json(
{
error: true,
message: `you are not allowed to use the ${jsonBody?.model} model`,
},
{
status: 403,
},
);
}
} catch (e) {
console.error(`[OpenRouter] filter`, e);
}
}
try {
const res = await fetch(fetchUrl, fetchOptions);
// to prevent browser prompt for credentials
const newHeaders = new Headers(res.headers);
newHeaders.delete("www-authenticate");
// to disable nginx buffering
newHeaders.set("X-Accel-Buffering", "no");
return new Response(res.body, {
status: res.status,
statusText: res.statusText,
headers: newHeaders,
});
} finally {
clearTimeout(timeoutId);
}
}
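
As a usage sketch (a hypothetical client-side call, not part of this commit): a request to `/api/openrouter/v1/chat/completions` has the `ApiPath.OpenRouter` prefix stripped and is forwarded to the configured base URL (default `https://openrouter.ai/api`), with the caller's `Authorization` header passed through.

// Hypothetical sketch; the API key is a placeholder.
async function demo() {
  const res = await fetch("/api/openrouter/v1/chat/completions", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: "Bearer sk-or-v1-xxxxxxxx", // placeholder key
    },
    body: JSON.stringify({
      model: "openrouter/auto",
      messages: [{ role: "user", content: "Hello" }],
      stream: false,
    }),
  });
  console.log(await res.json());
}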

View File

@ -24,6 +24,7 @@ import { DeepSeekApi } from "./platforms/deepseek";
import { XAIApi } from "./platforms/xai";
import { ChatGLMApi } from "./platforms/glm";
import { SiliconflowApi } from "./platforms/siliconflow";
import { OpenRouterApi } from "./platforms/openrouter";
export const ROLES = ["system", "user", "assistant"] as const;
export type MessageRole = (typeof ROLES)[number];
@ -173,6 +174,9 @@ export class ClientApi {
case ModelProvider.SiliconFlow:
this.llm = new SiliconflowApi();
break;
case ModelProvider.OpenRouter:
this.llm = new OpenRouterApi();
break;
default:
this.llm = new ChatGPTApi();
}
@ -265,6 +269,8 @@ export function getHeaders(ignoreHeaders: boolean = false) {
const isChatGLM = modelConfig.providerName === ServiceProvider.ChatGLM;
const isSiliconFlow =
modelConfig.providerName === ServiceProvider.SiliconFlow;
const isOpenRouter =
modelConfig.providerName === ServiceProvider.OpenRouter;
const isEnabledAccessControl = accessStore.enabledAccessControl();
const apiKey = isGoogle
? accessStore.googleApiKey
@ -286,6 +292,8 @@ export function getHeaders(ignoreHeaders: boolean = false) {
? accessStore.chatglmApiKey
: isSiliconFlow
? accessStore.siliconflowApiKey
: isOpenRouter
? accessStore.openrouterApiKey
: isIflytek
? accessStore.iflytekApiKey && accessStore.iflytekApiSecret
? accessStore.iflytekApiKey + ":" + accessStore.iflytekApiSecret
@ -304,6 +312,7 @@ export function getHeaders(ignoreHeaders: boolean = false) {
isXAI,
isChatGLM,
isSiliconFlow,
isOpenRouter,
apiKey,
isEnabledAccessControl,
};
@ -332,6 +341,7 @@ export function getHeaders(ignoreHeaders: boolean = false) {
isXAI,
isChatGLM,
isSiliconFlow,
isOpenRouter,
apiKey,
isEnabledAccessControl,
} = getConfig();
@ -382,6 +392,8 @@ export function getClientApi(provider: ServiceProvider): ClientApi {
return new ClientApi(ModelProvider.ChatGLM);
case ServiceProvider.SiliconFlow:
return new ClientApi(ModelProvider.SiliconFlow);
case ServiceProvider.OpenRouter:
return new ClientApi(ModelProvider.OpenRouter);
default:
return new ClientApi(ModelProvider.GPT);
}

View File

@ -0,0 +1,287 @@
"use client";
// OpenRouter exposes an OpenAI-compatible API, so this client reuses the OpenAI-style request payload and response parsing.
import {
ApiPath,
OPENROUTER_BASE_URL,
OpenRouter,
DEFAULT_MODELS,
} from "@/app/constant";
import {
useAccessStore,
useAppConfig,
useChatStore,
ChatMessageTool,
usePluginStore,
} from "@/app/store";
import { preProcessImageContent, streamWithThink } from "@/app/utils/chat";
import {
ChatOptions,
getHeaders,
LLMApi,
LLMModel,
SpeechOptions,
} from "../api";
import { getClientConfig } from "@/app/config/client";
import {
getMessageTextContent,
getMessageTextContentWithoutThinking,
isVisionModel,
getTimeoutMSByModel,
} from "@/app/utils";
import { RequestPayload } from "./openai";
import { fetch } from "@/app/utils/stream";
export interface OpenRouterListModelResponse {
object: string;
data: Array<{
id: string;
object: string;
root: string;
}>;
}
export class OpenRouterApi implements LLMApi {
private disableListModels = false;
path(path: string): string {
const accessStore = useAccessStore.getState();
let baseUrl = "";
if (accessStore.useCustomConfig) {
baseUrl = accessStore.openrouterUrl;
}
if (baseUrl.length === 0) {
const isApp = !!getClientConfig()?.isApp;
const apiPath = ApiPath.OpenRouter;
baseUrl = isApp ? OPENROUTER_BASE_URL : apiPath;
}
if (baseUrl.endsWith("/")) {
baseUrl = baseUrl.slice(0, baseUrl.length - 1);
}
if (
!baseUrl.startsWith("http") &&
!baseUrl.startsWith(ApiPath.OpenRouter)
) {
baseUrl = "https://" + baseUrl;
}
console.log("[Proxy Endpoint] ", baseUrl, path);
return [baseUrl, path].join("/");
}
extractMessage(res: any) {
return res.choices?.at(0)?.message?.content ?? "";
}
speech(options: SpeechOptions): Promise<ArrayBuffer> {
throw new Error("Method not implemented.");
}
async chat(options: ChatOptions) {
const visionModel = isVisionModel(options.config.model);
const messages: ChatOptions["messages"] = [];
for (const v of options.messages) {
if (v.role === "assistant") {
const content = getMessageTextContentWithoutThinking(v);
messages.push({ role: v.role, content });
} else {
const content = visionModel
? await preProcessImageContent(v.content)
: getMessageTextContent(v);
messages.push({ role: v.role, content });
}
}
const modelConfig = {
...useAppConfig.getState().modelConfig,
...useChatStore.getState().currentSession().mask.modelConfig,
...{
model: options.config.model,
providerName: options.config.providerName,
},
};
const requestPayload: RequestPayload = {
messages,
stream: options.config.stream,
model: modelConfig.model,
temperature: modelConfig.temperature,
presence_penalty: modelConfig.presence_penalty,
frequency_penalty: modelConfig.frequency_penalty,
top_p: modelConfig.top_p,
// max_tokens: Math.max(modelConfig.max_tokens, 1024),
// max_tokens is deliberately not sent, matching the upstream OpenAI client.
};
console.log("[Request] openai payload: ", requestPayload);
const shouldStream = !!options.config.stream;
const controller = new AbortController();
options.onController?.(controller);
try {
const chatPath = this.path(OpenRouter.ChatPath);
const chatPayload = {
method: "POST",
body: JSON.stringify(requestPayload),
signal: controller.signal,
headers: getHeaders(),
};
// console.log(chatPayload);
// Use extended timeout for thinking models as they typically require more processing time
const requestTimeoutId = setTimeout(
() => controller.abort(),
getTimeoutMSByModel(options.config.model),
);
if (shouldStream) {
const [tools, funcs] = usePluginStore
.getState()
.getAsTools(
useChatStore.getState().currentSession().mask?.plugin || [],
);
return streamWithThink(
chatPath,
requestPayload,
getHeaders(),
tools as any,
funcs,
controller,
// parseSSE
(text: string, runTools: ChatMessageTool[]) => {
// console.log("parseSSE", text, runTools);
const json = JSON.parse(text);
const choices = json.choices as Array<{
delta: {
content: string | null;
tool_calls: ChatMessageTool[];
reasoning: string | null;
};
}>;
const tool_calls = choices[0]?.delta?.tool_calls;
if (tool_calls?.length > 0) {
const index = tool_calls[0]?.index;
const id = tool_calls[0]?.id;
const args = tool_calls[0]?.function?.arguments;
if (id) {
runTools.push({
id,
type: tool_calls[0]?.type,
function: {
name: tool_calls[0]?.function?.name as string,
arguments: args,
},
});
} else {
// @ts-ignore
runTools[index]["function"]["arguments"] += args;
}
}
const reasoning = choices[0]?.delta?.reasoning;
const content = choices[0]?.delta?.content;
// Skip if both content and reasoning are empty or null
if (
(!reasoning || reasoning.length === 0) &&
(!content || content.length === 0)
) {
return {
isThinking: false,
content: "",
};
}
if (reasoning && reasoning.length > 0) {
return {
isThinking: true,
content: reasoning,
};
} else if (content && content.length > 0) {
return {
isThinking: false,
content: content,
};
}
return {
isThinking: false,
content: "",
};
},
// processToolMessage: append the tool_calls message and the tool call results
(
requestPayload: RequestPayload,
toolCallMessage: any,
toolCallResult: any[],
) => {
// @ts-ignore
requestPayload?.messages?.splice(
// @ts-ignore
requestPayload?.messages?.length,
0,
toolCallMessage,
...toolCallResult,
);
},
options,
);
} else {
const res = await fetch(chatPath, chatPayload);
clearTimeout(requestTimeoutId);
const resJson = await res.json();
const message = this.extractMessage(resJson);
options.onFinish(message, res);
}
} catch (e) {
console.log("[Request] failed to make a chat request", e);
options.onError?.(e as Error);
}
}
async usage() {
return {
used: 0,
total: 0,
};
}
async models(): Promise<LLMModel[]> {
if (this.disableListModels) {
return DEFAULT_MODELS.slice();
}
const res = await fetch(this.path(OpenRouter.ListModelPath), {
method: "GET",
headers: {
...getHeaders(),
},
});
const resJson = (await res.json()) as OpenRouterListModelResponse;
const chatModels = resJson.data;
console.log("[Models]", chatModels);
if (!chatModels) {
return [];
}
let seq = 1000; // keep ordering consistent with constant.ts
return chatModels.map((m) => ({
name: m.id,
available: true,
sorted: seq++,
provider: {
id: "openrouter",
providerName: "OpenRouter",
providerType: "openrouter",
sorted: 15,
},
}));
}
}
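
For orientation, a minimal selection sketch (mirrors the `ClientApi` switch added earlier in this commit; import paths assume the repo layout):

// Minimal sketch; see the app/client/api.ts changes above.
import { ClientApi } from "../api";
import { ModelProvider } from "@/app/constant";

async function listOpenRouterModels() {
  const api = new ClientApi(ModelProvider.OpenRouter); // llm becomes OpenRouterApi
  return api.llm.models(); // GET v1/models through the /api/openrouter proxy
}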

View File

@ -57,18 +57,32 @@ export function Avatar(props: { model?: ModelType; avatar?: string }) {
modelName.startsWith("dall-e") ||
modelName.startsWith("dalle") ||
modelName.startsWith("o1") ||
modelName.startsWith("o3")
modelName.startsWith("o3") ||
modelName.startsWith("openai/")
) {
LlmIcon = BotIconOpenAI;
} else if (modelName.startsWith("gemini")) {
} else if (
modelName.startsWith("gemini") ||
modelName.startsWith("google/gemini")
) {
LlmIcon = BotIconGemini;
} else if (modelName.startsWith("gemma")) {
} else if (
modelName.startsWith("gemma") ||
modelName.startsWith("google/gemma")
) {
LlmIcon = BotIconGemma;
} else if (modelName.startsWith("claude")) {
} else if (
modelName.startsWith("claude") ||
modelName.startsWith("anthropic/claude")
) {
LlmIcon = BotIconClaude;
} else if (modelName.includes("llama")) {
LlmIcon = BotIconMeta;
} else if (modelName.startsWith("mixtral") || modelName.startsWith("codestral")) {
} else if (
modelName.startsWith("mixtral") ||
modelName.startsWith("codestral") ||
modelName.startsWith("mistralai/")
) {
LlmIcon = BotIconMistral;
} else if (modelName.includes("deepseek")) {
LlmIcon = BotIconDeepseek;
@ -78,7 +92,10 @@ export function Avatar(props: { model?: ModelType; avatar?: string }) {
LlmIcon = BotIconQwen;
} else if (modelName.startsWith("ernie")) {
LlmIcon = BotIconWenxin;
} else if (modelName.startsWith("grok")) {
} else if (
modelName.startsWith("grok") ||
modelName.startsWith("x-ai/grok")
) {
LlmIcon = BotIconGrok;
} else if (modelName.startsWith("hunyuan")) {
LlmIcon = BotIconHunyuan;

View File

@ -75,6 +75,7 @@ import {
ChatGLM,
DeepSeek,
SiliconFlow,
OpenRouter,
} from "../constant";
import { Prompt, SearchService, usePromptStore } from "../store/prompt";
import { ErrorBoundary } from "./error";
@ -1359,6 +1360,46 @@ export function Settings() {
</ListItem>
</>
);
const openrouterConfigComponent = accessStore.provider ===
ServiceProvider.OpenRouter && (
<>
<ListItem
title={Locale.Settings.Access.OpenRouter.Endpoint.Title}
subTitle={
Locale.Settings.Access.OpenRouter.Endpoint.SubTitle +
OpenRouter.ExampleEndpoint
}
>
<input
aria-label={Locale.Settings.Access.OpenRouter.Endpoint.Title}
type="text"
value={accessStore.openrouterUrl}
placeholder={OpenRouter.ExampleEndpoint}
onChange={(e) =>
accessStore.update(
(access) => (access.openrouterUrl = e.currentTarget.value),
)
}
></input>
</ListItem>
<ListItem
title={Locale.Settings.Access.OpenRouter.ApiKey.Title}
subTitle={Locale.Settings.Access.OpenRouter.ApiKey.SubTitle}
>
<PasswordInput
aria-label={Locale.Settings.Access.OpenRouter.ApiKey.Title}
value={accessStore.openrouterApiKey}
type="text"
placeholder={Locale.Settings.Access.OpenRouter.ApiKey.Placeholder}
onChange={(e) => {
accessStore.update(
(access) => (access.openrouterApiKey = e.currentTarget.value),
);
}}
/>
</ListItem>
</>
);
const stabilityConfigComponent = accessStore.provider ===
ServiceProvider.Stability && (
@ -1822,6 +1863,7 @@ export function Settings() {
{XAIConfigComponent}
{chatglmConfigComponent}
{siliconflowConfigComponent}
{openrouterConfigComponent}
</>
)}
</>

View File

@ -88,6 +88,10 @@ declare global {
SILICONFLOW_URL?: string;
SILICONFLOW_API_KEY?: string;
// openrouter only
OPENROUTER_URL?: string;
OPENROUTER_API_KEY?: string;
// custom template for preprocessing user input
DEFAULT_INPUT_TEMPLATE?: string;
@ -163,6 +167,7 @@ export const getServerSideConfig = () => {
const isXAI = !!process.env.XAI_API_KEY;
const isChatGLM = !!process.env.CHATGLM_API_KEY;
const isSiliconFlow = !!process.env.SILICONFLOW_API_KEY;
const isOpenRouter = !!process.env.OPENROUTER_API_KEY;
// const apiKeyEnvVar = process.env.OPENAI_API_KEY ?? "";
// const apiKeys = apiKeyEnvVar.split(",").map((v) => v.trim());
// const randomIndex = Math.floor(Math.random() * apiKeys.length);
@ -246,6 +251,10 @@ export const getServerSideConfig = () => {
siliconFlowUrl: process.env.SILICONFLOW_URL,
siliconFlowApiKey: getApiKey(process.env.SILICONFLOW_API_KEY),
isOpenRouter,
openrouterUrl: process.env.OPENROUTER_URL,
openrouterApiKey: getApiKey(process.env.OPENROUTER_API_KEY),
gtmId: process.env.GTM_ID,
gaId: process.env.GA_ID || DEFAULT_GA_ID,

View File

@ -36,6 +36,8 @@ export const CHATGLM_BASE_URL = "https://open.bigmodel.cn";
export const SILICONFLOW_BASE_URL = "https://api.siliconflow.cn";
export const OPENROUTER_BASE_URL = "https://openrouter.ai/api";
export const CACHE_URL_PREFIX = "/api/cache";
export const UPLOAD_URL = `${CACHE_URL_PREFIX}/upload`;
@ -72,6 +74,7 @@ export enum ApiPath {
ChatGLM = "/api/chatglm",
DeepSeek = "/api/deepseek",
SiliconFlow = "/api/siliconflow",
OpenRouter = "/api/openrouter",
}
export enum SlotID {
@ -130,6 +133,7 @@ export enum ServiceProvider {
ChatGLM = "ChatGLM",
DeepSeek = "DeepSeek",
SiliconFlow = "SiliconFlow",
OpenRouter = "OpenRouter",
}
// Google API safety settings, see https://ai.google.dev/gemini-api/docs/safety-settings
@ -156,6 +160,7 @@ export enum ModelProvider {
ChatGLM = "ChatGLM",
DeepSeek = "DeepSeek",
SiliconFlow = "SiliconFlow",
OpenRouter = "OpenRouter",
}
export const Stability = {
@ -266,6 +271,12 @@ export const SiliconFlow = {
ListModelPath: "v1/models?&sub_type=chat",
};
export const OpenRouter = {
ExampleEndpoint: OPENROUTER_BASE_URL,
ChatPath: "v1/chat/completions",
ListModelPath: "v1/models",
};
export const DEFAULT_INPUT_TEMPLATE = `{{input}}`; // input / time / model / lang
// export const DEFAULT_SYSTEM_TEMPLATE = `
// You are ChatGPT, a large language model trained by {{ServiceProvider}}.
@ -671,6 +682,12 @@ const siliconflowModels = [
"Pro/deepseek-ai/DeepSeek-V3",
];
// Use this to generate a full model list -> https://gist.github.com/hyc1230/d4b271d161ffcda485f1fa1a27e08096
const openrouterModels = [
// Requires user to customize models
"openrouter/auto",
];
let seq = 1000; // built-in model sequence numbers start at 1000
export const DEFAULT_MODELS = [
...openaiModels.map((name) => ({
@ -827,6 +844,17 @@ export const DEFAULT_MODELS = [
sorted: 14,
},
})),
...openrouterModels.map((name) => ({
name,
available: true,
sorted: seq++,
provider: {
id: "openrouter",
providerName: "OpenRouter",
providerType: "openrouter",
sorted: 15,
},
})),
] as const;
export const CHAT_PAGE_SIZE = 15;

View File

@ -507,6 +507,17 @@ const cn = {
SubTitle: "样例:",
},
},
OpenRouter: {
ApiKey: {
Title: "接口密钥",
SubTitle: "使用自定义 OpenRouter API Key",
Placeholder: "OpenRouter API Key",
},
Endpoint: {
Title: "接口地址",
SubTitle: "样例:",
},
},
Stability: {
ApiKey: {
Title: "接口密钥",

View File

@ -467,6 +467,17 @@ const da: PartialLocaleType = {
SubTitle: "F.eks.: ",
},
},
OpenRouter: {
ApiKey: {
Title: "OpenRouter-nøgle",
SubTitle: "Din egen OpenRouter-nøgle",
Placeholder: "OpenRouter API Key",
},
Endpoint: {
Title: "Adresse",
SubTitle: "F.eks.: ",
},
},
Stability: {
ApiKey: {
Title: "Stability-nøgle",

View File

@ -491,6 +491,17 @@ const en: LocaleType = {
SubTitle: "Example: ",
},
},
OpenRouter: {
ApiKey: {
Title: "OpenRouter API Key",
SubTitle: "Use a custom OpenRouter API Key",
Placeholder: "OpenRouter API Key",
},
Endpoint: {
Title: "Endpoint Address",
SubTitle: "Example: ",
},
},
Stability: {
ApiKey: {
Title: "Stability API Key",

View File

@ -17,6 +17,7 @@ import {
XAI_BASE_URL,
CHATGLM_BASE_URL,
SILICONFLOW_BASE_URL,
OPENROUTER_BASE_URL,
} from "../constant";
import { getHeaders } from "../client/api";
import { getClientConfig } from "../config/client";
@ -59,6 +60,8 @@ const DEFAULT_SILICONFLOW_URL = isApp
? SILICONFLOW_BASE_URL
: ApiPath.SiliconFlow;
const DEFAULT_OPENROUTER_URL = isApp ? OPENROUTER_BASE_URL : ApiPath.OpenRouter;
const DEFAULT_ACCESS_STATE = {
accessCode: "",
useCustomConfig: false,
@ -132,6 +135,10 @@ const DEFAULT_ACCESS_STATE = {
siliconflowUrl: DEFAULT_SILICONFLOW_URL,
siliconflowApiKey: "",
// openrouter
openrouterUrl: DEFAULT_OPENROUTER_URL,
openrouterApiKey: "",
// server config
needCode: true,
hideUserApiKey: false,
@ -219,6 +226,10 @@ export const useAccessStore = createPersistStore(
return ensure(get(), ["siliconflowApiKey"]);
},
isValidOpenRouter() {
return ensure(get(), ["openrouterApiKey"]);
},
isAuthorized() {
this.fetch();
@ -238,6 +249,7 @@ export const useAccessStore = createPersistStore(
this.isValidXAI() ||
this.isValidChatGLM() ||
this.isValidSiliconFlow() ||
this.isValidOpenRouter() ||
!this.enabledAccessControl() ||
(this.enabledAccessControl() && ensure(get(), ["accessCode"]))
);