Mirror of https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web.git
Synced 2025-05-19 12:10:17 +09:00
Merge pull request #6193 from siliconflow/get-models-siliconflow
Model listing of SiliconFlow
Commit 77c78b230a
@@ -5,6 +5,7 @@ import {
   SILICONFLOW_BASE_URL,
   SiliconFlow,
   REQUEST_TIMEOUT_MS_FOR_THINKING,
+  DEFAULT_MODELS,
 } from "@/app/constant";
 import {
   useAccessStore,
@@ -28,10 +29,19 @@ import {
   isVisionModel,
 } from "@/app/utils";
 import { RequestPayload } from "./openai";
 
 import { fetch } from "@/app/utils/stream";
+export interface SiliconFlowListModelResponse {
+  object: string;
+  data: Array<{
+    id: string;
+    object: string;
+    root: string;
+  }>;
+}
+
 export class SiliconflowApi implements LLMApi {
-  private disableListModels = true;
+  private disableListModels = false;
 
   path(path: string): string {
     const accessStore = useAccessStore.getState();
@@ -242,6 +252,36 @@ export class SiliconflowApi implements LLMApi {
   }
 
   async models(): Promise<LLMModel[]> {
+    if (this.disableListModels) {
+      return DEFAULT_MODELS.slice();
+    }
+
+    const res = await fetch(this.path(SiliconFlow.ListModelPath), {
+      method: "GET",
+      headers: {
+        ...getHeaders(),
+      },
+    });
+
+    const resJson = (await res.json()) as SiliconFlowListModelResponse;
+    const chatModels = resJson.data;
+    console.log("[Models]", chatModels);
+
+    if (!chatModels) {
       return [];
     }
+
+    let seq = 1000; // keep the ordering consistent with Constant.ts
+    return chatModels.map((m) => ({
+      name: m.id,
+      available: true,
+      sorted: seq++,
+      provider: {
+        id: "siliconflow",
+        providerName: "SiliconFlow",
+        providerType: "siliconflow",
+        sorted: 14,
+      },
+    }));
   }
 }
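
As a rough end-to-end illustration of what the new models() method does, the sketch below queries the same list endpoint directly and applies the same mapping. It is a standalone sketch, not code from this commit: the base URL and the Bearer-token header are assumptions (inside the app the request goes through this.path(SiliconFlow.ListModelPath) and getHeaders()), and the model ids in the trailing comment are illustrative only.

// sketch.ts: standalone TypeScript sketch (Node 18+, global fetch).
// Assumption: SiliconFlow's OpenAI-compatible base URL is https://api.siliconflow.cn.
interface ListModelResponse {
  object: string;
  data: Array<{ id: string; object: string; root: string }>;
}

async function listSiliconFlowChatModels(apiKey: string) {
  // Same query string as the new SiliconFlow.ListModelPath constant.
  const res = await fetch("https://api.siliconflow.cn/v1/models?&sub_type=chat", {
    method: "GET",
    headers: { Authorization: `Bearer ${apiKey}` },
  });
  const json = (await res.json()) as ListModelResponse;

  // Same mapping as models(): each returned id becomes a selectable model,
  // numbered from 1000 to match the ordering used in Constant.ts.
  let seq = 1000;
  return (json.data ?? []).map((m) => ({
    name: m.id,
    available: true,
    sorted: seq++,
    provider: {
      id: "siliconflow",
      providerName: "SiliconFlow",
      providerType: "siliconflow",
      sorted: 14,
    },
  }));
}

// Example use (model ids are illustrative, not an actual API response):
// listSiliconFlowChatModels(process.env.SILICONFLOW_API_KEY ?? "").then((models) =>
//   console.log(models.map((m) => m.name)), // e.g. ["deepseek-ai/DeepSeek-V3", ...]
// );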
@@ -258,6 +258,7 @@ export const ChatGLM = {
 export const SiliconFlow = {
   ExampleEndpoint: SILICONFLOW_BASE_URL,
   ChatPath: "v1/chat/completions",
+  ListModelPath: "v1/models?&sub_type=chat",
 };
 
 export const DEFAULT_INPUT_TEMPLATE = `{{input}}`; // input / time / model / lang
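
For context, here is a sketch of the full URL this constant produces when no custom endpoint is configured. The base URL value is an assumption about SILICONFLOW_BASE_URL (it is not part of this diff), and the app's path() helper does the actual joining; the stray "&" after "?" is typically harmless, since an empty query parameter is ignored.

// Illustration only: combining ListModelPath with an assumed default base URL.
const SILICONFLOW_BASE_URL = "https://api.siliconflow.cn"; // assumed value
const ListModelPath = "v1/models?&sub_type=chat";

const listModelsUrl = `${SILICONFLOW_BASE_URL}/${ListModelPath}`;
console.log(listModelsUrl); // https://api.siliconflow.cn/v1/models?&sub_type=chat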