chore: code
commit f572df051d
parent ee08ab4070
@@ -141,7 +141,7 @@ export async function requestGoogleGemini(req: NextRequest) {
   }
 
   console.log("[Proxy] ", path);
-  console.log("[Base Url]", baseUrl);
+  console.log("[Google Base Url]", baseUrl);
   // this fix [Org ID] undefined in server side if not using custom point
   if (serverConfig.openaiOrgId !== undefined) {
     console.log("[Org ID]", serverConfig.openaiOrgId);
@@ -24,8 +24,6 @@ import {
   fetchEventSource,
 } from "@fortaine/fetch-event-source";
 import { prettyObject } from "@/app/utils/format";
-import { getClientConfig } from "@/app/config/client";
-import { makeAzurePath } from "@/app/azure";
 
 export interface OpenAIListModelResponse {
   object: string;
@@ -41,16 +39,6 @@ export class GeminiApi implements LLMApi {
 
   path(path: string): string {
     const accessStore = useAccessStore.getState();
-
-    // const isAzure = accessStore.provider === ServiceProvider.Azure;
-
-    // if (isAzure && !accessStore.isValidAzure()) {
-    //   throw Error(
-    //     "incomplete azure config, please check it in your settings page",
-    //   );
-    // }
-
-    // let baseUrl = isAzure ? accessStore.azureUrl : accessStore.openaiUrl;
     let baseUrl = ApiPath.GoogleAI;
     // if (baseUrl.length === 0) {
     //   const isApp = !!getClientConfig()?.isApp;
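
For context only, here is a minimal sketch (not this repository's exact code) of how the Gemini request path can be assembled once the commented-out Azure branches above are deleted and the base URL always comes from the Google proxy route. The constant GOOGLE_PROXY_PATH and the helper name googleApiPath are illustrative assumptions; in the real client the value is taken from ApiPath.GoogleAI.

// Sketch only: assumes ApiPath.GoogleAI resolves to a "/api/google" proxy route.
const GOOGLE_PROXY_PATH = "/api/google"; // hypothetical stand-in for ApiPath.GoogleAI

function googleApiPath(subPath: string): string {
  let baseUrl = GOOGLE_PROXY_PATH;

  // Drop a trailing slash so the join below never produces "//".
  if (baseUrl.endsWith("/")) {
    baseUrl = baseUrl.slice(0, -1);
  }

  return [baseUrl, subPath].join("/");
}

// Example (sub-path format per the public Gemini REST API):
// googleApiPath("v1beta/models/gemini-pro:generateContent")
//   -> "/api/google/v1beta/models/gemini-pro:generateContent"
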
@@ -176,7 +164,7 @@ export class GeminiApi implements LLMApi {
           clearTimeout(requestTimeoutId);
           const contentType = res.headers.get("content-type");
           console.log(
-            "[OpenAI] request response content type: ",
+            "[Google] request response content type: ",
             contentType,
           );
 