ChatGPT-Next-Web/app/utils/stream.ts
Mihail Klimin 8fa7c14f18 feat(tauri): Migrate from Tauri v1 to v2
# Summary
This commit completes the migration from Tauri v1 to v2, resolves configuration issues, upgrades Next.js, and adds test coverage for critical components to ensure stability during the transition.

# Details
## Tauri v2 Migration
- Updated Tauri dependencies to v2.3.0 series in package.json
- Restructured build configuration in `/app/config/build.ts` to align with Tauri v2 requirements
- Fixed imports and API usage patterns across the codebase
- Added a compatibility layer so existing `window.__TAURI__` references keep working under v2 (a minimal sketch follows this list)
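
A minimal sketch of the kind of shim this refers to, assuming a global type declaration plus a single runtime guard; the names here are illustrative, not the repository's actual code:

```ts
// Hypothetical compatibility shim: declare the global so existing
// `window.__TAURI__` checks keep type-checking under v2, and centralize
// the runtime check in one helper.
declare global {
  interface Window {
    __TAURI__?: Record<string, unknown>;
  }
}

export function isTauri(): boolean {
  return typeof window !== "undefined" && !!window.__TAURI__;
}
```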

## Next.js Issues
- Upgraded Next.js from 14.1.1 to 14.2.24
- Resolved caching problems with Server Actions
- Updated eslint-config-next to match the new version
- Cleared Next.js cache and temporary files to address build issues

## Testing & Stability
- Added comprehensive tests for `stream.ts` to verify streaming functionality
- Created mocks for the Tauri API to support the test environment (see the sketch after this list)
- Verified that critical functionality continues to work correctly
- Translated all comments to English for consistency
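
An illustrative Jest-style mock, assuming the tests stub `invoke` and `listen`; the module names are the real Tauri v2 packages used by `stream.ts`, but the mock shape is an assumption, not the repository's actual setup:

```ts
// Hypothetical mock: resolve invoke("stream_fetch") with fixed response
// metadata and have listen() hand back a no-op unlisten function.
jest.mock("@tauri-apps/api/core", () => ({
  invoke: jest.fn().mockResolvedValue({
    request_id: 1,
    status: 200,
    status_text: "OK",
    headers: {},
  }),
}));

jest.mock("@tauri-apps/api/event", () => ({
  listen: jest.fn().mockResolvedValue(() => {}),
}));
```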

## Infrastructure
- Fixed peer dependency warnings during installation
- Ensured proper integration with Tauri v2 plugins (clipboard-manager, dialog, fs, http, notification, shell, updater, window-state)
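
For reference, a hedged sketch of how two of these plugins are consumed from the frontend under v2; the actual call sites in this repository may differ:

```ts
import { writeText } from "@tauri-apps/plugin-clipboard-manager";
import { check } from "@tauri-apps/plugin-updater";

// Copy text to the system clipboard, then ask the updater plugin whether a
// newer build is available (check() resolves to null when already up to date).
export async function copyAndCheckForUpdate(text: string) {
  await writeText(text);
  return check();
}
```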

# Approach
Prioritized stability by:
1. Making minimal necessary changes to configuration files
2. Preserving most `window.__TAURI__` calls as they still function in v2
3. Planning a gradual migration to the new APIs, with test coverage for critical components (the core import-path change is sketched after this list)
4. Documenting areas that will require future attention
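
The main mechanical change driving that migration is the move of the core API entry point. A minimal before/after sketch, where the `ping` command is hypothetical and shown only to illustrate the call shape:

```ts
// Tauri v1:
// import { invoke } from "@tauri-apps/api/tauri";
// Tauri v2 (as used in app/utils/stream.ts below):
import { invoke } from "@tauri-apps/api/core";

export function pingBackend(): Promise<string> {
  // "ping" is a hypothetical command name, not one defined in this repository.
  return invoke<string>("ping");
}
```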

# Testing
- Created unit tests for critical streaming functionality (an illustrative test shape follows this list)
- Performed manual testing of key application features
- Verified successful build and launch with Tauri v2
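
As an illustration only (not the repository's actual suite), a Jest-style test of the non-Tauri fallback path, assuming a jsdom environment where `Response` is available; the import path is illustrative:

```ts
import { fetch as streamFetch } from "./stream"; // path is illustrative

test("falls back to window.fetch when __TAURI__ is absent", async () => {
  (window as any).__TAURI__ = undefined;
  const mockFetch = jest.fn().mockResolvedValue(new Response("ok"));
  (window as any).fetch = mockFetch;

  const res = await streamFetch("https://example.com");

  expect(mockFetch).toHaveBeenCalledWith("https://example.com", undefined);
  expect(await res.text()).toBe("ok");
});
```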

# Future Work
- Future PRs will gradually replace deprecated Tauri v1 API calls with v2 equivalents
- Additional test coverage will be added for other critical components
2025-03-16 02:14:47 +03:00

// using tauri command to send request
// see src-tauri/src/stream.rs, and src-tauri/src/main.rs
// 1. invoke('stream_fetch', {url, method, headers, body}) to get the response status and headers.
// 2. listen to the `stream-response` event multiple times to receive the body chunks.
import { invoke } from "@tauri-apps/api/core";
import { listen } from "@tauri-apps/api/event";

type ResponseEvent = {
  id: number;
  payload: {
    request_id: number;
    status?: number;
    chunk?: number[];
  };
};

type StreamResponse = {
  request_id: number;
  status: number;
  status_text: string;
  headers: Record<string, string>;
};

export function fetch(url: string, options?: RequestInit): Promise<Response> {
  if (window.__TAURI__) {
    const {
      signal,
      method = "GET",
      headers: _headers = {},
      body = [],
    } = options || {};
    let unlisten: Function | undefined;
    let setRequestId: Function | undefined;
    const requestIdPromise = new Promise((resolve) => (setRequestId = resolve));
    const ts = new TransformStream();
    const writer = ts.writable.getWriter();

    let closed = false;
    const close = () => {
      if (closed) return;
      closed = true;
      unlisten && unlisten();
      writer.ready.then(() => {
        writer.close().catch((e) => console.error(e));
      });
    };

    if (signal) {
      signal.addEventListener("abort", () => close());
    }

    // Listen for `stream-response` events and forward chunks belonging to
    // this request into the Response body; status === 0 marks end of body.
    listen("stream-response", (e: ResponseEvent) =>
      requestIdPromise.then((request_id) => {
        const { request_id: rid, chunk, status } = e?.payload || {};
        if (request_id != rid) {
          return;
        }
        if (chunk) {
          writer.ready.then(() => {
            writer.write(new Uint8Array(chunk));
          });
        } else if (status === 0) {
          // end of body
          close();
        }
      }),
    ).then((u: Function) => (unlisten = u));

    // Default headers, overridden by any headers passed in the options.
    const headers: Record<string, string> = {
      Accept: "application/json, text/plain, */*",
      "Accept-Language": "en-US,en;q=0.9,zh-CN;q=0.8,zh;q=0.7",
      "User-Agent": navigator.userAgent,
    };
    for (const item of new Headers(_headers || {})) {
      headers[item[0]] = item[1];
    }

    return invoke<StreamResponse>("stream_fetch", {
      method: method.toUpperCase(),
      url,
      headers,
      // TODO FormData
      body:
        typeof body === "string"
          ? Array.from(new TextEncoder().encode(body))
          : [],
    })
      .then((res: StreamResponse) => {
        const { request_id, status, status_text: statusText, headers } = res;
        setRequestId?.(request_id);
        const response = new Response(ts.readable, {
          status,
          statusText,
          headers,
        });
        if (status >= 300) {
          setTimeout(close, 100);
        }
        return response;
      })
      .catch((e) => {
        console.error("stream error", e);
        // throw e;
        return new Response("", { status: 599 });
      });
  }
  return window.fetch(url, options);
}
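
// Usage sketch (illustrative, not part of stream.ts): the exported fetch is a
// drop-in replacement for window.fetch, so streaming consumers read the body
// the same way inside and outside the Tauri shell. The import path assumes a
// caller living next to app/utils/.
import { fetch as streamFetch } from "./utils/stream";

export async function logStream(url: string) {
  const res = await streamFetch(url, { method: "GET" });
  const reader = res.body!.getReader();
  const decoder = new TextDecoder();
  for (;;) {
    const { done, value } = await reader.read();
    if (done) break;
    // Decode each chunk as it arrives (from the Rust side or from window.fetch).
    console.log(decoder.decode(value, { stream: true }));
  }
}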