feat: compatible

Hk-Gosuto 2024-03-18 18:05:47 +08:00
parent bab838b9c6
commit 428bf81801
3 changed files with 28 additions and 42 deletions

View File

@@ -147,7 +147,7 @@ export class ChatGPTApi implements LLMApi {
       method: "POST",
       body: formData,
       signal: controller.signal,
-      headers: getHeaders(),
+      headers: getHeaders(true),
     };
     // make a fetch request
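The only change here is the `true` argument to `getHeaders`. The body of this request is `FormData`, so the flag presumably tells `getHeaders` to omit its default JSON `Content-Type` and let the browser generate the multipart boundary itself. A minimal sketch of that pattern (the parameter name `ignoreHeaders` is an assumption for illustration, not taken from this diff):

// Sketch only: illustrates why a FormData upload must not carry a
// preset Content-Type. The parameter name `ignoreHeaders` is assumed.
function getHeaders(ignoreHeaders: boolean = false): Record<string, string> {
  const headers: Record<string, string> = {};
  if (!ignoreHeaders) {
    // Plain JSON requests declare their content type explicitly.
    headers["Content-Type"] = "application/json";
  }
  // With a FormData body, leaving Content-Type unset lets fetch add
  // "multipart/form-data; boundary=..." automatically.
  return headers;
}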

View File

@@ -899,7 +899,7 @@ function _Chat() {
     });
     // eslint-disable-next-line react-hooks/exhaustive-deps
     setSpeechApi(
-      new WebTranscriptionApi((transcription) =>
+      new OpenAITranscriptionApi((transcription) =>
         onRecognitionEnd(transcription),
       ),
     );
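Both transcription backends extend the same `SpeechApi` base and take a completion callback, so moving from the browser's `WebTranscriptionApi` to the OpenAI-backed `OpenAITranscriptionApi` is a one-line swap at the call site. A hedged sketch of that call site (`onRecognitionEnd` is the handler `_Chat` already defines):

// Sketch of the swap: both classes extend SpeechApi and accept the
// same callback, so nothing else at the call site needs to change.
const speechApi: SpeechApi = new OpenAITranscriptionApi((transcription) =>
  onRecognitionEnd(transcription), // fires once the transcription returns
);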

View File

@@ -13,35 +13,12 @@ export abstract class SpeechApi {
   onTranscriptionReceived(callback: TranscriptionCallback) {
     this.onTranscription = callback;
   }
-
-  protected async getMediaStream(): Promise<MediaStream | null> {
-    if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
-      return await navigator.mediaDevices.getUserMedia({ audio: true });
-    } else if (navigator.getUserMedia) {
-      return new Promise((resolve, reject) => {
-        navigator.getUserMedia({ audio: true }, resolve, reject);
-      });
-    } else {
-      console.warn("The current browser does not support getUserMedia");
-      return null;
-    }
-  }
-
-  protected createRecorder(stream: MediaStream): MediaRecorder | null {
-    if (MediaRecorder.isTypeSupported("audio/webm")) {
-      return new MediaRecorder(stream, { mimeType: "audio/webm" });
-    } else if (MediaRecorder.isTypeSupported("audio/ogg")) {
-      return new MediaRecorder(stream, { mimeType: "audio/ogg" });
-    } else {
-      console.warn("The current browser does not support MediaRecorder");
-      return null;
-    }
-  }
 }

 export class OpenAITranscriptionApi extends SpeechApi {
   private listeningStatus = false;
-  private recorder: MediaRecorder | null = null;
+  private mediaRecorder: MediaRecorder | null = null;
+  private stream: MediaStream | null = null;
   private audioChunks: Blob[] = [];

   isListening = () => this.listeningStatus;
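With the `getMediaStream` and `createRecorder` helpers deleted, the abstract base is reduced to the callback plumbing, and each subclass now owns its own capture logic. For reference, the surviving shape of the contract looks roughly like this (the `start`/`stop` signatures are inferred from the subclass below, not spelled out in this hunk):

// Approximate contract after this commit; inferred, not verbatim.
type TranscriptionCallback = (transcription: string) => void;

export abstract class SpeechApi {
  protected onTranscription: TranscriptionCallback = () => {};

  abstract start(): Promise<void>;
  abstract stop(): Promise<void>;

  onTranscriptionReceived(callback: TranscriptionCallback) {
    this.onTranscription = callback;
  }
}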
@@ -54,35 +31,44 @@ export class OpenAITranscriptionApi extends SpeechApi {
   }

   async start(): Promise<void> {
-    const stream = await this.getMediaStream();
-    if (!stream) {
-      console.error("Unable to acquire the audio stream");
-      return;
-    }
-
-    this.recorder = this.createRecorder(stream);
-    if (!this.recorder) {
-      console.error("Unable to create a MediaRecorder");
-      return;
-    }
+    // @ts-ignore
+    navigator.getUserMedia =
+      navigator.getUserMedia ||
+      navigator.webkitGetUserMedia ||
+      navigator.mozGetUserMedia ||
+      navigator.msGetUserMedia;
+    if (navigator.mediaDevices) {
+      const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
+      this.mediaRecorder = new MediaRecorder(stream);
+      this.mediaRecorder.ondataavailable = (e) => {
+        if (e.data && e.data.size > 0) {
+          this.audioChunks.push(e.data);
+        }
+      };
+      this.stream = stream;
+    } else {
+      console.warn("Media Devices will work only with SSL");
+      return;
+    }

     this.audioChunks = [];

-    this.recorder.addEventListener("dataavailable", (event) => {
-      this.audioChunks.push(event.data);
-    });
+    // this.recorder.addEventListener("dataavailable", (event) => {
+    //   this.audioChunks.push(event.data);
+    // });

-    this.recorder.start();
+    this.mediaRecorder.start();
     this.listeningStatus = true;
   }

   async stop(): Promise<void> {
-    if (!this.recorder || !this.listeningStatus) {
+    if (!this.mediaRecorder || !this.listeningStatus) {
       return;
     }

     return new Promise((resolve) => {
-      this.recorder!.addEventListener("stop", async () => {
+      this.mediaRecorder!.addEventListener("stop", async () => {
         const audioBlob = new Blob(this.audioChunks, { type: "audio/wav" });
         const llm = new ChatGPTApi();
         const transcription = await llm.transcription({ file: audioBlob });
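Two things are worth noting in the rewritten `start()`: `navigator.mediaDevices` only exists in secure contexts, which is what the SSL warning refers to, and the recorder is now created without an explicit `mimeType`, so the browser picks its default container (typically webm or ogg) even though `stop()` labels the blob `audio/wav`. A small sketch of the secure-context check, using only standard browser APIs:

// mediaDevices is undefined on plain HTTP (except localhost); checking
// isSecureContext makes the failure mode explicit before recording starts.
async function captureAudio(): Promise<MediaRecorder | null> {
  if (!window.isSecureContext || !navigator.mediaDevices?.getUserMedia) {
    console.warn("Audio capture requires HTTPS (a secure context)");
    return null;
  }
  const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
  return new MediaRecorder(stream); // browser-default container, not wav
}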
@@ -91,7 +77,7 @@ export class OpenAITranscriptionApi extends SpeechApi {
         resolve();
       });

-      this.recorder!.stop();
+      this.mediaRecorder!.stop();
     });
   }
 }
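Putting the pieces together, the intended lifecycle of the new class is: construct with a callback, `start()` to begin capturing, then `stop()`, which resolves only after the audio has been uploaded and transcribed. A hypothetical usage sketch (names taken from this file; error handling omitted):

// Hypothetical usage of the class above, inside an async handler;
// assumes a secure context and granted microphone permission.
const speech = new OpenAITranscriptionApi((text) => {
  console.log("transcribed:", text); // constructor callback, as in chat.tsx
});

await speech.start(); // begins recording from the microphone
// ... the user speaks ...
await speech.stop();  // resolves once the upload and transcription finish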