Mirror of https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web.git (synced 2025-05-22 21:50:16 +09:00)
fix: remove the condition that uses max_token to reduce the context
parent cf1c8e8f2a
commit 3939ff47ef
@@ -494,13 +494,13 @@ export const useChatStore = createPersistStore(
           : shortTermMemoryStartIndex;
       // and if user has cleared history messages, we should exclude the memory too.
       const contextStartIndex = Math.max(clearContextIndex, memoryStartIndex);
-      const maxTokenThreshold = modelConfig.max_tokens;
+      // const maxTokenThreshold = modelConfig.max_tokens;

       // get recent messages as much as possible
       const reversedRecentMessages = [];
       for (
         let i = totalMessageCount - 1, tokenCount = 0;
-        i >= contextStartIndex && tokenCount < maxTokenThreshold;
+        i >= contextStartIndex ;//&& tokenCount < maxTokenThreshold;
         i -= 1
       ) {
         const msg = messages[i];
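For reference, below is a minimal TypeScript sketch of the message-selection loop after this change. It is not the project's actual code: ChatMessage, estimateTokens, and collectRecentMessages are hypothetical names used only to illustrate that, with the token cutoff commented out, the loop walks back to contextStartIndex regardless of accumulated token count.

// Minimal sketch (hypothetical types and helpers, not the project's real API)
// of the message-selection loop after this commit.

interface ChatMessage {
  role: "user" | "assistant" | "system";
  content: string;
}

// Very rough token estimate for illustration only.
function estimateTokens(msg: ChatMessage): number {
  return Math.ceil(msg.content.length / 4);
}

function collectRecentMessages(
  messages: ChatMessage[],
  contextStartIndex: number,
): ChatMessage[] {
  const totalMessageCount = messages.length;
  const reversedRecentMessages: ChatMessage[] = [];

  for (
    let i = totalMessageCount - 1, tokenCount = 0;
    i >= contextStartIndex; // && tokenCount < maxTokenThreshold; <- condition removed by this commit
    i -= 1
  ) {
    const msg = messages[i];
    // Token count is still accumulated, but it no longer stops the loop.
    tokenCount += estimateTokens(msg);
    reversedRecentMessages.push(msg);
  }

  // Restore chronological order before returning.
  return reversedRecentMessages.reverse();
}

With the condition removed, every message from contextStartIndex onward is included in the context, rather than being cut off once the accumulated token count reaches modelConfig.max_tokens.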