Mirror of https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web.git (synced 2025-05-24 14:40:22 +09:00)
fix: remove the condition that uses max_token to reduce the context
parent cf1c8e8f2a
commit 3939ff47ef
@@ -494,13 +494,13 @@ export const useChatStore = createPersistStore(
         : shortTermMemoryStartIndex;
     // and if user has cleared history messages, we should exclude the memory too.
     const contextStartIndex = Math.max(clearContextIndex, memoryStartIndex);
-    const maxTokenThreshold = modelConfig.max_tokens;
+    // const maxTokenThreshold = modelConfig.max_tokens;
 
     // get recent messages as much as possible
     const reversedRecentMessages = [];
     for (
       let i = totalMessageCount - 1, tokenCount = 0;
-      i >= contextStartIndex && tokenCount < maxTokenThreshold;
+      i >= contextStartIndex ;//&& tokenCount < maxTokenThreshold;
       i -= 1
     ) {
       const msg = messages[i];
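In effect, the commit comments out the max_tokens cut-off, so the loop walks all the way back to contextStartIndex instead of stopping once the accumulated token count reaches the model's max_tokens setting. Below is a minimal TypeScript sketch of how the selection loop reads after this change; collectRecentMessages and the ChatMessage shape are illustrative names, while messages, totalMessageCount, contextStartIndex, and reversedRecentMessages follow the identifiers visible in the diff, and the real code in app/store/chat.ts around this hunk is more involved than shown here.

// Minimal sketch of the selection loop after this change, not the actual
// store implementation. collectRecentMessages and ChatMessage are
// illustrative; messages, totalMessageCount, contextStartIndex and
// reversedRecentMessages follow the identifiers visible in the diff.
interface ChatMessage {
  role: string;
  content: string;
}

function collectRecentMessages(
  messages: ChatMessage[],
  contextStartIndex: number,
): ChatMessage[] {
  const totalMessageCount = messages.length;
  const reversedRecentMessages: ChatMessage[] = [];

  // Walk backwards from the newest message down to contextStartIndex.
  // With the `tokenCount < maxTokenThreshold` guard commented out, every
  // message after the context start is collected regardless of its size.
  for (let i = totalMessageCount - 1; i >= contextStartIndex; i -= 1) {
    reversedRecentMessages.push(messages[i]);
  }

  // The list is built newest-first; the name suggests the caller reverses
  // it back into chronological order before sending it to the model.
  return reversedRecentMessages;
}

The practical effect is that context trimming now depends only on the clear-context index and the short-term memory start, not on the configured max_tokens value.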