mirror of
https://github.com/danny-avila/LibreChat.git
synced 2025-12-16 08:20:14 +01:00
* 🧹 chore: Update logger imports to use @librechat/data-schemas across multiple files and remove unused sleep function from queue.js (#9930) * chore: Replace local isEnabled utility with @librechat/api import across multiple files, update test files * chore: Replace local logger import with @librechat/data-schemas logger in countTokens.js and fork.js * chore: Update logs volume path in docker-compose.yml to correct directory * chore: import order of isEnabled in static.js
66 lines · 1.7 KiB · JavaScript
const { logger } = require('@librechat/data-schemas');
|
|
const { ConversationSummaryBufferMemory, ChatMessageHistory } = require('langchain/memory');
|
|
const { formatLangChainMessages, SUMMARY_PROMPT } = require('../prompts');
|
|
const { predictNewSummary } = require('../chains');
|
|
|
|
/**
 * Builds a `ConversationSummaryBufferMemory` seeded with prior messages.
 *
 * @param {Object} params
 * @param {Object} params.llm - Language model the memory uses for summarization.
 * @param {Object} params.prompt - Prompt template passed to the memory.
 * @param {Array} params.messages - Messages used to seed the chat history.
 * @param {...Object} params.rest - Extra options forwarded to the memory constructor
 *   (later keys override the defaults above).
 * @returns {ConversationSummaryBufferMemory} Memory instance with `returnMessages` enabled.
 */
const createSummaryBufferMemory = ({ llm, prompt, messages, ...rest }) => {
  const memoryConfig = {
    llm,
    prompt,
    chatHistory: new ChatMessageHistory(messages),
    returnMessages: true,
    ...rest,
  };
  return new ConversationSummaryBufferMemory(memoryConfig);
};
|
|
|
|
/**
 * Summarizes a conversation context into a single system message, optionally
 * extending a previous summary.
 *
 * @param {Object} params
 * @param {Object} params.llm - Language model used to predict the summary.
 * @param {boolean} params.debug - When true, logs the buffer size and the resulting summary.
 * @param {Array} params.context - Array of messages to summarize.
 * @param {Object} [params.formatOptions={}] - Options for `formatLangChainMessages`; may
 *   carry `userName` / `assistantName` to customize the speaker prefixes in the summary.
 * @param {string} [params.previous_summary=''] - Prior summary to fold into the new one.
 * @param {Object} [params.prompt=SUMMARY_PROMPT] - Prompt template for summarization.
 * @param {AbortSignal} [params.signal] - Signal used to cancel the prediction.
 * @returns {Promise<{ role: string, content: string }>} System message carrying the new summary.
 */
const summaryBuffer = async ({
  llm,
  debug,
  context, // array of messages
  formatOptions = {},
  previous_summary = '',
  prompt = SUMMARY_PROMPT,
  signal,
}) => {
  if (previous_summary) {
    logger.debug('[summaryBuffer]', { previous_summary });
  }

  const formattedMessages = formatLangChainMessages(context, formatOptions);
  const memoryOptions = {
    llm,
    prompt,
    messages: formattedMessages,
  };

  if (formatOptions.userName) {
    memoryOptions.humanPrefix = formatOptions.userName;
  }
  // Bug fix: guard on `assistantName` (the value actually assigned), not `userName`.
  // The original copy-pasted condition could set `aiPrefix` to `undefined` when only
  // a userName was provided, and dropped `assistantName` when no userName was set.
  if (formatOptions.assistantName) {
    memoryOptions.aiPrefix = formatOptions.assistantName;
  }

  const chatPromptMemory = createSummaryBufferMemory(memoryOptions);

  const messages = await chatPromptMemory.chatHistory.getMessages();

  if (debug) {
    logger.debug('[summaryBuffer]', { summary_buffer_messages: messages.length });
  }

  const predictSummary = await predictNewSummary({
    messages,
    previous_summary,
    memory: chatPromptMemory,
    signal,
  });

  if (debug) {
    logger.debug('[summaryBuffer]', { summary: predictSummary });
  }

  return { role: 'system', content: predictSummary };
};
|
|
|
|
/** Public API: the memory factory and the summary-generation entry point. */
module.exports = { createSummaryBufferMemory, summaryBuffer };