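// Demo of LangChain's ConversationSummaryBufferMemory: the memory buffers
// recent chat messages verbatim and, once the buffer exceeds maxTokenLimit,
// asks the LLM to fold the older messages into a running summary.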
require('dotenv').config();
const { ChatOpenAI } = require('@langchain/openai');
const { SystemMessage } = require('@langchain/core/messages');
const { getBufferString, ConversationSummaryBufferMemory } = require('langchain/memory');

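// A deliberately tiny maxTokenLimit (10 tokens) prunes the buffer after the
// very first exchange, so the demo exercises the summarization path immediately.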
const chatPromptMemory = new ConversationSummaryBufferMemory({
  llm: new ChatOpenAI({ modelName: 'gpt-4o-mini', temperature: 0 }),
  maxTokenLimit: 10,
  returnMessages: true,
});

(async () => {
  // Seed the memory with a few user/assistant exchanges.
  await chatPromptMemory.saveContext({ input: "hi my name's Danny" }, { output: 'whats up' });
  await chatPromptMemory.saveContext({ input: 'not much you' }, { output: 'not much' });
  await chatPromptMemory.saveContext(
    { input: 'are you excited for the olympics?' },
    { output: 'not really' },
  );

  // We can also call the predictNewSummary method directly.
  const messages = await chatPromptMemory.chatHistory.getMessages();
  console.log('MESSAGES\n\n');
  console.log(JSON.stringify(messages));

  // predictNewSummary condenses `messages` into a new summary, extending the
  // previous one (empty on this first pass).
  const previousSummary = '';
  const predictSummary = await chatPromptMemory.predictNewSummary(messages, previousSummary);
  console.log('SUMMARY\n\n');
  // getBufferString expects BaseMessage instances, so wrap the summary text in
  // a SystemMessage rather than a plain { role, content } object.
  console.log(JSON.stringify(getBufferString([new SystemMessage(predictSummary)])));

  // Alternatively, load the combined memory state (summary plus buffered messages):
  // const { history } = await chatPromptMemory.loadMemoryVariables({});
  // console.log('HISTORY\n\n');
  // console.log(JSON.stringify(history));
})();
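
// To run: `node <this-file>` with OPENAI_API_KEY available; dotenv loads it
// from a local .env file.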