mirror of
https://github.com/danny-avila/LibreChat.git
synced 2025-12-17 08:50:15 +01:00
* chore: bump langchain deps to address vulnerability warnings * chore: bump community package and install textsplitters package * fix: update expected result in tokenSplit tests for accuracy * chore: remove CodeSherpa tools * chore: remove E2B tools and loadToolSuite * chore: remove CodeBrew tool and update related references * chore: remove HumanTool and ChatTool, update tool references * chore: remove Zapier tool from manifest.json and update SerpAPI * chore: remove basic tools * chore: update import path for RecursiveCharacterTextSplitter * chore: update import path for DynamicStructuredTool * chore: remove extractionChain.js and update tool filtering logic * chore: npm audit fix * chore: bump google packages * chore: update DALL-E tool to DALL-E-3 and adjust authentication logic * ci: update message classes * chore: elliptic npm audit fix * chore: update CallbackManager import and remove deprecated tool handling logic * chore: imports order * chore: remove unused code --------- Co-authored-by: Max Sanna <max@maxsanna.com>
31 lines
1.3 KiB
JavaScript
// Demo: feed a short conversation into LangChain's ConversationSummaryBufferMemory,
// then produce a summary of the stored messages directly via predictNewSummary().
// Requires OPENAI_API_KEY in the environment (loaded from .env).
require('dotenv').config();

const { ChatOpenAI } = require('@langchain/openai');
const { getBufferString, ConversationSummaryBufferMemory } = require('langchain/memory');

// Memory that keeps recent turns verbatim and summarizes older ones once the
// token budget is exceeded. maxTokenLimit is deliberately tiny so the
// summarization path is exercised immediately.
const chatPromptMemory = new ConversationSummaryBufferMemory({
  llm: new ChatOpenAI({ modelName: 'gpt-3.5-turbo', temperature: 0 }),
  maxTokenLimit: 10,
  returnMessages: true,
});

(async () => {
  // Seed the memory with a short three-turn exchange.
  await chatPromptMemory.saveContext({ input: 'hi my name\'s Danny' }, { output: 'whats up' });
  await chatPromptMemory.saveContext({ input: 'not much you' }, { output: 'not much' });
  await chatPromptMemory.saveContext(
    { input: 'are you excited for the olympics?' },
    { output: 'not really' },
  );

  // We can also utilize the predictNewSummary method directly.
  const messages = await chatPromptMemory.chatHistory.getMessages();
  console.log('MESSAGES\n\n');
  console.log(JSON.stringify(messages));

  const previousSummary = '';
  const predictSummary = await chatPromptMemory.predictNewSummary(messages, previousSummary);
  console.log('SUMMARY\n\n');
  console.log(JSON.stringify(getBufferString([{ role: 'system', content: predictSummary }])));

  // const { history } = await chatPromptMemory.loadMemoryVariables({});
  // console.log('HISTORY\n\n');
  // console.log(JSON.stringify(history));
})().catch((err) => {
  // The original left this async IIFE floating; a failed API call would surface
  // only as an unhandled rejection. Report it and signal failure to the shell.
  console.error('Summary demo failed:', err);
  process.exitCode = 1;
});
|