Mirror of https://github.com/danny-avila/LibreChat.git (synced 2025-09-21 21:50:49 +02:00)

* 🧠 feat: User Memories for Conversational Context
chore: mcp typing, use `t`
WIP: first pass, Memories UI
- Added MemoryViewer component for displaying, editing, and deleting user memories.
- Integrated data provider hooks for fetching, updating, and deleting memories.
- Implemented pagination and loading states for better user experience.
- Created unit tests for MemoryViewer to ensure functionality and interaction with the data provider.
- Updated translation files to include new UI strings related to memories.
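A minimal sketch of what one of these data-provider hooks could look like, assuming TanStack Query v4+; the '/api/memories' route, hook name, and response shape are illustrative assumptions rather than the actual librechat-data-provider API:

import { useQuery } from '@tanstack/react-query';

// Hypothetical hook for fetching the current user's memories.
export function useUserMemoriesQuery() {
  return useQuery({
    queryKey: ['memories'],
    queryFn: async () => {
      const res = await fetch('/api/memories');
      if (!res.ok) {
        throw new Error(`Failed to fetch memories: ${res.status}`);
      }
      // Assumed response shape: { memories: [...], totalTokens, tokenLimit }
      return res.json();
    },
  });
}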
chore: move mcp-related files to own directory
chore: rename librechat-mcp to librechat-api
WIP: first pass, memory processing and data schemas
chore: linting in fileSearch.js query description
chore: rename librechat-api to @librechat/api across the project
WIP: first pass, functional memory agent
feat: add MemoryEditDialog and MemoryViewer components for managing user memories
- Introduced MemoryEditDialog for editing memory entries with validation and toast notifications.
- Updated MemoryViewer to support editing and deleting memories, including pagination and loading states.
- Enhanced data provider to handle memory updates with optional original key for better management.
- Added new localization strings for memory-related UI elements.
feat: add memory permissions management
- Implemented memory permissions in the backend, allowing roles to have specific permissions for using, creating, updating, and reading memories.
- Added new API endpoints for updating memory permissions associated with roles.
- Created a new AdminSettings component for managing memory permissions in the frontend.
- Integrated memory permissions into the existing roles and permissions schemas.
- Updated the interface to include memory settings and permissions.
- Enhanced the MemoryViewer component to conditionally render admin settings based on user roles.
- Added localization support for memory permissions in the translation files.
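A hedged sketch of what the backend permission gate might look like; the permission names mirror the use/create/update/read actions described above, but the middleware shape and field names are assumptions:

// Illustrative only: gate memory routes behind role-based permissions.
function checkMemoryPermission(permission) {
  return (req, res, next) => {
    const memoryPermissions = req.user?.role?.permissions?.MEMORIES ?? {};
    if (memoryPermissions[permission] !== true) {
      return res.status(403).json({ message: 'Insufficient memory permissions' });
    }
    return next();
  };
}

// Example wiring (hypothetical route and handler names):
// router.post('/memories', checkMemoryPermission('CREATE'), createMemoryHandler);
// router.get('/memories', checkMemoryPermission('READ'), getMemoriesHandler);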
feat: move AdminSettings component to a new position in MemoryViewer for better visibility
refactor: clean up commented code in MemoryViewer component
feat: enhance MemoryViewer with search functionality and improve MemoryEditDialog integration
- Added a search input to filter memories in the MemoryViewer component.
- Refactored MemoryEditDialog to accept children for better customization.
- Updated MemoryViewer to utilize the new EditMemoryButton and DeleteMemoryButton components for editing and deleting memories.
- Improved localization support by adding new strings for memory filtering and deletion confirmation.
refactor: optimize memory filtering in MemoryViewer using match-sorter
- Replaced manual filtering logic with match-sorter for improved search functionality.
- Enhanced performance and readability of the filteredMemories computation.
feat: enhance MemoryEditDialog with triggerRef and improve updateMemory mutation handling
feat: implement access control for MemoryEditDialog and MemoryViewer components
refactor: remove commented out code and create runMemory method
refactor: rename role based files
feat: implement access control for memory usage in AgentClient
refactor: simplify checkVisionRequest method in AgentClient by removing commented-out code
refactor: make `agents` dir in api package
refactor: migrate Azure utilities to TypeScript and consolidate imports
refactor: move sanitizeFilename function to a new file and update imports, add related tests
refactor: update LLM configuration types and consolidate Azure options in the API package
chore: linting
chore: import order
refactor: replace getLLMConfig with getOpenAIConfig and remove unused LLM configuration file
chore: update winston-daily-rotate-file to version 5.0.0 and add object-hash dependency in package-lock.json
refactor: move primeResources and optionalChainWithEmptyCheck functions to resources.ts and update imports
refactor: move createRun function to a new run.ts file and update related imports
fix: ensure safeAttachments is correctly typed as an array of TFile
chore: add node-fetch dependency and refactor fetch-related functions into packages/api/utils, removing the old generators file
refactor: enhance TEndpointOption type by using Pick to streamline endpoint fields and add new properties for model parameters and client options
feat: implement initializeOpenAIOptions function and update OpenAI types for enhanced configuration handling
fix: update types due to new TEndpointOption typing
fix: ensure safe access to group parameters in initializeOpenAIOptions function
fix: remove redundant API key validation comment in initializeOpenAIOptions function
refactor: rename initializeOpenAIOptions to initializeOpenAI for consistency and update related documentation
refactor: decouple req.body fields and tool loading from initializeAgentOptions
chore: linting
refactor: adjust column widths in MemoryViewer for improved layout
refactor: simplify agent initialization by creating loadAgent function and removing unused code
feat: add memory configuration loading and validation functions
WIP: first pass, memory processing with config
feat: implement memory callback and artifact handling
feat: implement memory artifacts display and processing updates
feat: add memory configuration options and schema validation for validKeys
fix: update MemoryEditDialog and MemoryViewer to handle memory state and display improvements
refactor: remove padding from BookmarkTable and MemoryViewer headers for consistent styling
WIP: initial tokenLimit config and move Tokenizer to @librechat/api
refactor: update mongoMeili plugin methods to use callback for better error handling
feat: enhance memory management with token tracking and usage metrics
- Added token counting for memory entries to enforce limits and provide usage statistics.
- Updated memory retrieval and update routes to include total token usage and limit.
- Enhanced MemoryEditDialog and MemoryViewer components to display memory usage and token information.
- Refactored memory processing functions to handle token limits and provide feedback on memory capacity.
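The token tracking described above roughly amounts to summing entry sizes against a configured limit; a sketch, assuming a countTokens helper such as the Tokenizer moved into @librechat/api:

// Illustrative token accounting for memory entries (not the exact implementation).
function getMemoryUsage(memories, tokenLimit, countTokens) {
  const totalTokens = memories.reduce(
    (sum, entry) => sum + countTokens(`${entry.key}: ${entry.value}`),
    0,
  );
  return {
    totalTokens,
    tokenLimit,
    remaining: Math.max(tokenLimit - totalTokens, 0),
    exceeded: tokenLimit > 0 && totalTokens > tokenLimit,
  };
}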
feat: implement memory artifact handling in attachment handler
- Enhanced useAttachmentHandler to process memory artifacts when receiving updates.
- Introduced handleMemoryArtifact utility to manage memory updates and deletions.
- Updated query client to reflect changes in memory state based on incoming data.
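One plausible shape for such a handler, assuming artifacts carry a memory key plus an update or delete action; the names below are hypothetical, not the actual handleMemoryArtifact signature:

// Hypothetical sketch: apply an incoming memory artifact to the cached memories query.
function applyMemoryArtifact(queryClient, artifact) {
  queryClient.setQueryData(['memories'], (prev = { memories: [] }) => {
    const memories = prev.memories.filter((m) => m.key !== artifact.key);
    if (artifact.type === 'delete') {
      return { ...prev, memories };
    }
    return { ...prev, memories: [...memories, { key: artifact.key, value: artifact.value }] };
  });
}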
refactor: restructure web search key extraction logic
- Moved the logic for extracting API keys from the webSearchAuth configuration into a dedicated function, getWebSearchKeys.
- Updated webSearchKeys to utilize the new function for improved clarity and maintainability.
- Prevents build-time errors
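The extraction described above essentially flattens the auth config into a list of key names; a sketch, assuming webSearchAuth maps each provider to an object keyed by environment-variable names:

// Illustrative version of getWebSearchKeys (the actual config structure may differ).
function getWebSearchKeysSketch(webSearchAuth = {}) {
  const keys = [];
  for (const providerConfig of Object.values(webSearchAuth)) {
    keys.push(...Object.keys(providerConfig));
  }
  return keys;
}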
feat: add personalization settings and memory preferences management
- Introduced a new Personalization tab in settings to manage user memory preferences.
- Implemented API endpoints and client-side logic for updating memory preferences.
- Enhanced user interface components to reflect personalization options and memory usage.
- Updated permissions to allow users to opt out of memory features.
- Added localization support for new settings and messages related to personalization.
style: personalization switch class
feat: add PersonalizationIcon and align Side Panel UI
feat: implement memory creation functionality
- Added a new API endpoint for creating memory entries, including validation for key and value.
- Introduced MemoryCreateDialog component for user interface to facilitate memory creation.
- Integrated token limit checks to prevent exceeding user memory capacity.
- Updated MemoryViewer to include a button for opening the memory creation dialog.
- Enhanced localization support for new messages related to memory creation.
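A hedged sketch of the creation endpoint described above; the route path, helper functions, and limit check are assumptions rather than the actual implementation:

const express = require('express');

const router = express.Router();

// Illustrative only: create a memory entry after validating key/value and token capacity.
// getMemoryUsageForUser, countTokens, and createMemoryForUser are assumed helpers.
router.post('/memories', async (req, res) => {
  const { key, value } = req.body ?? {};
  if (typeof key !== 'string' || !key || typeof value !== 'string' || !value) {
    return res.status(400).json({ message: '`key` and `value` are required strings' });
  }
  const { totalTokens, tokenLimit } = await getMemoryUsageForUser(req.user.id);
  if (tokenLimit > 0 && totalTokens + countTokens(value) > tokenLimit) {
    return res.status(400).json({ message: 'Memory token limit exceeded' });
  }
  const entry = await createMemoryForUser(req.user.id, { key, value });
  return res.status(201).json(entry);
});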
feat: enhance message processing with configurable window size
- Updated AgentClient to use a configurable message window size for processing messages.
- Introduced messageWindowSize option in memory configuration schema with a default value of 5.
- Improved logic for selecting messages to process based on the configured window size.
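Taken together, the memory configuration introduced across these commits might load into something roughly like the following; only validKeys, tokenLimit, and messageWindowSize come from the commit descriptions, while the example values and surrounding structure are assumptions:

// Illustrative memory configuration after loading/validation (values are examples).
const memoryConfigExample = {
  validKeys: ['preferences', 'profile', 'projects'], // example keys, not defaults
  tokenLimit: 2000, // example cap on total tokens across memory entries
  messageWindowSize: 5, // recent messages considered for memory processing (default 5)
};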
chore: update librechat-data-provider version to 0.7.87 in package.json and package-lock.json
chore: remove OpenAPIPlugin and its associated tests
chore: remove MIGRATION_README.md as migration tasks are completed
ci: fix backend tests
chore: remove unused translation keys from localization file
chore: remove problematic test file and unused var in AgentClient
chore: remove unused import and import directly for JSDoc
* feat: add api package build stage in Dockerfile for improved modularity
* docs: reorder build steps in contributing guide for clarity
387 lines · 11 KiB · JavaScript
const { logger } = require('~/config');

// WeakMap to hold temporary data associated with requests
const requestDataMap = new WeakMap();

const FinalizationRegistry = global.FinalizationRegistry || null;

/**
 * FinalizationRegistry to clean up client objects when they are garbage collected.
 * This is used to prevent memory leaks and ensure that client objects are
 * properly disposed of when they are no longer needed.
 * The registry holds a weak reference to the client object and a cleanup
 * callback that is called when the client object is garbage collected.
 * The callback can be used to perform any necessary cleanup operations,
 * such as removing event listeners or freeing up resources.
 */
const clientRegistry = FinalizationRegistry
  ? new FinalizationRegistry((heldValue) => {
      try {
        // This will run when the client is garbage collected
        if (heldValue && heldValue.userId) {
          logger.debug(`[FinalizationRegistry] Cleaning up client for user ${heldValue.userId}`);
        } else {
          logger.debug('[FinalizationRegistry] Cleaning up client');
        }
      } catch (e) {
        // Ignore errors
      }
    })
  : null;

/**
 * Cleans up the client object by removing references to its properties.
 * This is useful for preventing memory leaks and ensuring that the client
 * and its properties can be garbage collected when it is no longer needed.
 */
function disposeClient(client) {
  if (!client) {
    return;
  }

  try {
    if (client.user) {
      client.user = null;
    }
    if (client.apiKey) {
      client.apiKey = null;
    }
    if (client.azure) {
      client.azure = null;
    }
    if (client.conversationId) {
      client.conversationId = null;
    }
    if (client.responseMessageId) {
      client.responseMessageId = null;
    }
    if (client.message_file_map) {
      client.message_file_map = null;
    }
    if (client.clientName) {
      client.clientName = null;
    }
    if (client.sender) {
      client.sender = null;
    }
    if (client.model) {
      client.model = null;
    }
    if (client.maxContextTokens) {
      client.maxContextTokens = null;
    }
    if (client.contextStrategy) {
      client.contextStrategy = null;
    }
    if (client.currentDateString) {
      client.currentDateString = null;
    }
    if (client.inputTokensKey) {
      client.inputTokensKey = null;
    }
    if (client.outputTokensKey) {
      client.outputTokensKey = null;
    }
    if (client.skipSaveUserMessage !== undefined) {
      client.skipSaveUserMessage = null;
    }
    if (client.visionMode) {
      client.visionMode = null;
    }
    if (client.continued !== undefined) {
      client.continued = null;
    }
    if (client.fetchedConvo !== undefined) {
      client.fetchedConvo = null;
    }
    if (client.previous_summary) {
      client.previous_summary = null;
    }
    if (client.metadata) {
      client.metadata = null;
    }
    if (client.isVisionModel) {
      client.isVisionModel = null;
    }
    if (client.isChatCompletion !== undefined) {
      client.isChatCompletion = null;
    }
    if (client.contextHandlers) {
      client.contextHandlers = null;
    }
    if (client.augmentedPrompt) {
      client.augmentedPrompt = null;
    }
    if (client.systemMessage) {
      client.systemMessage = null;
    }
    if (client.azureEndpoint) {
      client.azureEndpoint = null;
    }
    if (client.langchainProxy) {
      client.langchainProxy = null;
    }
    if (client.isOmni !== undefined) {
      client.isOmni = null;
    }
    if (client.runManager) {
      client.runManager = null;
    }
    // Properties specific to AnthropicClient
    if (client.message_start) {
      client.message_start = null;
    }
    if (client.message_delta) {
      client.message_delta = null;
    }
    if (client.isClaudeLatest !== undefined) {
      client.isClaudeLatest = null;
    }
    if (client.useMessages !== undefined) {
      client.useMessages = null;
    }
    if (client.supportsCacheControl !== undefined) {
      client.supportsCacheControl = null;
    }
    // Properties specific to GoogleClient
    if (client.serviceKey) {
      client.serviceKey = null;
    }
    if (client.project_id) {
      client.project_id = null;
    }
    if (client.client_email) {
      client.client_email = null;
    }
    if (client.private_key) {
      client.private_key = null;
    }
    if (client.access_token) {
      client.access_token = null;
    }
    if (client.reverseProxyUrl) {
      client.reverseProxyUrl = null;
    }
    if (client.authHeader) {
      client.authHeader = null;
    }
    if (client.isGenerativeModel !== undefined) {
      client.isGenerativeModel = null;
    }
    // Properties specific to OpenAIClient
    if (client.ChatGPTClient) {
      client.ChatGPTClient = null;
    }
    if (client.completionsUrl) {
      client.completionsUrl = null;
    }
    if (client.shouldSummarize !== undefined) {
      client.shouldSummarize = null;
    }
    if (client.isOllama !== undefined) {
      client.isOllama = null;
    }
    if (client.FORCE_PROMPT !== undefined) {
      client.FORCE_PROMPT = null;
    }
    if (client.isChatGptModel !== undefined) {
      client.isChatGptModel = null;
    }
    if (client.isUnofficialChatGptModel !== undefined) {
      client.isUnofficialChatGptModel = null;
    }
    if (client.useOpenRouter !== undefined) {
      client.useOpenRouter = null;
    }
    if (client.startToken) {
      client.startToken = null;
    }
    if (client.endToken) {
      client.endToken = null;
    }
    if (client.userLabel) {
      client.userLabel = null;
    }
    if (client.chatGptLabel) {
      client.chatGptLabel = null;
    }
    if (client.modelLabel) {
      client.modelLabel = null;
    }
    if (client.modelOptions) {
      client.modelOptions = null;
    }
    if (client.defaultVisionModel) {
      client.defaultVisionModel = null;
    }
    if (client.maxPromptTokens) {
      client.maxPromptTokens = null;
    }
    if (client.maxResponseTokens) {
      client.maxResponseTokens = null;
    }
    if (client.processMemory) {
      client.processMemory = null;
    }
    if (client.run) {
      // Break circular references in run
      if (client.run.Graph) {
        client.run.Graph.resetValues();
        client.run.Graph.handlerRegistry = null;
        client.run.Graph.runId = null;
        client.run.Graph.tools = null;
        client.run.Graph.signal = null;
        client.run.Graph.config = null;
        client.run.Graph.toolEnd = null;
        client.run.Graph.toolMap = null;
        client.run.Graph.provider = null;
        client.run.Graph.streamBuffer = null;
        client.run.Graph.clientOptions = null;
        client.run.Graph.graphState = null;
        if (client.run.Graph.boundModel?.client) {
          client.run.Graph.boundModel.client = null;
        }
        client.run.Graph.boundModel = null;
        client.run.Graph.systemMessage = null;
        client.run.Graph.reasoningKey = null;
        client.run.Graph.messages = null;
        client.run.Graph.contentData = null;
        client.run.Graph.stepKeyIds = null;
        client.run.Graph.contentIndexMap = null;
        client.run.Graph.toolCallStepIds = null;
        client.run.Graph.messageIdsByStepKey = null;
        client.run.Graph.messageStepHasToolCalls = null;
        client.run.Graph.prelimMessageIdsByStepKey = null;
        client.run.Graph.currentTokenType = null;
        client.run.Graph.lastToken = null;
        client.run.Graph.tokenTypeSwitch = null;
        client.run.Graph.indexTokenCountMap = null;
        client.run.Graph.currentUsage = null;
        client.run.Graph.tokenCounter = null;
        client.run.Graph.maxContextTokens = null;
        client.run.Graph.pruneMessages = null;
        client.run.Graph.lastStreamCall = null;
        client.run.Graph.startIndex = null;
        client.run.Graph = null;
      }
      if (client.run.handlerRegistry) {
        client.run.handlerRegistry = null;
      }
      if (client.run.graphRunnable) {
        if (client.run.graphRunnable.channels) {
          client.run.graphRunnable.channels = null;
        }
        if (client.run.graphRunnable.nodes) {
          client.run.graphRunnable.nodes = null;
        }
        if (client.run.graphRunnable.lc_kwargs) {
          client.run.graphRunnable.lc_kwargs = null;
        }
        if (client.run.graphRunnable.builder?.nodes) {
          client.run.graphRunnable.builder.nodes = null;
          client.run.graphRunnable.builder = null;
        }
        client.run.graphRunnable = null;
      }
      client.run = null;
    }
    if (client.sendMessage) {
      client.sendMessage = null;
    }
    if (client.savedMessageIds) {
      client.savedMessageIds.clear();
      client.savedMessageIds = null;
    }
    if (client.currentMessages) {
      client.currentMessages = null;
    }
    if (client.streamHandler) {
      client.streamHandler = null;
    }
    if (client.contentParts) {
      client.contentParts = null;
    }
    if (client.abortController) {
      client.abortController = null;
    }
    if (client.collectedUsage) {
      client.collectedUsage = null;
    }
    if (client.indexTokenCountMap) {
      client.indexTokenCountMap = null;
    }
    if (client.agentConfigs) {
      client.agentConfigs = null;
    }
    if (client.artifactPromises) {
      client.artifactPromises = null;
    }
    if (client.usage) {
      client.usage = null;
    }
    if (typeof client.dispose === 'function') {
      client.dispose();
    }
    if (client.options) {
      if (client.options.req) {
        client.options.req = null;
      }
      if (client.options.res) {
        client.options.res = null;
      }
      if (client.options.attachments) {
        client.options.attachments = null;
      }
      if (client.options.agent) {
        client.options.agent = null;
      }
    }
    client.options = null;
  } catch (e) {
    // Ignore errors during disposal
  }
}

/**
 * Merges values from a streamed callback payload into the accumulated request
 * context; conversationId is only taken from the payload if not already set.
 */
function processReqData(data = {}, context) {
  let {
    abortKey,
    userMessage,
    userMessagePromise,
    responseMessageId,
    promptTokens,
    conversationId,
    userMessageId,
  } = context;
  for (const key in data) {
    if (key === 'userMessage') {
      userMessage = data[key];
      userMessageId = data[key].messageId;
    } else if (key === 'userMessagePromise') {
      userMessagePromise = data[key];
    } else if (key === 'responseMessageId') {
      responseMessageId = data[key];
    } else if (key === 'promptTokens') {
      promptTokens = data[key];
    } else if (key === 'abortKey') {
      abortKey = data[key];
    } else if (!conversationId && key === 'conversationId') {
      conversationId = data[key];
    }
  }
  return {
    abortKey,
    userMessage,
    userMessagePromise,
    responseMessageId,
    promptTokens,
    conversationId,
    userMessageId,
  };
}

module.exports = {
  disposeClient,
  requestDataMap,
  clientRegistry,
  processReqData,
};
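For context, a hedged sketch of how these exports might be wired into a request lifecycle; the handler shape, module path, and initializeClient dependency are illustrative assumptions, not code from the repository:

// Illustrative usage only (the actual require path and call sites will differ).
const { disposeClient, requestDataMap, clientRegistry, processReqData } = require('./disposeClient');

async function handleRequestSketch(req, res, initializeClient) {
  const { client } = await initializeClient({ req, res });

  // Register the client so the FinalizationRegistry logs when it is garbage collected.
  if (clientRegistry) {
    clientRegistry.register(client, { userId: req.user?.id }, client);
  }

  // Keep per-request data reachable only as long as the request object is alive.
  requestDataMap.set(req, { client });

  let context = { conversationId: req.body?.conversationId };
  try {
    // Callback data (userMessage, promptTokens, abortKey, ...) gets merged as it arrives.
    context = processReqData({ responseMessageId: 'example-id' }, context);
    // ... generate the response ...
  } finally {
    // Drop references so the client and its graph can be garbage collected promptly.
    disposeClient(client);
    requestDataMap.delete(req);
  }
}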