🤖 refactor: Improve Agents Memory Usage, Bump Keyv, Grok 3 (#6850)

* chore: remove unused redis file

* chore: bump keyv dependencies, and update related imports

* refactor: Implement IoRedis client for rate limiting across middleware, as node-redis via keyv is not compatible

* fix: Set max listeners to expected amount

* WIP: memory improvements

* refactor: Simplify getAbortData assignment in createAbortController

* refactor: Update getAbortData to use WeakRef for content management

* WIP: memory improvements in agent chat requests

* refactor: Enhance memory management with finalization registry and cleanup functions

* refactor: Simplify domainParser calls by removing unnecessary request parameter

* refactor: Update parameter types for action tools and agent loading functions to use minimal configs

* refactor: Simplify domainParser tests by removing unnecessary request parameter

* refactor: Simplify domainParser call by removing unnecessary request parameter

* refactor: Enhance client disposal by nullifying additional properties to improve memory management

* refactor: Improve title generation by adding abort controller and timeout handling, consolidate request cleanup

* refactor: Update checkIdleConnections to skip current user when checking for idle connections if passed

* refactor: Update createMCPTool to derive userId from config and handle abort signals

* refactor: Introduce createTokenCounter function and update tokenCounter usage; enhance disposeClient to reset Graph values

* refactor: Update getMCPManager to accept userId parameter for improved idle connection handling

* refactor: Extract logToolError function for improved error handling in AgentClient

* refactor: Update disposeClient to clear handlerRegistry and graphRunnable references in client.run

* refactor: Extract createHandleNewToken function to streamline token handling in initializeClient

* chore: bump @librechat/agents

* refactor: Improve timeout handling in addTitle function for better error management

* refactor: Introduce createFetch instead of using class method

* refactor: Enhance client disposal and request data handling in AskController and EditController

* refactor: Update import statements for AnthropicClient and OpenAIClient to use specific paths

* refactor: Use WeakRef for response handling in SplitStreamHandler to prevent memory leaks

* refactor: Simplify client disposal and rename getReqData to processReqData in AskController and EditController

* refactor: Improve logging structure and parameter handling in OpenAIClient

* refactor: Remove unused GraphEvents and improve stream event handling in AnthropicClient and OpenAIClient

* refactor: Simplify client initialization in AskController and EditController

* refactor: Remove unused mock functions and implement in-memory store for KeyvMongo

* chore: Update dependencies in package-lock.json to latest versions

* refactor: Await token usage recording in OpenAIClient to ensure proper async handling

* refactor: Remove handleAbort route from multiple endpoints and enhance client disposal logic

* refactor: Enhance abort controller logic by managing abortKey more effectively

* refactor: Add newConversation handling in useEventHandlers for improved conversation management

* fix: dropparams

* refactor: Use optional chaining for safer access to request properties in BaseClient

* refactor: Move client disposal and request data processing logic to cleanup module for better organization

* refactor: Remove aborted request check from addTitle function for cleaner logic

* feat: Add Grok 3 model pricing and update tests for new models

* chore: Remove trace warnings and inspect flags from backend start script used for debugging

* refactor: Replace user identifier handling with userId for consistency across controllers, use UserId in clientRegistry

* refactor: Enhance client disposal logic to prevent memory leaks by clearing additional references

* chore: Update @librechat/agents to version 2.4.14 in package.json and package-lock.json
Danny Avila 2025-04-12 18:46:36 -04:00 committed by GitHub
parent 1e6b1b9554
commit 37964975c1
68 changed files with 1796 additions and 623 deletions
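
The memory-usage changes below share one pattern: per-request data is keyed off the req object in a WeakMap, long-lived callbacks hold the client only through a WeakRef, and a FinalizationRegistry logs when a client is actually garbage collected. A minimal standalone sketch of that pattern, with illustrative names only (this is not the project's API):

// Sketch of the WeakMap / WeakRef / FinalizationRegistry pattern used throughout this commit.
const registry = new FinalizationRegistry((held) => {
  // Runs some time after the registered client has been garbage collected.
  console.debug(`client for user ${held.userId} was collected`);
});

const perRequestData = new WeakMap(); // entries vanish once `req` itself is unreachable

function trackClient(req, client, userId) {
  registry.register(client, { userId }, client); // third argument is the unregister token
  perRequestData.set(req, { client });
  const clientRef = new WeakRef(client);
  // The returned callback does not pin the client: it only dereferences the WeakRef on demand.
  return () => clientRef.deref()?.getStreamText?.() ?? '';
}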

api/server/cleanup.js (new file, 240 lines added)
View file

@ -0,0 +1,240 @@
const { logger } = require('~/config');
// WeakMap to hold temporary data associated with requests
const requestDataMap = new WeakMap();
const FinalizationRegistry = global.FinalizationRegistry || null;
/**
* FinalizationRegistry to clean up client objects when they are garbage collected.
* This is used to prevent memory leaks and ensure that client objects are
* properly disposed of when they are no longer needed.
* The registry holds a weak reference to the client object and a cleanup
* callback that is called when the client object is garbage collected.
* The callback can be used to perform any necessary cleanup operations,
* such as removing event listeners or freeing up resources.
*/
const clientRegistry = FinalizationRegistry
? new FinalizationRegistry((heldValue) => {
try {
// This will run when the client is garbage collected
if (heldValue && heldValue.userId) {
logger.debug(`[FinalizationRegistry] Cleaning up client for user ${heldValue.userId}`);
} else {
logger.debug('[FinalizationRegistry] Cleaning up client');
}
} catch (e) {
// Ignore errors
}
})
: null;
/**
* Cleans up the client object by removing references to its properties.
* This is useful for preventing memory leaks and ensuring that the client
* and its properties can be garbage collected when it is no longer needed.
*/
function disposeClient(client) {
if (!client) {
return;
}
try {
if (client.user) {
client.user = null;
}
if (client.apiKey) {
client.apiKey = null;
}
if (client.azure) {
client.azure = null;
}
if (client.conversationId) {
client.conversationId = null;
}
if (client.responseMessageId) {
client.responseMessageId = null;
}
if (client.clientName) {
client.clientName = null;
}
if (client.sender) {
client.sender = null;
}
if (client.model) {
client.model = null;
}
if (client.maxContextTokens) {
client.maxContextTokens = null;
}
if (client.contextStrategy) {
client.contextStrategy = null;
}
if (client.currentDateString) {
client.currentDateString = null;
}
if (client.inputTokensKey) {
client.inputTokensKey = null;
}
if (client.outputTokensKey) {
client.outputTokensKey = null;
}
if (client.run) {
// Break circular references in run
if (client.run.Graph) {
client.run.Graph.resetValues();
client.run.Graph.handlerRegistry = null;
client.run.Graph.runId = null;
client.run.Graph.tools = null;
client.run.Graph.signal = null;
client.run.Graph.config = null;
client.run.Graph.toolEnd = null;
client.run.Graph.toolMap = null;
client.run.Graph.provider = null;
client.run.Graph.streamBuffer = null;
client.run.Graph.clientOptions = null;
client.run.Graph.graphState = null;
client.run.Graph.boundModel = null;
client.run.Graph.systemMessage = null;
client.run.Graph.reasoningKey = null;
client.run.Graph.messages = null;
client.run.Graph.contentData = null;
client.run.Graph.stepKeyIds = null;
client.run.Graph.contentIndexMap = null;
client.run.Graph.toolCallStepIds = null;
client.run.Graph.messageIdsByStepKey = null;
client.run.Graph.messageStepHasToolCalls = null;
client.run.Graph.prelimMessageIdsByStepKey = null;
client.run.Graph.currentTokenType = null;
client.run.Graph.lastToken = null;
client.run.Graph.tokenTypeSwitch = null;
client.run.Graph.indexTokenCountMap = null;
client.run.Graph.currentUsage = null;
client.run.Graph.tokenCounter = null;
client.run.Graph.maxContextTokens = null;
client.run.Graph.pruneMessages = null;
client.run.Graph.lastStreamCall = null;
client.run.Graph.startIndex = null;
client.run.Graph = null;
}
if (client.run.handlerRegistry) {
client.run.handlerRegistry = null;
}
if (client.run.graphRunnable) {
if (client.run.graphRunnable.channels) {
client.run.graphRunnable.channels = null;
}
if (client.run.graphRunnable.nodes) {
client.run.graphRunnable.nodes = null;
}
if (client.run.graphRunnable.lc_kwargs) {
client.run.graphRunnable.lc_kwargs = null;
}
if (client.run.graphRunnable.builder?.nodes) {
client.run.graphRunnable.builder.nodes = null;
client.run.graphRunnable.builder = null;
}
client.run.graphRunnable = null;
}
client.run = null;
}
if (client.sendMessage) {
client.sendMessage = null;
}
if (client.savedMessageIds) {
client.savedMessageIds.clear();
client.savedMessageIds = null;
}
if (client.currentMessages) {
client.currentMessages = null;
}
if (client.streamHandler) {
client.streamHandler = null;
}
if (client.contentParts) {
client.contentParts = null;
}
if (client.abortController) {
client.abortController = null;
}
if (client.collectedUsage) {
client.collectedUsage = null;
}
if (client.indexTokenCountMap) {
client.indexTokenCountMap = null;
}
if (client.agentConfigs) {
client.agentConfigs = null;
}
if (client.artifactPromises) {
client.artifactPromises = null;
}
if (client.usage) {
client.usage = null;
}
if (typeof client.dispose === 'function') {
client.dispose();
}
if (client.options) {
if (client.options.req) {
client.options.req = null;
}
if (client.options.res) {
client.options.res = null;
}
if (client.options.attachments) {
client.options.attachments = null;
}
if (client.options.agent) {
client.options.agent = null;
}
}
client.options = null;
} catch (e) {
// Ignore errors during disposal
}
}
function processReqData(data = {}, context) {
let {
abortKey,
userMessage,
userMessagePromise,
responseMessageId,
promptTokens,
conversationId,
userMessageId,
} = context;
for (const key in data) {
if (key === 'userMessage') {
userMessage = data[key];
userMessageId = data[key].messageId;
} else if (key === 'userMessagePromise') {
userMessagePromise = data[key];
} else if (key === 'responseMessageId') {
responseMessageId = data[key];
} else if (key === 'promptTokens') {
promptTokens = data[key];
} else if (key === 'abortKey') {
abortKey = data[key];
} else if (!conversationId && key === 'conversationId') {
conversationId = data[key];
}
}
return {
abortKey,
userMessage,
userMessagePromise,
responseMessageId,
promptTokens,
conversationId,
userMessageId,
};
}
module.exports = {
disposeClient,
requestDataMap,
clientRegistry,
processReqData,
};
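
For orientation, the controllers further down consume this module along these lines (a condensed sketch, not a verbatim excerpt; handleRequest and initializeClient stand in for the real controller and endpoint initializer):

const { disposeClient, processReqData, clientRegistry, requestDataMap } = require('~/server/cleanup');

async function handleRequest(req, res, initializeClient) {
  let client = null;
  let context = {};
  try {
    ({ client } = await initializeClient({ req, res }));
    if (clientRegistry && client) {
      clientRegistry.register(client, { userId: req.user.id }, client);
    }
    requestDataMap.set(req, { client });
    // Merge incremental request data (userMessage, promptTokens, abortKey, ...) into one context object.
    context = processReqData({ abortKey: req.user.id }, context);
    // ... send the message, stream the response ...
  } finally {
    if (client) {
      disposeClient(client); // break references so the client can be garbage collected
      client = null;
    }
    requestDataMap.delete(req);
  }
}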

View file

@ -1,5 +1,15 @@
const { getResponseSender, Constants } = require('librechat-data-provider');
const { createAbortController, handleAbortError } = require('~/server/middleware');
const {
handleAbortError,
createAbortController,
cleanupAbortController,
} = require('~/server/middleware');
const {
disposeClient,
processReqData,
clientRegistry,
requestDataMap,
} = require('~/server/cleanup');
const { sendMessage, createOnProgress } = require('~/server/utils');
const { saveMessage } = require('~/models');
const { logger } = require('~/config');
@ -14,90 +24,162 @@ const AskController = async (req, res, next, initializeClient, addTitle) => {
overrideParentMessageId = null,
} = req.body;
let client = null;
let abortKey = null;
let cleanupHandlers = [];
let clientRef = null;
logger.debug('[AskController]', {
text,
conversationId,
...endpointOption,
modelsConfig: endpointOption.modelsConfig ? 'exists' : '',
modelsConfig: endpointOption?.modelsConfig ? 'exists' : '',
});
let userMessage;
let userMessagePromise;
let promptTokens;
let userMessageId;
let responseMessageId;
let userMessage = null;
let userMessagePromise = null;
let promptTokens = null;
let userMessageId = null;
let responseMessageId = null;
let getAbortData = null;
const sender = getResponseSender({
...endpointOption,
model: endpointOption.modelOptions.model,
modelDisplayLabel,
});
const newConvo = !conversationId;
const user = req.user.id;
const initialConversationId = conversationId;
const newConvo = !initialConversationId;
const userId = req.user.id;
const getReqData = (data = {}) => {
for (let key in data) {
if (key === 'userMessage') {
userMessage = data[key];
userMessageId = data[key].messageId;
} else if (key === 'userMessagePromise') {
userMessagePromise = data[key];
} else if (key === 'responseMessageId') {
responseMessageId = data[key];
} else if (key === 'promptTokens') {
promptTokens = data[key];
} else if (!conversationId && key === 'conversationId') {
conversationId = data[key];
}
}
let reqDataContext = {
userMessage,
userMessagePromise,
responseMessageId,
promptTokens,
conversationId,
userMessageId,
};
let getText;
const updateReqData = (data = {}) => {
reqDataContext = processReqData(data, reqDataContext);
abortKey = reqDataContext.abortKey;
userMessage = reqDataContext.userMessage;
userMessagePromise = reqDataContext.userMessagePromise;
responseMessageId = reqDataContext.responseMessageId;
promptTokens = reqDataContext.promptTokens;
conversationId = reqDataContext.conversationId;
userMessageId = reqDataContext.userMessageId;
};
let { onProgress: progressCallback, getPartialText } = createOnProgress();
const performCleanup = () => {
logger.debug('[AskController] Performing cleanup');
if (Array.isArray(cleanupHandlers)) {
for (const handler of cleanupHandlers) {
try {
if (typeof handler === 'function') {
handler();
}
} catch (e) {
// Ignore
}
}
}
if (abortKey) {
logger.debug('[AskController] Cleaning up abort controller');
cleanupAbortController(abortKey);
abortKey = null;
}
if (client) {
disposeClient(client);
client = null;
}
reqDataContext = null;
userMessage = null;
userMessagePromise = null;
promptTokens = null;
getAbortData = null;
progressCallback = null;
endpointOption = null;
cleanupHandlers = null;
addTitle = null;
if (requestDataMap.has(req)) {
requestDataMap.delete(req);
}
logger.debug('[AskController] Cleanup completed');
};
try {
const { client } = await initializeClient({ req, res, endpointOption });
const { onProgress: progressCallback, getPartialText } = createOnProgress();
({ client } = await initializeClient({ req, res, endpointOption }));
if (clientRegistry && client) {
clientRegistry.register(client, { userId }, client);
}
getText = client.getStreamText != null ? client.getStreamText.bind(client) : getPartialText;
if (client) {
requestDataMap.set(req, { client });
}
const getAbortData = () => ({
sender,
conversationId,
userMessagePromise,
messageId: responseMessageId,
parentMessageId: overrideParentMessageId ?? userMessageId,
text: getText(),
userMessage,
promptTokens,
});
clientRef = new WeakRef(client);
const { abortController, onStart } = createAbortController(req, res, getAbortData, getReqData);
getAbortData = () => {
const currentClient = clientRef.deref();
const currentText =
currentClient?.getStreamText != null ? currentClient.getStreamText() : getPartialText();
res.on('close', () => {
return {
sender,
conversationId,
messageId: reqDataContext.responseMessageId,
parentMessageId: overrideParentMessageId ?? userMessageId,
text: currentText,
userMessage: userMessage,
userMessagePromise: userMessagePromise,
promptTokens: reqDataContext.promptTokens,
};
};
const { onStart, abortController } = createAbortController(
req,
res,
getAbortData,
updateReqData,
);
const closeHandler = () => {
logger.debug('[AskController] Request closed');
if (!abortController) {
return;
} else if (abortController.signal.aborted) {
return;
} else if (abortController.requestCompleted) {
if (!abortController || abortController.signal.aborted || abortController.requestCompleted) {
return;
}
abortController.abort();
logger.debug('[AskController] Request aborted on close');
};
res.on('close', closeHandler);
cleanupHandlers.push(() => {
try {
res.removeListener('close', closeHandler);
} catch (e) {
// Ignore
}
});
const messageOptions = {
user,
user: userId,
parentMessageId,
conversationId,
conversationId: reqDataContext.conversationId,
overrideParentMessageId,
getReqData,
getReqData: updateReqData,
onStart,
abortController,
progressCallback,
progressOptions: {
res,
// parentMessageId: overrideParentMessageId || userMessageId,
},
};
@ -105,59 +187,94 @@ const AskController = async (req, res, next, initializeClient, addTitle) => {
let response = await client.sendMessage(text, messageOptions);
response.endpoint = endpointOption.endpoint;
const { conversation = {} } = await client.responsePromise;
const databasePromise = response.databasePromise;
delete response.databasePromise;
const { conversation: convoData = {} } = await databasePromise;
const conversation = { ...convoData };
conversation.title =
conversation && !conversation.title ? null : conversation?.title || 'New Chat';
if (client.options.attachments) {
userMessage.files = client.options.attachments;
conversation.model = endpointOption.modelOptions.model;
delete userMessage.image_urls;
const latestUserMessage = reqDataContext.userMessage;
if (client?.options?.attachments && latestUserMessage) {
latestUserMessage.files = client.options.attachments;
if (endpointOption?.modelOptions?.model) {
conversation.model = endpointOption.modelOptions.model;
}
delete latestUserMessage.image_urls;
}
if (!abortController.signal.aborted) {
const finalResponseMessage = { ...response };
sendMessage(res, {
final: true,
conversation,
title: conversation.title,
requestMessage: userMessage,
responseMessage: response,
requestMessage: latestUserMessage,
responseMessage: finalResponseMessage,
});
res.end();
if (!client.savedMessageIds.has(response.messageId)) {
if (client?.savedMessageIds && !client.savedMessageIds.has(response.messageId)) {
await saveMessage(
req,
{ ...response, user },
{ ...finalResponseMessage, user: userId },
{ context: 'api/server/controllers/AskController.js - response end' },
);
}
}
if (!client.skipSaveUserMessage) {
await saveMessage(req, userMessage, {
if (!client?.skipSaveUserMessage && latestUserMessage) {
await saveMessage(req, latestUserMessage, {
context: 'api/server/controllers/AskController.js - don\'t skip saving user message',
});
}
if (addTitle && parentMessageId === Constants.NO_PARENT && newConvo) {
if (typeof addTitle === 'function' && parentMessageId === Constants.NO_PARENT && newConvo) {
addTitle(req, {
text,
response,
response: { ...response },
client,
});
})
.then(() => {
logger.debug('[AskController] Title generation started');
})
.catch((err) => {
logger.error('[AskController] Error in title generation', err);
})
.finally(() => {
logger.debug('[AskController] Title generation completed');
performCleanup();
});
} else {
performCleanup();
}
} catch (error) {
const partialText = getText && getText();
logger.error('[AskController] Error handling request', error);
let partialText = '';
try {
const currentClient = clientRef.deref();
partialText =
currentClient?.getStreamText != null ? currentClient.getStreamText() : getPartialText();
} catch (getTextError) {
logger.error('[AskController] Error calling getText() during error handling', getTextError);
}
handleAbortError(res, req, error, {
sender,
partialText,
conversationId,
messageId: responseMessageId,
parentMessageId: overrideParentMessageId ?? userMessageId ?? parentMessageId,
}).catch((err) => {
logger.error('[AskController] Error in `handleAbortError`', err);
});
conversationId: reqDataContext.conversationId,
messageId: reqDataContext.responseMessageId,
parentMessageId: overrideParentMessageId ?? reqDataContext.userMessageId ?? parentMessageId,
})
.catch((err) => {
logger.error('[AskController] Error in `handleAbortError` during catch block', err);
})
.finally(() => {
performCleanup();
});
}
};

View file

@ -1,5 +1,15 @@
const { getResponseSender } = require('librechat-data-provider');
const { createAbortController, handleAbortError } = require('~/server/middleware');
const {
handleAbortError,
createAbortController,
cleanupAbortController,
} = require('~/server/middleware');
const {
disposeClient,
processReqData,
clientRegistry,
requestDataMap,
} = require('~/server/cleanup');
const { sendMessage, createOnProgress } = require('~/server/utils');
const { saveMessage } = require('~/models');
const { logger } = require('~/config');
@ -17,6 +27,11 @@ const EditController = async (req, res, next, initializeClient) => {
overrideParentMessageId = null,
} = req.body;
let client = null;
let abortKey = null;
let cleanupHandlers = [];
let clientRef = null; // Declare clientRef here
logger.debug('[EditController]', {
text,
generation,
@ -26,123 +41,204 @@ const EditController = async (req, res, next, initializeClient) => {
modelsConfig: endpointOption.modelsConfig ? 'exists' : '',
});
let userMessage;
let userMessagePromise;
let promptTokens;
let userMessage = null;
let userMessagePromise = null;
let promptTokens = null;
let getAbortData = null;
const sender = getResponseSender({
...endpointOption,
model: endpointOption.modelOptions.model,
modelDisplayLabel,
});
const userMessageId = parentMessageId;
const user = req.user.id;
const userId = req.user.id;
const getReqData = (data = {}) => {
for (let key in data) {
if (key === 'userMessage') {
userMessage = data[key];
} else if (key === 'userMessagePromise') {
userMessagePromise = data[key];
} else if (key === 'responseMessageId') {
responseMessageId = data[key];
} else if (key === 'promptTokens') {
promptTokens = data[key];
}
}
let reqDataContext = { userMessage, userMessagePromise, responseMessageId, promptTokens };
const updateReqData = (data = {}) => {
reqDataContext = processReqData(data, reqDataContext);
abortKey = reqDataContext.abortKey;
userMessage = reqDataContext.userMessage;
userMessagePromise = reqDataContext.userMessagePromise;
responseMessageId = reqDataContext.responseMessageId;
promptTokens = reqDataContext.promptTokens;
};
const { onProgress: progressCallback, getPartialText } = createOnProgress({
let { onProgress: progressCallback, getPartialText } = createOnProgress({
generation,
});
let getText;
const performCleanup = () => {
logger.debug('[EditController] Performing cleanup');
if (Array.isArray(cleanupHandlers)) {
for (const handler of cleanupHandlers) {
try {
if (typeof handler === 'function') {
handler();
}
} catch (e) {
// Ignore
}
}
}
if (abortKey) {
logger.debug('[EditController] Cleaning up abort controller');
cleanupAbortController(abortKey);
abortKey = null;
}
if (client) {
disposeClient(client);
client = null;
}
reqDataContext = null;
userMessage = null;
userMessagePromise = null;
promptTokens = null;
getAbortData = null;
progressCallback = null;
endpointOption = null;
cleanupHandlers = null;
if (requestDataMap.has(req)) {
requestDataMap.delete(req);
}
logger.debug('[EditController] Cleanup completed');
};
try {
const { client } = await initializeClient({ req, res, endpointOption });
({ client } = await initializeClient({ req, res, endpointOption }));
getText = client.getStreamText != null ? client.getStreamText.bind(client) : getPartialText;
if (clientRegistry && client) {
clientRegistry.register(client, { userId }, client);
}
const getAbortData = () => ({
conversationId,
userMessagePromise,
messageId: responseMessageId,
sender,
parentMessageId: overrideParentMessageId ?? userMessageId,
text: getText(),
userMessage,
promptTokens,
});
if (client) {
requestDataMap.set(req, { client });
}
const { abortController, onStart } = createAbortController(req, res, getAbortData, getReqData);
clientRef = new WeakRef(client);
res.on('close', () => {
getAbortData = () => {
const currentClient = clientRef.deref();
const currentText =
currentClient?.getStreamText != null ? currentClient.getStreamText() : getPartialText();
return {
sender,
conversationId,
messageId: reqDataContext.responseMessageId,
parentMessageId: overrideParentMessageId ?? userMessageId,
text: currentText,
userMessage: userMessage,
userMessagePromise: userMessagePromise,
promptTokens: reqDataContext.promptTokens,
};
};
const { onStart, abortController } = createAbortController(
req,
res,
getAbortData,
updateReqData,
);
const closeHandler = () => {
logger.debug('[EditController] Request closed');
if (!abortController) {
return;
} else if (abortController.signal.aborted) {
return;
} else if (abortController.requestCompleted) {
if (!abortController || abortController.signal.aborted || abortController.requestCompleted) {
return;
}
abortController.abort();
logger.debug('[EditController] Request aborted on close');
};
res.on('close', closeHandler);
cleanupHandlers.push(() => {
try {
res.removeListener('close', closeHandler);
} catch (e) {
// Ignore
}
});
let response = await client.sendMessage(text, {
user,
user: userId,
generation,
isContinued,
isEdited: true,
conversationId,
parentMessageId,
responseMessageId,
responseMessageId: reqDataContext.responseMessageId,
overrideParentMessageId,
getReqData,
getReqData: updateReqData,
onStart,
abortController,
progressCallback,
progressOptions: {
res,
// parentMessageId: overrideParentMessageId || userMessageId,
},
});
const { conversation = {} } = await client.responsePromise;
const databasePromise = response.databasePromise;
delete response.databasePromise;
const { conversation: convoData = {} } = await databasePromise;
const conversation = { ...convoData };
conversation.title =
conversation && !conversation.title ? null : conversation?.title || 'New Chat';
if (client.options.attachments) {
if (client?.options?.attachments && endpointOption?.modelOptions?.model) {
conversation.model = endpointOption.modelOptions.model;
}
if (!abortController.signal.aborted) {
const finalUserMessage = reqDataContext.userMessage;
const finalResponseMessage = { ...response };
sendMessage(res, {
final: true,
conversation,
title: conversation.title,
requestMessage: userMessage,
responseMessage: response,
requestMessage: finalUserMessage,
responseMessage: finalResponseMessage,
});
res.end();
await saveMessage(
req,
{ ...response, user },
{ ...finalResponseMessage, user: userId },
{ context: 'api/server/controllers/EditController.js - response end' },
);
}
performCleanup();
} catch (error) {
const partialText = getText();
logger.error('[EditController] Error handling request', error);
let partialText = '';
try {
const currentClient = clientRef.deref();
partialText =
currentClient?.getStreamText != null ? currentClient.getStreamText() : getPartialText();
} catch (getTextError) {
logger.error('[EditController] Error calling getText() during error handling', getTextError);
}
handleAbortError(res, req, error, {
sender,
partialText,
conversationId,
messageId: responseMessageId,
messageId: reqDataContext.responseMessageId,
parentMessageId: overrideParentMessageId ?? userMessageId ?? parentMessageId,
}).catch((err) => {
logger.error('[EditController] Error in `handleAbortError`', err);
});
})
.catch((err) => {
logger.error('[EditController] Error in `handleAbortError` during catch block', err);
})
.finally(() => {
performCleanup();
});
}
};

View file

@ -63,6 +63,21 @@ const noSystemModelRegex = [/\bo1\b/gi];
// const { getFormattedMemories } = require('~/models/Memory');
// const { getCurrentDateTime } = require('~/utils');
function createTokenCounter(encoding) {
return (message) => {
const countTokens = (text) => Tokenizer.getTokenCount(text, encoding);
return getTokenCountForMessage(message, countTokens);
};
}
function logToolError(graph, error, toolId) {
logger.error(
'[api/server/controllers/agents/client.js #chatCompletion] Tool Error',
error,
toolId,
);
}
class AgentClient extends BaseClient {
constructor(options = {}) {
super(null, options);
@ -535,6 +550,10 @@ class AgentClient extends BaseClient {
}
async chatCompletion({ payload, abortController = null }) {
/** @type {Partial<RunnableConfig> & { version: 'v1' | 'v2'; run_id?: string; streamMode: string }} */
let config;
/** @type {ReturnType<createRun>} */
let run;
try {
if (!abortController) {
abortController = new AbortController();
@ -632,11 +651,11 @@ class AgentClient extends BaseClient {
/** @type {TCustomConfig['endpoints']['agents']} */
const agentsEConfig = this.options.req.app.locals[EModelEndpoint.agents];
/** @type {Partial<RunnableConfig> & { version: 'v1' | 'v2'; run_id?: string; streamMode: string }} */
const config = {
config = {
configurable: {
thread_id: this.conversationId,
last_agent_index: this.agentConfigs?.size ?? 0,
user_id: this.user ?? this.options.req.user?.id,
hide_sequential_outputs: this.options.agent.hide_sequential_outputs,
},
recursionLimit: agentsEConfig?.recursionLimit,
@ -655,15 +674,6 @@ class AgentClient extends BaseClient {
initialMessages = formatContentStrings(initialMessages);
}
/** @type {ReturnType<createRun>} */
let run;
const countTokens = ((text) => this.getTokenCount(text)).bind(this);
/** @type {(message: BaseMessage) => number} */
const tokenCounter = (message) => {
return getTokenCountForMessage(message, countTokens);
};
/**
*
* @param {Agent} agent
@ -767,19 +777,14 @@ class AgentClient extends BaseClient {
run.Graph.contentData = contentData;
}
const encoding = this.getEncoding();
await run.processStream({ messages }, config, {
keepContent: i !== 0,
tokenCounter,
tokenCounter: createTokenCounter(encoding),
indexTokenCountMap: currentIndexCountMap,
maxContextTokens: agent.maxContextTokens,
callbacks: {
[Callback.TOOL_ERROR]: (graph, error, toolId) => {
logger.error(
'[api/server/controllers/agents/client.js #chatCompletion] Tool Error',
error,
toolId,
);
},
[Callback.TOOL_ERROR]: logToolError,
},
});
};
@ -809,6 +814,8 @@ class AgentClient extends BaseClient {
break;
}
}
const encoding = this.getEncoding();
const tokenCounter = createTokenCounter(encoding);
for (const [agentId, agent] of this.agentConfigs) {
if (abortController.signal.aborted === true) {
break;
@ -917,7 +924,7 @@ class AgentClient extends BaseClient {
* @param {string} params.text
* @param {string} params.conversationId
*/
async titleConvo({ text }) {
async titleConvo({ text, abortController }) {
if (!this.run) {
throw new Error('Run not initialized');
}
@ -950,6 +957,7 @@ class AgentClient extends BaseClient {
contentParts: this.contentParts,
clientOptions,
chainOptions: {
signal: abortController.signal,
callbacks: [
{
handleLLMEnd,
@ -975,7 +983,7 @@ class AgentClient extends BaseClient {
};
});
this.recordCollectedUsage({
await this.recordCollectedUsage({
model: clientOptions.model,
context: 'title',
collectedUsage,

View file

@ -1,5 +1,10 @@
const { Constants } = require('librechat-data-provider');
const { createAbortController, handleAbortError } = require('~/server/middleware');
const {
handleAbortError,
createAbortController,
cleanupAbortController,
} = require('~/server/middleware');
const { disposeClient, clientRegistry, requestDataMap } = require('~/server/cleanup');
const { sendMessage } = require('~/server/utils');
const { saveMessage } = require('~/models');
const { logger } = require('~/config');
@ -14,16 +19,22 @@ const AgentController = async (req, res, next, initializeClient, addTitle) => {
} = req.body;
let sender;
let abortKey;
let userMessage;
let promptTokens;
let userMessageId;
let responseMessageId;
let userMessagePromise;
let getAbortData;
let client = null;
// Initialize as an array
let cleanupHandlers = [];
const newConvo = !conversationId;
const user = req.user.id;
const userId = req.user.id;
const getReqData = (data = {}) => {
// Create handler to avoid capturing the entire parent scope
let getReqData = (data = {}) => {
for (let key in data) {
if (key === 'userMessage') {
userMessage = data[key];
@ -36,30 +47,96 @@ const AgentController = async (req, res, next, initializeClient, addTitle) => {
promptTokens = data[key];
} else if (key === 'sender') {
sender = data[key];
} else if (key === 'abortKey') {
abortKey = data[key];
} else if (!conversationId && key === 'conversationId') {
conversationId = data[key];
}
}
};
// Create a function to handle final cleanup
const performCleanup = () => {
logger.debug('[AgentController] Performing cleanup');
// Make sure cleanupHandlers is an array before iterating
if (Array.isArray(cleanupHandlers)) {
// Execute all cleanup handlers
for (const handler of cleanupHandlers) {
try {
if (typeof handler === 'function') {
handler();
}
} catch (e) {
// Ignore cleanup errors
}
}
}
// Clean up abort controller
if (abortKey) {
logger.debug('[AgentController] Cleaning up abort controller');
cleanupAbortController(abortKey);
}
// Dispose client properly
if (client) {
disposeClient(client);
}
// Clear all references
client = null;
getReqData = null;
userMessage = null;
getAbortData = null;
endpointOption.agent = null;
endpointOption = null;
cleanupHandlers = null;
userMessagePromise = null;
// Clear request data map
if (requestDataMap.has(req)) {
requestDataMap.delete(req);
}
logger.debug('[AgentController] Cleanup completed');
};
try {
/** @type {{ client: TAgentClient }} */
const { client } = await initializeClient({ req, res, endpointOption });
const result = await initializeClient({ req, res, endpointOption });
client = result.client;
const getAbortData = () => ({
sender,
userMessage,
promptTokens,
conversationId,
userMessagePromise,
messageId: responseMessageId,
content: client.getContentParts(),
parentMessageId: overrideParentMessageId ?? userMessageId,
});
// Register client with finalization registry if available
if (clientRegistry) {
clientRegistry.register(client, { userId }, client);
}
// Store request data in WeakMap keyed by req object
requestDataMap.set(req, { client });
// Use WeakRef to allow GC but still access content if it exists
const contentRef = new WeakRef(client.contentParts || []);
// Minimize closure scope - only capture small primitives and WeakRef
getAbortData = () => {
// Dereference WeakRef each time
const content = contentRef.deref();
return {
sender,
content: content || [],
userMessage,
promptTokens,
conversationId,
userMessagePromise,
messageId: responseMessageId,
parentMessageId: overrideParentMessageId ?? userMessageId,
};
};
const { abortController, onStart } = createAbortController(req, res, getAbortData, getReqData);
res.on('close', () => {
// Simple handler to avoid capturing scope
const closeHandler = () => {
logger.debug('[AgentController] Request closed');
if (!abortController) {
return;
@ -71,10 +148,19 @@ const AgentController = async (req, res, next, initializeClient, addTitle) => {
abortController.abort();
logger.debug('[AgentController] Request aborted on close');
};
res.on('close', closeHandler);
cleanupHandlers.push(() => {
try {
res.removeListener('close', closeHandler);
} catch (e) {
// Ignore
}
});
const messageOptions = {
user,
user: userId,
onStart,
getReqData,
conversationId,
@ -83,69 +169,103 @@ const AgentController = async (req, res, next, initializeClient, addTitle) => {
overrideParentMessageId,
progressOptions: {
res,
// parentMessageId: overrideParentMessageId || userMessageId,
},
};
let response = await client.sendMessage(text, messageOptions);
response.endpoint = endpointOption.endpoint;
const { conversation = {} } = await client.responsePromise;
// Extract what we need and immediately break reference
const messageId = response.messageId;
const endpoint = endpointOption.endpoint;
response.endpoint = endpoint;
// Store database promise locally
const databasePromise = response.databasePromise;
delete response.databasePromise;
// Resolve database-related data
const { conversation: convoData = {} } = await databasePromise;
const conversation = { ...convoData };
conversation.title =
conversation && !conversation.title ? null : conversation?.title || 'New Chat';
if (req.body.files && client.options.attachments) {
// Process files if needed
if (req.body.files && client.options?.attachments) {
userMessage.files = [];
const messageFiles = new Set(req.body.files.map((file) => file.file_id));
for (let attachment of client.options.attachments) {
if (messageFiles.has(attachment.file_id)) {
userMessage.files.push(attachment);
userMessage.files.push({ ...attachment });
}
}
delete userMessage.image_urls;
}
// Only send if not aborted
if (!abortController.signal.aborted) {
// Create a new response object with minimal copies
const finalResponse = { ...response };
sendMessage(res, {
final: true,
conversation,
title: conversation.title,
requestMessage: userMessage,
responseMessage: response,
responseMessage: finalResponse,
});
res.end();
if (!client.savedMessageIds.has(response.messageId)) {
// Save the message if needed
if (client.savedMessageIds && !client.savedMessageIds.has(messageId)) {
await saveMessage(
req,
{ ...response, user },
{ ...finalResponse, user: userId },
{ context: 'api/server/controllers/agents/request.js - response end' },
);
}
}
// Save user message if needed
if (!client.skipSaveUserMessage) {
await saveMessage(req, userMessage, {
context: 'api/server/controllers/agents/request.js - don\'t skip saving user message',
});
}
// Add title if needed - extract minimal data
if (addTitle && parentMessageId === Constants.NO_PARENT && newConvo) {
addTitle(req, {
text,
response,
response: { ...response },
client,
});
})
.then(() => {
logger.debug('[AgentController] Title generation started');
})
.catch((err) => {
logger.error('[AgentController] Error in title generation', err);
})
.finally(() => {
logger.debug('[AgentController] Title generation completed');
performCleanup();
});
} else {
performCleanup();
}
} catch (error) {
// Handle error without capturing much scope
handleAbortError(res, req, error, {
conversationId,
sender,
messageId: responseMessageId,
parentMessageId: overrideParentMessageId ?? userMessageId ?? parentMessageId,
}).catch((err) => {
logger.error('[api/server/controllers/agents/request] Error in `handleAbortError`', err);
});
})
.catch((err) => {
logger.error('[api/server/controllers/agents/request] Error in `handleAbortError`', err);
})
.finally(() => {
performCleanup();
});
}
};

View file

@ -1,3 +1,4 @@
// abortMiddleware.js
const { isAssistantsEndpoint, ErrorTypes } = require('librechat-data-provider');
const { sendMessage, sendError, countTokens, isEnabled } = require('~/server/utils');
const { truncateText, smartTruncateText } = require('~/app/clients/prompts');
@ -8,6 +9,68 @@ const { saveMessage, getConvo } = require('~/models');
const { abortRun } = require('./abortRun');
const { logger } = require('~/config');
const abortDataMap = new WeakMap();
function cleanupAbortController(abortKey) {
if (!abortControllers.has(abortKey)) {
return false;
}
const { abortController } = abortControllers.get(abortKey);
if (!abortController) {
abortControllers.delete(abortKey);
return true;
}
// 1. Check if this controller has any composed signals and clean them up
try {
// This creates a temporary composed signal to use for cleanup
const composedSignal = AbortSignal.any([abortController.signal]);
// Get all event types - in practice, AbortSignal typically only uses 'abort'
const eventTypes = ['abort'];
// First, execute a dummy listener removal to handle potential composed signals
for (const eventType of eventTypes) {
const dummyHandler = () => {};
composedSignal.addEventListener(eventType, dummyHandler);
composedSignal.removeEventListener(eventType, dummyHandler);
const listeners = composedSignal.listeners?.(eventType) || [];
for (const listener of listeners) {
composedSignal.removeEventListener(eventType, listener);
}
}
} catch (e) {
logger.debug(`Error cleaning up composed signals: ${e}`);
}
// 2. Abort the controller if not already aborted
if (!abortController.signal.aborted) {
abortController.abort();
}
// 3. Remove from registry
abortControllers.delete(abortKey);
// 4. Clean up any data stored in the WeakMap
if (abortDataMap.has(abortController)) {
abortDataMap.delete(abortController);
}
// 5. Clean up function references on the controller
if (abortController.getAbortData) {
abortController.getAbortData = null;
}
if (abortController.abortCompletion) {
abortController.abortCompletion = null;
}
return true;
}
async function abortMessage(req, res) {
let { abortKey, endpoint } = req.body;
@ -29,19 +92,19 @@ async function abortMessage(req, res) {
if (!abortController) {
return res.status(204).send({ message: 'Request not found' });
}
const finalEvent = await abortController.abortCompletion();
const finalEvent = await abortController.abortCompletion?.();
logger.debug(
`[abortMessage] ID: ${req.user.id} | ${req.user.email} | Aborted request: ` +
JSON.stringify({ abortKey }),
);
abortControllers.delete(abortKey);
cleanupAbortController(abortKey);
if (res.headersSent && finalEvent) {
return sendMessage(res, finalEvent);
}
res.setHeader('Content-Type', 'application/json');
res.send(JSON.stringify(finalEvent));
}
@ -62,8 +125,48 @@ const createAbortController = (req, res, getAbortData, getReqData) => {
const abortController = new AbortController();
const { endpointOption } = req.body;
// Store minimal data in WeakMap to avoid circular references
abortDataMap.set(abortController, {
getAbortDataFn: getAbortData,
userId: req.user.id,
endpoint: endpointOption.endpoint,
iconURL: endpointOption.iconURL,
model: endpointOption.modelOptions?.model || endpointOption.model_parameters?.model,
});
// Replace the direct function reference with a wrapper that uses WeakMap
abortController.getAbortData = function () {
return getAbortData();
const data = abortDataMap.get(this);
if (!data || typeof data.getAbortDataFn !== 'function') {
return {};
}
try {
const result = data.getAbortDataFn();
// Create a copy without circular references
const cleanResult = { ...result };
// If userMessagePromise exists, break its reference to client
if (
cleanResult.userMessagePromise &&
typeof cleanResult.userMessagePromise.then === 'function'
) {
// Create a new promise that fulfills with the same result but doesn't reference the original
const originalPromise = cleanResult.userMessagePromise;
cleanResult.userMessagePromise = new Promise((resolve, reject) => {
originalPromise.then(
(result) => resolve({ ...result }),
(error) => reject(error),
);
});
}
return cleanResult;
} catch (err) {
logger.error('[abortController.getAbortData] Error:', err);
return {};
}
};
/**
@ -74,6 +177,7 @@ const createAbortController = (req, res, getAbortData, getReqData) => {
sendMessage(res, { message: userMessage, created: true });
const abortKey = userMessage?.conversationId ?? req.user.id;
getReqData({ abortKey });
const prevRequest = abortControllers.get(abortKey);
const { overrideUserMessageId } = req?.body ?? {};
@ -81,34 +185,74 @@ const createAbortController = (req, res, getAbortData, getReqData) => {
const data = prevRequest.abortController.getAbortData();
getReqData({ userMessage: data?.userMessage });
const addedAbortKey = `${abortKey}:${responseMessageId}`;
abortControllers.set(addedAbortKey, { abortController, ...endpointOption });
res.on('finish', function () {
abortControllers.delete(addedAbortKey);
});
// Store minimal options
const minimalOptions = {
endpoint: endpointOption.endpoint,
iconURL: endpointOption.iconURL,
model: endpointOption.modelOptions?.model || endpointOption.model_parameters?.model,
};
abortControllers.set(addedAbortKey, { abortController, ...minimalOptions });
// Use a simple function for cleanup to avoid capturing context
const cleanupHandler = () => {
try {
cleanupAbortController(addedAbortKey);
} catch (e) {
// Ignore cleanup errors
}
};
res.on('finish', cleanupHandler);
return;
}
abortControllers.set(abortKey, { abortController, ...endpointOption });
// Store minimal options
const minimalOptions = {
endpoint: endpointOption.endpoint,
iconURL: endpointOption.iconURL,
model: endpointOption.modelOptions?.model || endpointOption.model_parameters?.model,
};
res.on('finish', function () {
abortControllers.delete(abortKey);
});
abortControllers.set(abortKey, { abortController, ...minimalOptions });
// Use a simple function for cleanup to avoid capturing context
const cleanupHandler = () => {
try {
cleanupAbortController(abortKey);
} catch (e) {
// Ignore cleanup errors
}
};
res.on('finish', cleanupHandler);
};
// Define abortCompletion without capturing the entire parent scope
abortController.abortCompletion = async function () {
abortController.abort();
this.abort();
// Get data from WeakMap
const ctrlData = abortDataMap.get(this);
if (!ctrlData || !ctrlData.getAbortDataFn) {
return { final: true, conversation: {}, title: 'New Chat' };
}
// Get abort data using stored function
const { conversationId, userMessage, userMessagePromise, promptTokens, ...responseData } =
getAbortData();
ctrlData.getAbortDataFn();
const completionTokens = await countTokens(responseData?.text ?? '');
const user = req.user.id;
const user = ctrlData.userId;
const responseMessage = {
...responseData,
conversationId,
finish_reason: 'incomplete',
endpoint: endpointOption.endpoint,
iconURL: endpointOption.iconURL,
model: endpointOption.modelOptions?.model ?? endpointOption.model_parameters?.model,
endpoint: ctrlData.endpoint,
iconURL: ctrlData.iconURL,
model: ctrlData.model,
unfinished: false,
error: false,
isCreatedByUser: false,
@ -130,10 +274,12 @@ const createAbortController = (req, res, getAbortData, getReqData) => {
if (userMessagePromise) {
const resolved = await userMessagePromise;
conversation = resolved?.conversation;
// Break reference to promise
resolved.conversation = null;
}
if (!conversation) {
conversation = await getConvo(req.user.id, conversationId);
conversation = await getConvo(user, conversationId);
}
return {
@ -218,11 +364,12 @@ const handleAbortError = async (res, req, error, data) => {
};
}
// Create a simple callback without capturing parent scope
const callback = async () => {
if (abortControllers.has(conversationId)) {
const { abortController } = abortControllers.get(conversationId);
abortController.abort();
abortControllers.delete(conversationId);
try {
cleanupAbortController(conversationId);
} catch (e) {
// Ignore cleanup errors
}
};
@ -243,6 +390,7 @@ const handleAbortError = async (res, req, error, data) => {
module.exports = {
handleAbort,
createAbortController,
handleAbortError,
createAbortController,
cleanupAbortController,
};

View file

@ -1,4 +1,4 @@
const Keyv = require('keyv');
const { Keyv } = require('keyv');
const uap = require('ua-parser-js');
const { ViolationTypes } = require('librechat-data-provider');
const { isEnabled, removePorts } = require('~/server/utils');

View file

@ -1,10 +1,9 @@
const Keyv = require('keyv');
const rateLimit = require('express-rate-limit');
const { RedisStore } = require('rate-limit-redis');
const { ViolationTypes } = require('librechat-data-provider');
const ioredisClient = require('~/cache/ioredisClient');
const logViolation = require('~/cache/logViolation');
const { isEnabled } = require('~/server/utils');
const keyvRedis = require('~/cache/keyvRedis');
const { logger } = require('~/config');
const getEnvironmentVariables = () => {
@ -67,11 +66,9 @@ const createImportLimiters = () => {
},
};
if (isEnabled(process.env.USE_REDIS)) {
if (isEnabled(process.env.USE_REDIS) && ioredisClient) {
logger.debug('Using Redis for import rate limiters.');
const keyv = new Keyv({ store: keyvRedis });
const client = keyv.opts.store.redis;
const sendCommand = (...args) => client.call(...args);
const sendCommand = (...args) => ioredisClient.call(...args);
const ipStore = new RedisStore({
sendCommand,
prefix: 'import_ip_limiter:',

View file

@ -1,8 +1,7 @@
const Keyv = require('keyv');
const rateLimit = require('express-rate-limit');
const { RedisStore } = require('rate-limit-redis');
const { removePorts, isEnabled } = require('~/server/utils');
const keyvRedis = require('~/cache/keyvRedis');
const ioredisClient = require('~/cache/ioredisClient');
const { logViolation } = require('~/cache');
const { logger } = require('~/config');
@ -31,13 +30,10 @@ const limiterOptions = {
keyGenerator: removePorts,
};
if (isEnabled(process.env.USE_REDIS)) {
if (isEnabled(process.env.USE_REDIS) && ioredisClient) {
logger.debug('Using Redis for login rate limiter.');
const keyv = new Keyv({ store: keyvRedis });
const client = keyv.opts.store.redis;
const sendCommand = (...args) => client.call(...args);
const store = new RedisStore({
sendCommand,
sendCommand: (...args) => ioredisClient.call(...args),
prefix: 'login_limiter:',
});
limiterOptions.store = store;

View file

@ -1,9 +1,8 @@
const Keyv = require('keyv');
const rateLimit = require('express-rate-limit');
const { RedisStore } = require('rate-limit-redis');
const denyRequest = require('~/server/middleware/denyRequest');
const ioredisClient = require('~/cache/ioredisClient');
const { isEnabled } = require('~/server/utils');
const keyvRedis = require('~/cache/keyvRedis');
const { logViolation } = require('~/cache');
const { logger } = require('~/config');
@ -63,11 +62,9 @@ const userLimiterOptions = {
},
};
if (isEnabled(process.env.USE_REDIS)) {
if (isEnabled(process.env.USE_REDIS) && ioredisClient) {
logger.debug('Using Redis for message rate limiters.');
const keyv = new Keyv({ store: keyvRedis });
const client = keyv.opts.store.redis;
const sendCommand = (...args) => client.call(...args);
const sendCommand = (...args) => ioredisClient.call(...args);
const ipStore = new RedisStore({
sendCommand,
prefix: 'message_ip_limiter:',

View file

@ -1,8 +1,7 @@
const Keyv = require('keyv');
const rateLimit = require('express-rate-limit');
const { RedisStore } = require('rate-limit-redis');
const { removePorts, isEnabled } = require('~/server/utils');
const keyvRedis = require('~/cache/keyvRedis');
const ioredisClient = require('~/cache/ioredisClient');
const { logViolation } = require('~/cache');
const { logger } = require('~/config');
@ -31,13 +30,10 @@ const limiterOptions = {
keyGenerator: removePorts,
};
if (isEnabled(process.env.USE_REDIS)) {
if (isEnabled(process.env.USE_REDIS) && ioredisClient) {
logger.debug('Using Redis for register rate limiter.');
const keyv = new Keyv({ store: keyvRedis });
const client = keyv.opts.store.redis;
const sendCommand = (...args) => client.call(...args);
const store = new RedisStore({
sendCommand,
sendCommand: (...args) => ioredisClient.call(...args),
prefix: 'register_limiter:',
});
limiterOptions.store = store;

View file

@ -1,9 +1,8 @@
const Keyv = require('keyv');
const rateLimit = require('express-rate-limit');
const { RedisStore } = require('rate-limit-redis');
const { ViolationTypes } = require('librechat-data-provider');
const { removePorts, isEnabled } = require('~/server/utils');
const keyvRedis = require('~/cache/keyvRedis');
const ioredisClient = require('~/cache/ioredisClient');
const { logViolation } = require('~/cache');
const { logger } = require('~/config');
@ -36,13 +35,10 @@ const limiterOptions = {
keyGenerator: removePorts,
};
if (isEnabled(process.env.USE_REDIS)) {
if (isEnabled(process.env.USE_REDIS) && ioredisClient) {
logger.debug('Using Redis for reset password rate limiter.');
const keyv = new Keyv({ store: keyvRedis });
const client = keyv.opts.store.redis;
const sendCommand = (...args) => client.call(...args);
const store = new RedisStore({
sendCommand,
sendCommand: (...args) => ioredisClient.call(...args),
prefix: 'reset_password_limiter:',
});
limiterOptions.store = store;

View file

@ -1,10 +1,9 @@
const Keyv = require('keyv');
const rateLimit = require('express-rate-limit');
const { RedisStore } = require('rate-limit-redis');
const { ViolationTypes } = require('librechat-data-provider');
const ioredisClient = require('~/cache/ioredisClient');
const logViolation = require('~/cache/logViolation');
const { isEnabled } = require('~/server/utils');
const keyvRedis = require('~/cache/keyvRedis');
const { logger } = require('~/config');
const getEnvironmentVariables = () => {
@ -67,11 +66,9 @@ const createSTTLimiters = () => {
},
};
if (isEnabled(process.env.USE_REDIS)) {
if (isEnabled(process.env.USE_REDIS) && ioredisClient) {
logger.debug('Using Redis for STT rate limiters.');
const keyv = new Keyv({ store: keyvRedis });
const client = keyv.opts.store.redis;
const sendCommand = (...args) => client.call(...args);
const sendCommand = (...args) => ioredisClient.call(...args);
const ipStore = new RedisStore({
sendCommand,
prefix: 'stt_ip_limiter:',

View file

@ -1,10 +1,9 @@
const Keyv = require('keyv');
const rateLimit = require('express-rate-limit');
const { RedisStore } = require('rate-limit-redis');
const { ViolationTypes } = require('librechat-data-provider');
const ioredisClient = require('~/cache/ioredisClient');
const logViolation = require('~/cache/logViolation');
const { isEnabled } = require('~/server/utils');
const keyvRedis = require('~/cache/keyvRedis');
const { logger } = require('~/config');
const handler = async (req, res) => {
@ -29,13 +28,10 @@ const limiterOptions = {
},
};
if (isEnabled(process.env.USE_REDIS)) {
if (isEnabled(process.env.USE_REDIS) && ioredisClient) {
logger.debug('Using Redis for tool call rate limiter.');
const keyv = new Keyv({ store: keyvRedis });
const client = keyv.opts.store.redis;
const sendCommand = (...args) => client.call(...args);
const store = new RedisStore({
sendCommand,
sendCommand: (...args) => ioredisClient.call(...args),
prefix: 'tool_call_limiter:',
});
limiterOptions.store = store;

View file

@ -1,10 +1,9 @@
const Keyv = require('keyv');
const rateLimit = require('express-rate-limit');
const { RedisStore } = require('rate-limit-redis');
const { ViolationTypes } = require('librechat-data-provider');
const ioredisClient = require('~/cache/ioredisClient');
const logViolation = require('~/cache/logViolation');
const { isEnabled } = require('~/server/utils');
const keyvRedis = require('~/cache/keyvRedis');
const { logger } = require('~/config');
const getEnvironmentVariables = () => {
@ -67,11 +66,9 @@ const createTTSLimiters = () => {
},
};
if (isEnabled(process.env.USE_REDIS)) {
if (isEnabled(process.env.USE_REDIS) && ioredisClient) {
logger.debug('Using Redis for TTS rate limiters.');
const keyv = new Keyv({ store: keyvRedis });
const client = keyv.opts.store.redis;
const sendCommand = (...args) => client.call(...args);
const sendCommand = (...args) => ioredisClient.call(...args);
const ipStore = new RedisStore({
sendCommand,
prefix: 'tts_ip_limiter:',

View file

@ -1,10 +1,9 @@
const Keyv = require('keyv');
const rateLimit = require('express-rate-limit');
const { RedisStore } = require('rate-limit-redis');
const { ViolationTypes } = require('librechat-data-provider');
const ioredisClient = require('~/cache/ioredisClient');
const logViolation = require('~/cache/logViolation');
const { isEnabled } = require('~/server/utils');
const keyvRedis = require('~/cache/keyvRedis');
const { logger } = require('~/config');
const getEnvironmentVariables = () => {
@ -72,11 +71,9 @@ const createFileLimiters = () => {
},
};
if (isEnabled(process.env.USE_REDIS)) {
if (isEnabled(process.env.USE_REDIS) && ioredisClient) {
logger.debug('Using Redis for file upload rate limiters.');
const keyv = new Keyv({ store: keyvRedis });
const client = keyv.opts.store.redis;
const sendCommand = (...args) => client.call(...args);
const sendCommand = (...args) => ioredisClient.call(...args);
const ipStore = new RedisStore({
sendCommand,
prefix: 'file_upload_ip_limiter:',

View file

@ -1,9 +1,8 @@
const Keyv = require('keyv');
const rateLimit = require('express-rate-limit');
const { RedisStore } = require('rate-limit-redis');
const { ViolationTypes } = require('librechat-data-provider');
const { removePorts, isEnabled } = require('~/server/utils');
const keyvRedis = require('~/cache/keyvRedis');
const ioredisClient = require('~/cache/ioredisClient');
const { logViolation } = require('~/cache');
const { logger } = require('~/config');
@ -36,13 +35,10 @@ const limiterOptions = {
keyGenerator: removePorts,
};
if (isEnabled(process.env.USE_REDIS)) {
if (isEnabled(process.env.USE_REDIS) && ioredisClient) {
logger.debug('Using Redis for verify email rate limiter.');
const keyv = new Keyv({ store: keyvRedis });
const client = keyv.opts.store.redis;
const sendCommand = (...args) => client.call(...args);
const store = new RedisStore({
sendCommand,
sendCommand: (...args) => ioredisClient.call(...args),
prefix: 'verify_email_limiter:',
});
limiterOptions.store = store;
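
Every limiter above makes the same substitution: instead of unwrapping a node-redis client from a Keyv store, it hands rate-limit-redis the call() method of the shared ioredis instance. A condensed sketch of that shared shape (the limiter itself is illustrative; ~/cache/ioredisClient is the module these files import):

const rateLimit = require('express-rate-limit');
const { RedisStore } = require('rate-limit-redis');
const ioredisClient = require('~/cache/ioredisClient'); // ioredis instance, undefined when Redis is disabled

const limiterOptions = {
  windowMs: 60 * 1000, // illustrative window
  max: 20, // illustrative cap
};

if (process.env.USE_REDIS === 'true' && ioredisClient) {
  limiterOptions.store = new RedisStore({
    // ioredis exposes call() for raw commands, which is what rate-limit-redis expects
    sendCommand: (...args) => ioredisClient.call(...args),
    prefix: 'example_limiter:',
  });
}

module.exports = rateLimit(limiterOptions);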

View file

@ -58,7 +58,7 @@ router.post('/:agent_id', async (req, res) => {
}
let { domain } = metadata;
domain = await domainParser(req, domain, true);
domain = await domainParser(domain, true);
if (!domain) {
return res.status(400).json({ message: 'No domain provided' });
@ -164,7 +164,7 @@ router.delete('/:agent_id/:action_id', async (req, res) => {
return true;
});
domain = await domainParser(req, domain, true);
domain = await domainParser(domain, true);
if (!domain) {
return res.status(400).json({ message: 'No domain provided' });

View file

@ -2,7 +2,6 @@ const express = require('express');
const { PermissionTypes, Permissions } = require('librechat-data-provider');
const {
setHeaders,
handleAbort,
moderateText,
// validateModel,
generateCheckAccess,
@ -16,7 +15,6 @@ const addTitle = require('~/server/services/Endpoints/agents/title');
const router = express.Router();
router.use(moderateText);
router.post('/abort', handleAbort());
const checkAgentAccess = generateCheckAccess(PermissionTypes.AGENTS, [Permissions.USE]);

View file

@ -1,4 +1,4 @@
const Keyv = require('keyv');
const { Keyv } = require('keyv');
const { KeyvFile } = require('keyv-file');
const { logger } = require('~/config');
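
This one-line change tracks the keyv major-version bump noted in the PR: the class is now consumed as a named export. A small sketch of the same import paired with keyv-file, assuming an arbitrary file path:

const { Keyv } = require('keyv');
const { KeyvFile } = require('keyv-file');

// Illustrative file-backed store; the filename is an assumption, not from the diff.
const fileStore = new Keyv({
  store: new KeyvFile({ filename: './data/example-logs.json' }),
});

// Usage is unchanged by the new import style.
// await fileStore.set('banned:127.0.0.1', { count: 1 }, 60_000);
// const entry = await fileStore.get('banned:127.0.0.1');
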

View file

@ -11,8 +11,6 @@ const {
const router = express.Router();
- router.post('/abort', handleAbort());
router.post(
'/',
validateEndpoint,

View file

@ -3,7 +3,6 @@ const AskController = require('~/server/controllers/AskController');
const { initializeClient } = require('~/server/services/Endpoints/custom');
const { addTitle } = require('~/server/services/Endpoints/openAI');
const {
- handleAbort,
setHeaders,
validateModel,
validateEndpoint,
@ -12,8 +11,6 @@ const {
const router = express.Router();
- router.post('/abort', handleAbort());
router.post(
'/',
validateEndpoint,

View file

@ -3,7 +3,6 @@ const AskController = require('~/server/controllers/AskController');
const { initializeClient, addTitle } = require('~/server/services/Endpoints/google');
const {
setHeaders,
- handleAbort,
validateModel,
validateEndpoint,
buildEndpointOption,
@ -11,8 +10,6 @@ const {
const router = express.Router();
- router.post('/abort', handleAbort());
router.post(
'/',
validateEndpoint,

View file

@ -20,7 +20,6 @@ const { logger } = require('~/config');
const router = express.Router();
router.use(moderateText);
- router.post('/abort', handleAbort());
router.post(
'/',
@ -196,7 +195,8 @@ router.post(
logger.debug('[/ask/gptPlugins]', response);
- const { conversation = {} } = await client.responsePromise;
+ const { conversation = {} } = await response.databasePromise;
+ delete response.databasePromise;
conversation.title =
conversation && !conversation.title ? null : conversation?.title || 'New Chat';
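
The second change in this hunk is part of the memory work: the saved-conversation promise now travels on the response object, and the reference is dropped as soon as it has been awaited instead of living on the long-running client. A rough sketch of the pattern (the handler and response shape are assumed):

// Somewhere in the response handler, after streaming has finished:
const { conversation = {} } = await response.databasePromise;
// Delete the property right away so the resolved conversation data
// is not retained for as long as the response object itself.
delete response.databasePromise;
conversation.title =
  conversation && !conversation.title ? null : conversation?.title || 'New Chat';
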

View file

@ -12,7 +12,6 @@ const {
const router = express.Router();
router.use(moderateText);
- router.post('/abort', handleAbort());
router.post(
'/',

View file

@ -36,7 +36,7 @@ router.post('/:assistant_id', async (req, res) => {
}
let { domain } = metadata;
- domain = await domainParser(req, domain, true);
+ domain = await domainParser(domain, true);
if (!domain) {
return res.status(400).json({ message: 'No domain provided' });
@ -172,7 +172,7 @@ router.delete('/:assistant_id/:action_id/:model', async (req, res) => {
return true;
});
- domain = await domainParser(req, domain, true);
+ domain = await domainParser(domain, true);
if (!domain) {
return res.status(400).json({ message: 'No domain provided' });

View file

@ -14,7 +14,6 @@ const AgentController = require('~/server/controllers/agents/request');
const addTitle = require('~/server/services/Endpoints/agents/title');
router.use(moderateText);
- router.post('/abort', handleAbort());
/**
* @route POST /

View file

@ -3,7 +3,6 @@ const EditController = require('~/server/controllers/EditController');
const { initializeClient } = require('~/server/services/Endpoints/anthropic');
const {
setHeaders,
- handleAbort,
validateModel,
validateEndpoint,
buildEndpointOption,
@ -11,8 +10,6 @@ const {
const router = express.Router();
- router.post('/abort', handleAbort());
router.post(
'/',
validateEndpoint,

View file

@ -12,8 +12,6 @@ const {
const router = express.Router();
- router.post('/abort', handleAbort());
router.post(
'/',
validateEndpoint,

View file

@ -3,7 +3,6 @@ const EditController = require('~/server/controllers/EditController');
const { initializeClient } = require('~/server/services/Endpoints/google');
const {
setHeaders,
- handleAbort,
validateModel,
validateEndpoint,
buildEndpointOption,
@ -11,8 +10,6 @@ const {
const router = express.Router();
- router.post('/abort', handleAbort());
router.post(
'/',
validateEndpoint,

View file

@ -2,7 +2,6 @@ const express = require('express');
const { getResponseSender } = require('librechat-data-provider');
const {
setHeaders,
- handleAbort,
moderateText,
validateModel,
handleAbortError,
@ -19,7 +18,6 @@ const { logger } = require('~/config');
const router = express.Router();
router.use(moderateText);
- router.post('/abort', handleAbort());
router.post(
'/',
@ -173,7 +171,8 @@ router.post(
logger.debug('[/edit/gptPlugins] CLIENT RESPONSE', response);
- const { conversation = {} } = await client.responsePromise;
+ const { conversation = {} } = await response.databasePromise;
+ delete response.databasePromise;
conversation.title =
conversation && !conversation.title ? null : conversation?.title || 'New Chat';

View file

@ -2,7 +2,6 @@ const express = require('express');
const EditController = require('~/server/controllers/EditController');
const { initializeClient } = require('~/server/services/Endpoints/openAI');
const {
- handleAbort,
setHeaders,
validateModel,
validateEndpoint,
@ -12,7 +11,6 @@ const {
const router = express.Router();
router.use(moderateText);
- router.post('/abort', handleAbort());
router.post(
'/',

View file

@ -1,4 +1,4 @@
- const Keyv = require('keyv');
+ const { Keyv } = require('keyv');
const express = require('express');
const { MeiliSearch } = require('meilisearch');
const { Conversation, getConvosQueried } = require('~/models/Conversation');

View file

@ -50,7 +50,7 @@ const validateAndUpdateTool = async ({ req, tool, assistant_id }) => {
return null;
}
- const parsedDomain = await domainParser(req, domain, true);
+ const parsedDomain = await domainParser(domain, true);
if (!parsedDomain) {
return null;
@ -66,12 +66,11 @@ const validateAndUpdateTool = async ({ req, tool, assistant_id }) => {
*
* Necessary due to `[a-zA-Z0-9_-]*` Regex Validation, limited to a 64-character maximum.
*
- * @param {Express.Request} req - The Express Request object.
* @param {string} domain - The domain name to encode/decode.
* @param {boolean} inverse - False to decode from base64, true to encode to base64.
* @returns {Promise<string>} Encoded or decoded domain string.
*/
- async function domainParser(req, domain, inverse = false) {
+ async function domainParser(domain, inverse = false) {
if (!domain) {
return;
}
@ -122,7 +121,7 @@ async function loadActionSets(searchParams) {
* Creates a general tool for an entire action set.
*
* @param {Object} params - The parameters for loading action sets.
- * @param {ServerRequest} params.req
+ * @param {string} params.userId
* @param {ServerResponse} params.res
* @param {Action} params.action - The action set. Necessary for decrypting authentication values.
* @param {ActionRequest} params.requestBuilder - The ActionRequest builder class to execute the API call.
@ -133,7 +132,7 @@ async function loadActionSets(searchParams) {
* @returns { Promise<typeof tool | { _call: (toolInput: Object | string) => unknown}> } An object with `_call` method to execute the tool input.
*/
async function createActionTool({
- req,
+ userId,
res,
action,
requestBuilder,
@ -154,7 +153,7 @@ async function createActionTool({
try {
if (metadata.auth.type === AuthTypeEnum.OAuth && metadata.auth.authorization_url) {
const action_id = action.action_id;
- const identifier = `${req.user.id}:${action.action_id}`;
+ const identifier = `${userId}:${action.action_id}`;
const requestLogin = async () => {
const { args: _args, stepId, ...toolCall } = config.toolCall ?? {};
if (!stepId) {
@ -162,7 +161,7 @@ async function createActionTool({
}
const statePayload = {
nonce: nanoid(),
- user: req.user.id,
+ user: userId,
action_id,
};
@ -206,7 +205,7 @@ async function createActionTool({
'oauth',
{
state: stateToken,
- userId: req.user.id,
+ userId: userId,
client_url: metadata.auth.client_url,
redirect_uri: `${process.env.DOMAIN_CLIENT}/api/actions/${action_id}/oauth/callback`,
/** Encrypted values */
@ -232,10 +231,10 @@ async function createActionTool({
};
const tokenPromises = [];
- tokenPromises.push(findToken({ userId: req.user.id, type: 'oauth', identifier }));
+ tokenPromises.push(findToken({ userId, type: 'oauth', identifier }));
tokenPromises.push(
findToken({
- userId: req.user.id,
+ userId,
type: 'oauth_refresh',
identifier: `${identifier}:refresh`,
}),
@ -258,9 +257,9 @@ async function createActionTool({
const refresh_token = await decryptV2(refreshTokenData.token);
const refreshTokens = async () =>
await refreshAccessToken({
+ userId,
identifier,
refresh_token,
- userId: req.user.id,
client_url: metadata.auth.client_url,
encrypted_oauth_client_id: encrypted.oauth_client_id,
encrypted_oauth_client_secret: encrypted.oauth_client_secret,
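
Net effect of the Actions changes above: domainParser now takes only the domain plus an encode/decode flag, and createActionTool is keyed off a plain userId string rather than the Express request. A hedged sketch of the new call shapes (assumes both helpers are in scope as in this file; res, action, and requestBuilder are placeholders):

// Encode to a base64-safe identifier, then decode it back.
const encoded = await domainParser('actions.example.com', true);
const decoded = await domainParser(encoded, false);

// Callers now pass the user id explicitly, so the created tool no longer
// holds a reference to the request object.
const tool = await createActionTool({
  userId: req.user.id, // extracted by the caller, as in processRequiredActions/loadAgentTools
  res,
  action,
  requestBuilder,
  // ...remaining options as in the surrounding file
});
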

View file

@ -78,20 +78,20 @@ describe('domainParser', () => {
// Non-azure request
it('does not return domain as is if not azure', async () => {
const domain = `example.com${actionDomainSeparator}test${actionDomainSeparator}`;
- const result1 = await domainParser(reqNoAzure, domain, false);
- const result2 = await domainParser(reqNoAzure, domain, true);
+ const result1 = await domainParser(domain, false);
+ const result2 = await domainParser(domain, true);
expect(result1).not.toEqual(domain);
expect(result2).not.toEqual(domain);
});
// Test for Empty or Null Inputs
it('returns undefined for null domain input', async () => {
- const result = await domainParser(req, null, true);
+ const result = await domainParser(null, true);
expect(result).toBeUndefined();
});
it('returns undefined for empty domain input', async () => {
- const result = await domainParser(req, '', true);
+ const result = await domainParser('', true);
expect(result).toBeUndefined();
});
@ -102,7 +102,7 @@ describe('domainParser', () => {
.toString('base64')
.substring(0, Constants.ENCODED_DOMAIN_LENGTH);
- await domainParser(req, domain, true);
+ await domainParser(domain, true);
const cachedValue = await globalCache[encodedDomain];
expect(cachedValue).toEqual(Buffer.from(domain).toString('base64'));
@ -112,14 +112,14 @@ describe('domainParser', () => {
it('encodes domain exactly at threshold without modification', async () => {
const domain = 'a'.repeat(Constants.ENCODED_DOMAIN_LENGTH - TLD.length) + TLD;
const expected = domain.replace(/\./g, actionDomainSeparator);
- const result = await domainParser(req, domain, true);
+ const result = await domainParser(domain, true);
expect(result).toEqual(expected);
});
it('encodes domain just below threshold without modification', async () => {
const domain = 'a'.repeat(Constants.ENCODED_DOMAIN_LENGTH - 1 - TLD.length) + TLD;
const expected = domain.replace(/\./g, actionDomainSeparator);
- const result = await domainParser(req, domain, true);
+ const result = await domainParser(domain, true);
expect(result).toEqual(expected);
});
@ -129,7 +129,7 @@ describe('domainParser', () => {
const encodedDomain = Buffer.from(unicodeDomain)
.toString('base64')
.substring(0, Constants.ENCODED_DOMAIN_LENGTH);
- const result = await domainParser(req, unicodeDomain, true);
+ const result = await domainParser(unicodeDomain, true);
expect(result).toEqual(encodedDomain);
});
@ -139,7 +139,6 @@ describe('domainParser', () => {
globalCache[encodedDomain.substring(0, Constants.ENCODED_DOMAIN_LENGTH)] = encodedDomain; // Simulate caching
const result = await domainParser(
- req,
encodedDomain.substring(0, Constants.ENCODED_DOMAIN_LENGTH),
false,
);
@ -150,27 +149,27 @@ describe('domainParser', () => {
it('returns domain with replaced separators if no cached domain exists', async () => {
const domain = 'example.com';
const withSeparator = domain.replace(/\./g, actionDomainSeparator);
- const result = await domainParser(req, withSeparator, false);
+ const result = await domainParser(withSeparator, false);
expect(result).toEqual(domain);
});
it('returns domain with replaced separators when inverse is false and under encoding length', async () => {
const domain = 'examp.com';
const withSeparator = domain.replace(/\./g, actionDomainSeparator);
- const result = await domainParser(req, withSeparator, false);
+ const result = await domainParser(withSeparator, false);
expect(result).toEqual(domain);
});
it('replaces periods with actionDomainSeparator when inverse is true and under encoding length', async () => {
const domain = 'examp.com';
const expected = domain.replace(/\./g, actionDomainSeparator);
- const result = await domainParser(req, domain, true);
+ const result = await domainParser(domain, true);
expect(result).toEqual(expected);
});
it('encodes domain when length is above threshold and inverse is true', async () => {
const domain = 'a'.repeat(Constants.ENCODED_DOMAIN_LENGTH + 1).concat('.com');
- const result = await domainParser(req, domain, true);
+ const result = await domainParser(domain, true);
expect(result).not.toEqual(domain);
expect(result.length).toBeLessThanOrEqual(Constants.ENCODED_DOMAIN_LENGTH);
});
@ -180,20 +179,20 @@ describe('domainParser', () => {
const encodedDomain = Buffer.from(
originalDomain.replace(/\./g, actionDomainSeparator),
).toString('base64');
- const result = await domainParser(req, encodedDomain, false);
+ const result = await domainParser(encodedDomain, false);
expect(result).toEqual(encodedDomain);
});
it('decodes encoded value if cached and encoded value is provided, and inverse is false', async () => {
const originalDomain = 'example.com';
- const encodedDomain = await domainParser(req, originalDomain, true);
- const result = await domainParser(req, encodedDomain, false);
+ const encodedDomain = await domainParser(originalDomain, true);
+ const result = await domainParser(encodedDomain, false);
expect(result).toEqual(originalDomain);
});
it('handles invalid base64 encoded values gracefully', async () => {
const invalidBase64Domain = 'not_base64_encoded';
- const result = await domainParser(req, invalidBase64Domain, false);
+ const result = await domainParser(invalidBase64Domain, false);
expect(result).toEqual(invalidBase64Domain);
});
});

View file

@ -159,14 +159,20 @@ const initializeAgentOptions = async ({
currentFiles,
agent.tool_resources,
);
+ const provider = agent.provider;
const { tools, toolContextMap } = await loadAgentTools({
req,
res,
- agent,
+ agent: {
+ id: agent.id,
+ tools: agent.tools,
+ provider,
+ model: agent.model,
+ },
tool_resources,
});
- const provider = agent.provider;
agent.endpoint = provider;
let getOptions = providerConfigMap[provider];
if (!getOptions && providerConfigMap[provider.toLowerCase()] != null) {
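
Handing loadAgentTools a trimmed-down object instead of the full agent record keeps the tool-loading path from retaining the larger agent document. A small sketch of the shape being forwarded (field values are illustrative):

const agent = {
  id: 'agent_abc123', // illustrative values, not from the diff
  provider: 'openai',
  model: 'gpt-4o-mini',
  tools: ['execute_code', 'file_search'],
  instructions: 'long system prompt...', // heavier fields like this stay behind
};

const provider = agent.provider;
const minimalAgent = { id: agent.id, tools: agent.tools, provider, model: agent.model };
// Call shape as in the hunk above:
// const { tools, toolContextMap } = await loadAgentTools({ req, res, agent: minimalAgent, tool_resources });
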

View file

@ -2,7 +2,11 @@ const { CacheKeys } = require('librechat-data-provider');
const getLogStores = require('~/cache/getLogStores');
const { isEnabled } = require('~/server/utils');
const { saveConvo } = require('~/models');
+ const { logger } = require('~/config');
+ /**
+ * Add title to conversation in a way that avoids memory retention
+ */
const addTitle = async (req, { text, response, client }) => {
const { TITLE_CONVO = true } = process.env ?? {};
if (!isEnabled(TITLE_CONVO)) {
@ -13,37 +17,55 @@ const addTitle = async (req, { text, response, client }) => {
return;
}
- // If the request was aborted, don't generate the title.
- if (client.abortController.signal.aborted) {
- return;
- }
const titleCache = getLogStores(CacheKeys.GEN_TITLE);
const key = `${req.user.id}-${response.conversationId}`;
- const responseText =
- response?.content && Array.isArray(response?.content)
- ? response.content.reduce((acc, block) => {
- if (block?.type === 'text') {
- return acc + block.text;
- }
- return acc;
- }, '')
- : (response?.content ?? response?.text ?? '');
+ /** @type {NodeJS.Timeout} */
+ let timeoutId;
+ try {
+ const timeoutPromise = new Promise((_, reject) => {
+ timeoutId = setTimeout(() => reject(new Error('Title generation timeout')), 25000);
+ }).catch((error) => {
+ logger.error('Title error:', error);
+ });
- const title = await client.titleConvo({
- text,
- responseText,
- conversationId: response.conversationId,
- });
- await titleCache.set(key, title, 120000);
- await saveConvo(
- req,
- {
- conversationId: response.conversationId,
- title,
- },
- { context: 'api/server/services/Endpoints/agents/title.js' },
- );
+ let titlePromise;
+ let abortController = new AbortController();
+ if (client && typeof client.titleConvo === 'function') {
+ titlePromise = Promise.race([
+ client
+ .titleConvo({
+ text,
+ abortController,
+ })
+ .catch((error) => {
+ logger.error('Client title error:', error);
+ }),
+ timeoutPromise,
+ ]);
+ } else {
+ return;
+ }
+ const title = await titlePromise;
+ if (!abortController.signal.aborted) {
+ abortController.abort();
+ }
+ if (timeoutId) {
+ clearTimeout(timeoutId);
+ }
+ await titleCache.set(key, title, 120000);
+ await saveConvo(
+ req,
+ {
+ conversationId: response.conversationId,
+ title,
+ },
+ { context: 'api/server/services/Endpoints/agents/title.js' },
+ );
+ } catch (error) {
+ logger.error('Error generating title:', error);
+ }
};
module.exports = addTitle;
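
The rewritten helper races title generation against a 25-second timeout and tears down both the timer and the AbortController once either side settles. A condensed, self-contained sketch of that pattern (titleConvo here is a stand-in for the client method):

// Stand-in for client.titleConvo; only the { text, abortController } shape matters.
const titleConvo = async ({ text, abortController }) =>
  new Promise((resolve) => {
    const timer = setTimeout(() => resolve(`Title for: ${text}`), 500);
    abortController.signal.addEventListener('abort', () => clearTimeout(timer));
  });

async function generateTitleWithTimeout(text, timeoutMs = 25000) {
  let timeoutId;
  const abortController = new AbortController();
  try {
    const timeoutPromise = new Promise((_, reject) => {
      timeoutId = setTimeout(() => reject(new Error('Title generation timeout')), timeoutMs);
    }).catch((error) => {
      console.error('Title error:', error);
    });

    // Whichever settles first wins; a timed-out run resolves to undefined.
    return await Promise.race([
      titleConvo({ text, abortController }).catch((error) => {
        console.error('Client title error:', error);
      }),
      timeoutPromise,
    ]);
  } finally {
    // Always release the timer and signal any in-flight work to stop.
    if (!abortController.signal.aborted) {
      abortController.abort();
    }
    clearTimeout(timeoutId);
  }
}

// generateTitleWithTimeout('What is the capital of France?').then(console.log);
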

View file

@ -1,7 +1,7 @@
const { EModelEndpoint } = require('librechat-data-provider');
const { getUserKey, checkUserKeyExpiry } = require('~/server/services/UserService');
const { getLLMConfig } = require('~/server/services/Endpoints/anthropic/llm');
- const { AnthropicClient } = require('~/app');
+ const AnthropicClient = require('~/app/clients/AnthropicClient');
const initializeClient = async ({ req, res, endpointOption, overrideModel, optionsOnly }) => {
const { ANTHROPIC_API_KEY, ANTHROPIC_REVERSE_PROXY, PROXY } = process.env;

View file

@ -13,11 +13,6 @@ const addTitle = async (req, { text, response, client }) => {
return;
}
- // If the request was aborted, don't generate the title.
- if (client.abortController.signal.aborted) {
- return;
- }
const titleCache = getLogStores(CacheKeys.GEN_TITLE);
const key = `${req.user.id}-${response.conversationId}`;

View file

@ -11,8 +11,8 @@ const { getLLMConfig } = require('~/server/services/Endpoints/openAI/llm');
const { getCustomEndpointConfig } = require('~/server/services/Config');
const { fetchModels } = require('~/server/services/ModelService');
const { isUserProvided, sleep } = require('~/server/utils');
+ const OpenAIClient = require('~/app/clients/OpenAIClient');
const getLogStores = require('~/cache/getLogStores');
- const { OpenAIClient } = require('~/app');
const { PROXY } = process.env;

View file

@ -7,8 +7,14 @@ const {
const { getUserKeyValues, checkUserKeyExpiry } = require('~/server/services/UserService');
const { getLLMConfig } = require('~/server/services/Endpoints/openAI/llm');
const { isEnabled, isUserProvided, sleep } = require('~/server/utils');
+ const OpenAIClient = require('~/app/clients/OpenAIClient');
const { getAzureCredentials } = require('~/utils');
- const { OpenAIClient } = require('~/app');
+ function createHandleNewToken(streamRate) {
+ return async () => {
+ await sleep(streamRate);
+ };
+ }
const initializeClient = async ({
req,
@ -140,14 +146,13 @@ const initializeClient = async ({
clientOptions = Object.assign({ modelOptions }, clientOptions);
clientOptions.modelOptions.user = req.user.id;
const options = getLLMConfig(apiKey, clientOptions);
- if (!clientOptions.streamRate) {
+ const streamRate = clientOptions.streamRate;
+ if (!streamRate) {
return options;
}
options.llmConfig.callbacks = [
{
- handleLLMNewToken: async () => {
- await sleep(clientOptions.streamRate);
- },
+ handleLLMNewToken: createHandleNewToken(streamRate),
},
];
return options;
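
Moving the handler into a small factory means the callback closes over just the numeric streamRate instead of the whole clientOptions object. A brief sketch of the factory wired into the callbacks array (sleep is assumed to resolve after the given delay):

// Assumed helper: resolves after `ms` milliseconds.
const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

// Returning the handler from a factory keeps its closure down to one number.
function createHandleNewToken(streamRate) {
  return async () => {
    await sleep(streamRate);
  };
}

// Wiring, mirroring the hunk above (values are illustrative):
const options = { llmConfig: {} };
const streamRate = 15; // ms to pause per streamed token
if (streamRate) {
  options.llmConfig.callbacks = [{ handleLLMNewToken: createHandleNewToken(streamRate) }];
}
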

View file

@ -13,11 +13,6 @@ const addTitle = async (req, { text, response, client }) => {
return;
}
- // If the request was aborted and is not azure, don't generate the title.
- if (!client.azure && client.abortController.signal.aborted) {
- return;
- }
const titleCache = getLogStores(CacheKeys.GEN_TITLE);
const key = `${req.user.id}-${response.conversationId}`;

View file

@ -37,9 +37,8 @@ async function createMCPTool({ req, toolKey, provider }) {
}
const [toolName, serverName] = toolKey.split(Constants.mcp_delimiter);
- const userId = req.user?.id;
- if (!userId) {
+ if (!req.user?.id) {
logger.error(
`[MCP][${serverName}][${toolName}] User ID not found on request. Cannot create tool.`,
);
@ -49,15 +48,16 @@ async function createMCPTool({ req, toolKey, provider }) {
/** @type {(toolArguments: Object | string, config?: GraphRunnableConfig) => Promise<unknown>} */
const _call = async (toolArguments, config) => {
try {
- const mcpManager = getMCPManager();
+ const derivedSignal = config?.signal ? AbortSignal.any([config.signal]) : undefined;
+ const mcpManager = getMCPManager(config?.userId);
const result = await mcpManager.callTool({
serverName,
toolName,
provider,
toolArguments,
options: {
- userId,
- signal: config?.signal,
+ userId: config?.configurable?.user_id,
+ signal: derivedSignal,
},
});
@ -70,7 +70,7 @@ async function createMCPTool({ req, toolKey, provider }) {
return result;
} catch (error) {
logger.error(
- `[MCP][User: ${userId}][${serverName}] Error calling "${toolName}" MCP tool:`,
+ `[MCP][User: ${config?.userId}][${serverName}] Error calling "${toolName}" MCP tool:`,
error,
);
throw new Error(
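
The tool now resolves the user at call time from the runnable config and derives a per-call abort signal, so nothing from the original request is captured in the closure. A hedged sketch of that call path (the manager is stubbed; the config shape follows the diff, and AbortSignal.any needs Node 20+):

// Stub standing in for the manager returned by getMCPManager(userId).
const mcpManager = {
  async callTool({ serverName, toolName, toolArguments, options }) {
    return { serverName, toolName, toolArguments, calledBy: options.userId };
  },
};

const callExampleMCPTool = async (toolArguments, config) => {
  // Derive a fresh signal from the per-call signal when one is provided.
  const derivedSignal = config?.signal ? AbortSignal.any([config.signal]) : undefined;
  return mcpManager.callTool({
    serverName: 'example-server',
    toolName: 'example-tool',
    toolArguments,
    options: {
      userId: config?.configurable?.user_id, // user id comes from the config, not the request
      signal: derivedSignal,
    },
  });
};

// callExampleMCPTool({ query: 'hello' }, { signal: new AbortController().signal, configurable: { user_id: 'user_1' } });
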

View file

@ -334,7 +334,7 @@ async function processRequiredActions(client, requiredActions) {
const domainMap = new Map();
for (const action of actionSets) {
- const domain = await domainParser(client.req, action.metadata.domain, true);
+ const domain = await domainParser(action.metadata.domain, true);
domainMap.set(domain, action);
// Check if domain is allowed
@ -404,7 +404,7 @@ async function processRequiredActions(client, requiredActions) {
// We've already decrypted the metadata, so we can pass it directly
tool = await createActionTool({
- req: client.req,
+ userId: client.req.user.id,
res: client.res,
action,
requestBuilder,
@ -458,7 +458,7 @@ async function processRequiredActions(client, requiredActions) {
* @param {Object} params - Run params containing user and request information.
* @param {ServerRequest} params.req - The request object.
* @param {ServerResponse} params.res - The request object.
- * @param {Agent} params.agent - The agent to load tools for.
+ * @param {Pick<Agent, 'id' | 'provider' | 'model' | 'tools'>} params.agent - The agent to load tools for.
* @param {string | undefined} [params.openAIApiKey] - The OpenAI API key.
* @returns {Promise<{ tools?: StructuredTool[] }>} The agent tools.
*/
@ -570,7 +570,7 @@ async function loadAgentTools({ req, res, agent, tool_resources, openAIApiKey })
const domainMap = new Map();
for (const action of actionSets) {
- const domain = await domainParser(req, action.metadata.domain, true);
+ const domain = await domainParser(action.metadata.domain, true);
domainMap.set(domain, action);
// Check if domain is allowed (do this once per action set)
@ -639,7 +639,7 @@ async function loadAgentTools({ req, res, agent, tool_resources, openAIApiKey })
if (requestBuilder) {
const tool = await createActionTool({
- req,
+ userId: req.user.id,
res,
action,
requestBuilder,

View file

@ -1,4 +1,4 @@
- const Keyv = require('keyv');
+ const { Keyv } = require('keyv');
const passport = require('passport');
const session = require('express-session');
const MemoryStore = require('memorystore')(session);
@ -53,7 +53,7 @@ const configureSocialLogins = (app) => {
if (isEnabled(process.env.USE_REDIS)) {
logger.debug('Using Redis for session storage in OpenID...');
const keyv = new Keyv({ store: keyvRedis });
- const client = keyv.opts.store.redis;
+ const client = keyv.opts.store.client;
sessionOptions.store = new RedisStore({ client, prefix: 'openid_session' });
} else {
sessionOptions.store = new MemoryStore({