diff --git a/.env.example b/.env.example index 0cf51ea2dc..a6ff6157ce 100644 --- a/.env.example +++ b/.env.example @@ -47,6 +47,10 @@ TRUST_PROXY=1 # password policies. # MIN_PASSWORD_LENGTH=8 +# When enabled, the app will continue running after encountering uncaught exceptions +# instead of exiting the process. Not recommended for production unless necessary. +# CONTINUE_ON_UNCAUGHT_EXCEPTION=false + #===============# # JSON Logging # #===============# @@ -131,7 +135,7 @@ PROXY= #============# ANTHROPIC_API_KEY=user_provided -# ANTHROPIC_MODELS=claude-opus-4-6,claude-opus-4-20250514,claude-sonnet-4-20250514,claude-3-7-sonnet-20250219,claude-3-5-sonnet-20241022,claude-3-5-haiku-20241022,claude-3-opus-20240229,claude-3-sonnet-20240229,claude-3-haiku-20240307 +# ANTHROPIC_MODELS=claude-sonnet-4-6,claude-opus-4-6,claude-opus-4-20250514,claude-sonnet-4-20250514,claude-3-7-sonnet-20250219,claude-3-5-sonnet-20241022,claude-3-5-haiku-20241022,claude-3-opus-20240229,claude-3-sonnet-20240229,claude-3-haiku-20240307 # ANTHROPIC_REVERSE_PROXY= # Set to true to use Anthropic models through Google Vertex AI instead of direct API @@ -166,8 +170,8 @@ ANTHROPIC_API_KEY=user_provided # BEDROCK_AWS_SESSION_TOKEN=someSessionToken # Note: This example list is not meant to be exhaustive. If omitted, all known, supported model IDs will be included for you. -# BEDROCK_AWS_MODELS=anthropic.claude-opus-4-6-v1,anthropic.claude-3-5-sonnet-20240620-v1:0,meta.llama3-1-8b-instruct-v1:0 -# Cross-region inference model IDs: us.anthropic.claude-opus-4-6-v1,global.anthropic.claude-opus-4-6-v1 +# BEDROCK_AWS_MODELS=anthropic.claude-sonnet-4-6,anthropic.claude-opus-4-6-v1,anthropic.claude-3-5-sonnet-20240620-v1:0,meta.llama3-1-8b-instruct-v1:0 +# Cross-region inference model IDs: us.anthropic.claude-sonnet-4-6,us.anthropic.claude-opus-4-6-v1,global.anthropic.claude-opus-4-6-v1 # See all Bedrock model IDs here: https://docs.aws.amazon.com/bedrock/latest/userguide/model-ids.html#model-ids-arns @@ -748,8 +752,10 @@ HELP_AND_FAQ_URL=https://librechat.ai # REDIS_PING_INTERVAL=300 # Force specific cache namespaces to use in-memory storage even when Redis is enabled -# Comma-separated list of CacheKeys (e.g., ROLES,MESSAGES) -# FORCED_IN_MEMORY_CACHE_NAMESPACES=ROLES,MESSAGES +# Comma-separated list of CacheKeys +# Defaults to CONFIG_STORE,APP_CONFIG so YAML-derived config stays per-container (safe for blue/green deployments) +# Set to empty string to force all namespaces through Redis: FORCED_IN_MEMORY_CACHE_NAMESPACES= +# FORCED_IN_MEMORY_CACHE_NAMESPACES=CONFIG_STORE,APP_CONFIG # Leader Election Configuration (for multi-instance deployments with Redis) # Duration in seconds that the leader lease is valid before it expires (default: 25) diff --git a/.gitignore b/.gitignore index d0c87ff03d..86d4a3ddae 100644 --- a/.gitignore +++ b/.gitignore @@ -30,6 +30,9 @@ coverage config/translations/stores/* client/src/localization/languages/*_missing_keys.json +# Turborepo +.turbo + # Compiled Dirs (http://nodejs.org/api/addons.html) build/ dist/ diff --git a/api/cache/banViolation.js b/api/cache/banViolation.js index 122355edb1..4d321889c1 100644 --- a/api/cache/banViolation.js +++ b/api/cache/banViolation.js @@ -55,6 +55,7 @@ const banViolation = async (req, res, errorMessage) => { res.clearCookie('refreshToken'); res.clearCookie('openid_access_token'); + res.clearCookie('openid_id_token'); res.clearCookie('openid_user_id'); res.clearCookie('token_provider'); diff --git a/api/cache/getLogStores.js b/api/cache/getLogStores.js index 5940689957..3089192196 100644 --- a/api/cache/getLogStores.js +++ b/api/cache/getLogStores.js @@ -37,6 +37,7 @@ const namespaces = { [CacheKeys.ROLES]: standardCache(CacheKeys.ROLES), [CacheKeys.APP_CONFIG]: standardCache(CacheKeys.APP_CONFIG), [CacheKeys.CONFIG_STORE]: standardCache(CacheKeys.CONFIG_STORE), + [CacheKeys.TOOL_CACHE]: standardCache(CacheKeys.TOOL_CACHE), [CacheKeys.PENDING_REQ]: standardCache(CacheKeys.PENDING_REQ), [CacheKeys.ENCODED_DOMAINS]: new Keyv({ store: keyvMongo, namespace: CacheKeys.ENCODED_DOMAINS }), [CacheKeys.ABORT_KEYS]: standardCache(CacheKeys.ABORT_KEYS, Time.TEN_MINUTES), diff --git a/api/db/connect.js b/api/db/connect.js index 26166ccff8..3534884b57 100644 --- a/api/db/connect.js +++ b/api/db/connect.js @@ -40,6 +40,10 @@ if (!cached) { cached = global.mongoose = { conn: null, promise: null }; } +mongoose.connection.on('error', (err) => { + logger.error('[connectDb] MongoDB connection error:', err); +}); + async function connectDb() { if (cached.conn && cached.conn?._readyState === 1) { return cached.conn; diff --git a/api/db/utils.js b/api/db/utils.js index 4a311d9832..32051be78d 100644 --- a/api/db/utils.js +++ b/api/db/utils.js @@ -26,7 +26,7 @@ async function batchResetMeiliFlags(collection) { try { while (hasMore) { const docs = await collection - .find({ expiredAt: null, _meiliIndex: true }, { projection: { _id: 1 } }) + .find({ expiredAt: null, _meiliIndex: { $ne: false } }, { projection: { _id: 1 } }) .limit(BATCH_SIZE) .toArray(); diff --git a/api/db/utils.spec.js b/api/db/utils.spec.js index 8b32b4aea8..adf4f6cd86 100644 --- a/api/db/utils.spec.js +++ b/api/db/utils.spec.js @@ -265,8 +265,8 @@ describe('batchResetMeiliFlags', () => { const result = await batchResetMeiliFlags(testCollection); - // Only one document has _meiliIndex: true - expect(result).toBe(1); + // both documents should be updated + expect(result).toBe(2); }); it('should handle mixed document states correctly', async () => { @@ -275,16 +275,18 @@ describe('batchResetMeiliFlags', () => { { _id: new mongoose.Types.ObjectId(), expiredAt: null, _meiliIndex: false }, { _id: new mongoose.Types.ObjectId(), expiredAt: new Date(), _meiliIndex: true }, { _id: new mongoose.Types.ObjectId(), expiredAt: null, _meiliIndex: true }, + { _id: new mongoose.Types.ObjectId(), expiredAt: null, _meiliIndex: null }, + { _id: new mongoose.Types.ObjectId(), expiredAt: null }, ]); const result = await batchResetMeiliFlags(testCollection); - expect(result).toBe(2); + expect(result).toBe(4); const flaggedDocs = await testCollection .find({ expiredAt: null, _meiliIndex: false }) .toArray(); - expect(flaggedDocs).toHaveLength(3); // 2 were updated, 1 was already false + expect(flaggedDocs).toHaveLength(5); // 4 were updated, 1 was already false }); }); diff --git a/api/models/Conversation.js b/api/models/Conversation.js index a8f5f9a36c..32eac1a764 100644 --- a/api/models/Conversation.js +++ b/api/models/Conversation.js @@ -124,10 +124,15 @@ module.exports = { updateOperation, { new: true, - upsert: true, + upsert: metadata?.noUpsert !== true, }, ); + if (!conversation) { + logger.debug('[saveConvo] Conversation not found, skipping update'); + return null; + } + return conversation.toObject(); } catch (error) { logger.error('[saveConvo] Error saving conversation', error); diff --git a/api/models/Conversation.spec.js b/api/models/Conversation.spec.js index b6237d5f15..bd415b4165 100644 --- a/api/models/Conversation.spec.js +++ b/api/models/Conversation.spec.js @@ -106,6 +106,47 @@ describe('Conversation Operations', () => { expect(result.conversationId).toBe(newConversationId); }); + it('should not create a conversation when noUpsert is true and conversation does not exist', async () => { + const nonExistentId = uuidv4(); + const result = await saveConvo( + mockReq, + { conversationId: nonExistentId, title: 'Ghost Title' }, + { noUpsert: true }, + ); + + expect(result).toBeNull(); + + const dbConvo = await Conversation.findOne({ conversationId: nonExistentId }); + expect(dbConvo).toBeNull(); + }); + + it('should update an existing conversation when noUpsert is true', async () => { + await saveConvo(mockReq, mockConversationData); + + const result = await saveConvo( + mockReq, + { conversationId: mockConversationData.conversationId, title: 'Updated Title' }, + { noUpsert: true }, + ); + + expect(result).not.toBeNull(); + expect(result.title).toBe('Updated Title'); + expect(result.conversationId).toBe(mockConversationData.conversationId); + }); + + it('should still upsert by default when noUpsert is not provided', async () => { + const newId = uuidv4(); + const result = await saveConvo(mockReq, { + conversationId: newId, + title: 'New Conversation', + endpoint: EModelEndpoint.openAI, + }); + + expect(result).not.toBeNull(); + expect(result.conversationId).toBe(newId); + expect(result.title).toBe('New Conversation'); + }); + it('should handle unsetFields metadata', async () => { const metadata = { unsetFields: { someField: 1 }, @@ -122,7 +163,6 @@ describe('Conversation Operations', () => { describe('isTemporary conversation handling', () => { it('should save a conversation with expiredAt when isTemporary is true', async () => { - // Mock app config with 24 hour retention mockReq.config.interfaceConfig.temporaryChatRetention = 24; mockReq.body = { isTemporary: true }; @@ -135,7 +175,6 @@ describe('Conversation Operations', () => { expect(result.expiredAt).toBeDefined(); expect(result.expiredAt).toBeInstanceOf(Date); - // Verify expiredAt is approximately 24 hours in the future const expectedExpirationTime = new Date(beforeSave.getTime() + 24 * 60 * 60 * 1000); const actualExpirationTime = new Date(result.expiredAt); @@ -157,7 +196,6 @@ describe('Conversation Operations', () => { }); it('should save a conversation without expiredAt when isTemporary is not provided', async () => { - // No isTemporary in body mockReq.body = {}; const result = await saveConvo(mockReq, mockConversationData); @@ -167,7 +205,6 @@ describe('Conversation Operations', () => { }); it('should use custom retention period from config', async () => { - // Mock app config with 48 hour retention mockReq.config.interfaceConfig.temporaryChatRetention = 48; mockReq.body = { isTemporary: true }; diff --git a/api/models/tx.js b/api/models/tx.js index 959c88e2b4..9a6305ec5c 100644 --- a/api/models/tx.js +++ b/api/models/tx.js @@ -176,6 +176,7 @@ const tokenValues = Object.assign( 'claude-opus-4-5': { prompt: 5, completion: 25 }, 'claude-opus-4-6': { prompt: 5, completion: 25 }, 'claude-sonnet-4': { prompt: 3, completion: 15 }, + 'claude-sonnet-4-6': { prompt: 3, completion: 15 }, 'command-r': { prompt: 0.5, completion: 1.5 }, 'command-r-plus': { prompt: 3, completion: 15 }, 'command-text': { prompt: 1.5, completion: 2.0 }, @@ -309,6 +310,7 @@ const cacheTokenValues = { 'claude-3-haiku': { write: 0.3, read: 0.03 }, 'claude-haiku-4-5': { write: 1.25, read: 0.1 }, 'claude-sonnet-4': { write: 3.75, read: 0.3 }, + 'claude-sonnet-4-6': { write: 3.75, read: 0.3 }, 'claude-opus-4': { write: 18.75, read: 1.5 }, 'claude-opus-4-5': { write: 6.25, read: 0.5 }, 'claude-opus-4-6': { write: 6.25, read: 0.5 }, @@ -337,6 +339,7 @@ const cacheTokenValues = { */ const premiumTokenValues = { 'claude-opus-4-6': { threshold: 200000, prompt: 10, completion: 37.5 }, + 'claude-sonnet-4-6': { threshold: 200000, prompt: 6, completion: 22.5 }, }; /** diff --git a/api/package.json b/api/package.json index f26022d8d3..bc212227d3 100644 --- a/api/package.json +++ b/api/package.json @@ -44,14 +44,14 @@ "@google/genai": "^1.19.0", "@keyv/redis": "^4.3.3", "@langchain/core": "^0.3.80", - "@librechat/agents": "^3.1.38", + "@librechat/agents": "^3.1.50", "@librechat/api": "*", "@librechat/data-schemas": "*", "@microsoft/microsoft-graph-client": "^3.0.7", "@modelcontextprotocol/sdk": "^1.26.0", "@node-saml/passport-saml": "^5.1.0", "@smithy/node-http-handler": "^4.4.5", - "axios": "^1.12.1", + "axios": "^1.13.5", "bcryptjs": "^2.4.3", "compression": "^1.8.1", "connect-redis": "^8.1.0", diff --git a/api/server/controllers/AuthController.js b/api/server/controllers/AuthController.js index 22e53dcfc9..588391b535 100644 --- a/api/server/controllers/AuthController.js +++ b/api/server/controllers/AuthController.js @@ -18,7 +18,6 @@ const { findUser, } = require('~/models'); const { getGraphApiToken } = require('~/server/services/GraphTokenService'); -const { getOAuthReconnectionManager } = require('~/config'); const { getOpenIdConfig } = require('~/strategies'); const registrationController = async (req, res) => { @@ -79,7 +78,12 @@ const refreshController = async (req, res) => { try { const openIdConfig = getOpenIdConfig(); - const tokenset = await openIdClient.refreshTokenGrant(openIdConfig, refreshToken); + const refreshParams = process.env.OPENID_SCOPE ? { scope: process.env.OPENID_SCOPE } : {}; + const tokenset = await openIdClient.refreshTokenGrant( + openIdConfig, + refreshToken, + refreshParams, + ); const claims = tokenset.claims(); const { user, error, migration } = await findOpenIDUser({ findUser, @@ -161,17 +165,6 @@ const refreshController = async (req, res) => { if (session && session.expiration > new Date()) { const token = await setAuthTokens(userId, res, session); - // trigger OAuth MCP server reconnection asynchronously (best effort) - try { - void getOAuthReconnectionManager() - .reconnectServers(userId) - .catch((err) => { - logger.error('[refreshController] Error reconnecting OAuth MCP servers:', err); - }); - } catch (err) { - logger.warn(`[refreshController] Cannot attempt OAuth MCP servers reconnection:`, err); - } - res.status(200).send({ token, user }); } else if (req?.query?.retry) { // Retrying from a refresh token request that failed (401) diff --git a/api/server/controllers/PluginController.js b/api/server/controllers/PluginController.js index c5e074b8ff..279ffb15fd 100644 --- a/api/server/controllers/PluginController.js +++ b/api/server/controllers/PluginController.js @@ -8,7 +8,7 @@ const { getLogStores } = require('~/cache'); const getAvailablePluginsController = async (req, res) => { try { - const cache = getLogStores(CacheKeys.CONFIG_STORE); + const cache = getLogStores(CacheKeys.TOOL_CACHE); const cachedPlugins = await cache.get(CacheKeys.PLUGINS); if (cachedPlugins) { res.status(200).json(cachedPlugins); @@ -63,7 +63,7 @@ const getAvailableTools = async (req, res) => { logger.warn('[getAvailableTools] User ID not found in request'); return res.status(401).json({ message: 'Unauthorized' }); } - const cache = getLogStores(CacheKeys.CONFIG_STORE); + const cache = getLogStores(CacheKeys.TOOL_CACHE); const cachedToolsArray = await cache.get(CacheKeys.TOOLS); const appConfig = req.config ?? (await getAppConfig({ role: req.user?.role })); diff --git a/api/server/controllers/PluginController.spec.js b/api/server/controllers/PluginController.spec.js index d7d3f83a8b..06a51a3bd6 100644 --- a/api/server/controllers/PluginController.spec.js +++ b/api/server/controllers/PluginController.spec.js @@ -1,3 +1,4 @@ +const { CacheKeys } = require('librechat-data-provider'); const { getCachedTools, getAppConfig } = require('~/server/services/Config'); const { getLogStores } = require('~/cache'); @@ -63,6 +64,28 @@ describe('PluginController', () => { }); }); + describe('cache namespace', () => { + it('getAvailablePluginsController should use TOOL_CACHE namespace', async () => { + mockCache.get.mockResolvedValue([]); + await getAvailablePluginsController(mockReq, mockRes); + expect(getLogStores).toHaveBeenCalledWith(CacheKeys.TOOL_CACHE); + }); + + it('getAvailableTools should use TOOL_CACHE namespace', async () => { + mockCache.get.mockResolvedValue([]); + await getAvailableTools(mockReq, mockRes); + expect(getLogStores).toHaveBeenCalledWith(CacheKeys.TOOL_CACHE); + }); + + it('should NOT use CONFIG_STORE namespace for tool/plugin operations', async () => { + mockCache.get.mockResolvedValue([]); + await getAvailablePluginsController(mockReq, mockRes); + await getAvailableTools(mockReq, mockRes); + const allCalls = getLogStores.mock.calls.flat(); + expect(allCalls).not.toContain(CacheKeys.CONFIG_STORE); + }); + }); + describe('getAvailablePluginsController', () => { it('should use filterUniquePlugins to remove duplicate plugins', async () => { // Add plugins with duplicates to availableTools diff --git a/api/server/controllers/UserController.js b/api/server/controllers/UserController.js index 0f17b4d3a9..7a9dd8125e 100644 --- a/api/server/controllers/UserController.js +++ b/api/server/controllers/UserController.js @@ -36,6 +36,7 @@ const { const { updateUserPluginAuth, deleteUserPluginAuth } = require('~/server/services/PluginService'); const { verifyEmail, resendVerificationEmail } = require('~/server/services/AuthService'); const { getMCPManager, getFlowStateManager, getMCPServersRegistry } = require('~/config'); +const { invalidateCachedTools } = require('~/server/services/Config/getCachedTools'); const { needsRefresh, getNewS3URL } = require('~/server/services/Files/S3/crud'); const { processDeleteRequest } = require('~/server/services/Files/process'); const { getAppConfig } = require('~/server/services/Config'); @@ -215,6 +216,7 @@ const updateUserPluginsController = async (req, res) => { `[updateUserPluginsController] Attempting disconnect of MCP server "${serverName}" for user ${user.id} after plugin auth update.`, ); await mcpManager.disconnectUserConnection(user.id, serverName); + await invalidateCachedTools({ userId: user.id, serverName }); } } catch (disconnectError) { logger.error( diff --git a/api/server/controllers/agents/__tests__/callbacks.spec.js b/api/server/controllers/agents/__tests__/callbacks.spec.js index 103f9f3236..8bd711f9c6 100644 --- a/api/server/controllers/agents/__tests__/callbacks.spec.js +++ b/api/server/controllers/agents/__tests__/callbacks.spec.js @@ -20,7 +20,6 @@ jest.mock('@librechat/agents', () => ({ getMessageId: jest.fn(), ToolEndHandler: jest.fn(), handleToolCalls: jest.fn(), - ChatModelStreamHandler: jest.fn(), })); jest.mock('~/server/services/Files/Citations', () => ({ diff --git a/api/server/controllers/agents/__tests__/openai.spec.js b/api/server/controllers/agents/__tests__/openai.spec.js index 03a280b545..8592c79a2d 100644 --- a/api/server/controllers/agents/__tests__/openai.spec.js +++ b/api/server/controllers/agents/__tests__/openai.spec.js @@ -30,9 +30,6 @@ jest.mock('@librechat/agents', () => ({ messages: [], indexTokenCountMap: {}, }), - ChatModelStreamHandler: jest.fn().mockImplementation(() => ({ - handle: jest.fn(), - })), })); jest.mock('@librechat/api', () => ({ diff --git a/api/server/controllers/agents/__tests__/responses.unit.spec.js b/api/server/controllers/agents/__tests__/responses.unit.spec.js index 25e048f2fa..e16ca394b2 100644 --- a/api/server/controllers/agents/__tests__/responses.unit.spec.js +++ b/api/server/controllers/agents/__tests__/responses.unit.spec.js @@ -34,9 +34,6 @@ jest.mock('@librechat/agents', () => ({ messages: [], indexTokenCountMap: {}, }), - ChatModelStreamHandler: jest.fn().mockImplementation(() => ({ - handle: jest.fn(), - })), })); jest.mock('@librechat/api', () => ({ diff --git a/api/server/controllers/agents/callbacks.js b/api/server/controllers/agents/callbacks.js index 867e7f53af..0bb935795d 100644 --- a/api/server/controllers/agents/callbacks.js +++ b/api/server/controllers/agents/callbacks.js @@ -1,22 +1,13 @@ const { nanoid } = require('nanoid'); -const { Constants } = require('@librechat/agents'); const { logger } = require('@librechat/data-schemas'); +const { Constants, EnvVar, GraphEvents, ToolEndHandler } = require('@librechat/agents'); +const { Tools, StepTypes, FileContext, ErrorTypes } = require('librechat-data-provider'); const { sendEvent, GenerationJobManager, writeAttachmentEvent, createToolExecuteHandler, } = require('@librechat/api'); -const { Tools, StepTypes, FileContext, ErrorTypes } = require('librechat-data-provider'); -const { - EnvVar, - Providers, - GraphEvents, - getMessageId, - ToolEndHandler, - handleToolCalls, - ChatModelStreamHandler, -} = require('@librechat/agents'); const { processFileCitations } = require('~/server/services/Files/Citations'); const { processCodeOutput } = require('~/server/services/Files/Code/process'); const { loadAuthValues } = require('~/server/services/Tools/credentials'); @@ -57,8 +48,6 @@ class ModelEndHandler { let errorMessage; try { const agentContext = graph.getAgentContext(metadata); - const isGoogle = agentContext.provider === Providers.GOOGLE; - const streamingDisabled = !!agentContext.clientOptions?.disableStreaming; if (data?.output?.additional_kwargs?.stop_reason === 'refusal') { const info = { ...data.output.additional_kwargs }; errorMessage = JSON.stringify({ @@ -73,21 +62,6 @@ class ModelEndHandler { }); } - const toolCalls = data?.output?.tool_calls; - let hasUnprocessedToolCalls = false; - if (Array.isArray(toolCalls) && toolCalls.length > 0 && graph?.toolCallStepIds?.has) { - try { - hasUnprocessedToolCalls = toolCalls.some( - (tc) => tc?.id && !graph.toolCallStepIds.has(tc.id), - ); - } catch { - hasUnprocessedToolCalls = false; - } - } - if (isGoogle || streamingDisabled || hasUnprocessedToolCalls) { - await handleToolCalls(toolCalls, metadata, graph); - } - const usage = data?.output?.usage_metadata; if (!usage) { return this.finalize(errorMessage); @@ -98,38 +72,6 @@ class ModelEndHandler { } this.collectedUsage.push(usage); - if (!streamingDisabled) { - return this.finalize(errorMessage); - } - if (!data.output.content) { - return this.finalize(errorMessage); - } - const stepKey = graph.getStepKey(metadata); - const message_id = getMessageId(stepKey, graph) ?? ''; - if (message_id) { - await graph.dispatchRunStep(stepKey, { - type: StepTypes.MESSAGE_CREATION, - message_creation: { - message_id, - }, - }); - } - const stepId = graph.getStepIdByKey(stepKey); - const content = data.output.content; - if (typeof content === 'string') { - await graph.dispatchMessageDelta(stepId, { - content: [ - { - type: 'text', - text: content, - }, - ], - }); - } else if (content.every((c) => c.type?.startsWith('text'))) { - await graph.dispatchMessageDelta(stepId, { - content, - }); - } } catch (error) { logger.error('Error handling model end event:', error); return this.finalize(errorMessage); @@ -200,7 +142,6 @@ function getDefaultHandlers({ const handlers = { [GraphEvents.CHAT_MODEL_END]: new ModelEndHandler(collectedUsage), [GraphEvents.TOOL_END]: new ToolEndHandler(toolEndCallback, logger), - [GraphEvents.CHAT_MODEL_STREAM]: new ChatModelStreamHandler(), [GraphEvents.ON_RUN_STEP]: { /** * Handle ON_RUN_STEP event. @@ -209,6 +150,7 @@ function getDefaultHandlers({ * @param {GraphRunnableConfig['configurable']} [metadata] The runnable metadata. */ handle: async (event, data, metadata) => { + aggregateContent({ event, data }); if (data?.stepDetails.type === StepTypes.TOOL_CALLS) { await emitEvent(res, streamId, { event, data }); } else if (checkIfLastAgent(metadata?.last_agent_id, metadata?.langgraph_node)) { @@ -227,7 +169,6 @@ function getDefaultHandlers({ }, }); } - aggregateContent({ event, data }); }, }, [GraphEvents.ON_RUN_STEP_DELTA]: { @@ -238,6 +179,7 @@ function getDefaultHandlers({ * @param {GraphRunnableConfig['configurable']} [metadata] The runnable metadata. */ handle: async (event, data, metadata) => { + aggregateContent({ event, data }); if (data?.delta.type === StepTypes.TOOL_CALLS) { await emitEvent(res, streamId, { event, data }); } else if (checkIfLastAgent(metadata?.last_agent_id, metadata?.langgraph_node)) { @@ -245,7 +187,6 @@ function getDefaultHandlers({ } else if (!metadata?.hide_sequential_outputs) { await emitEvent(res, streamId, { event, data }); } - aggregateContent({ event, data }); }, }, [GraphEvents.ON_RUN_STEP_COMPLETED]: { @@ -256,6 +197,7 @@ function getDefaultHandlers({ * @param {GraphRunnableConfig['configurable']} [metadata] The runnable metadata. */ handle: async (event, data, metadata) => { + aggregateContent({ event, data }); if (data?.result != null) { await emitEvent(res, streamId, { event, data }); } else if (checkIfLastAgent(metadata?.last_agent_id, metadata?.langgraph_node)) { @@ -263,7 +205,6 @@ function getDefaultHandlers({ } else if (!metadata?.hide_sequential_outputs) { await emitEvent(res, streamId, { event, data }); } - aggregateContent({ event, data }); }, }, [GraphEvents.ON_MESSAGE_DELTA]: { @@ -274,12 +215,12 @@ function getDefaultHandlers({ * @param {GraphRunnableConfig['configurable']} [metadata] The runnable metadata. */ handle: async (event, data, metadata) => { + aggregateContent({ event, data }); if (checkIfLastAgent(metadata?.last_agent_id, metadata?.langgraph_node)) { await emitEvent(res, streamId, { event, data }); } else if (!metadata?.hide_sequential_outputs) { await emitEvent(res, streamId, { event, data }); } - aggregateContent({ event, data }); }, }, [GraphEvents.ON_REASONING_DELTA]: { @@ -290,12 +231,12 @@ function getDefaultHandlers({ * @param {GraphRunnableConfig['configurable']} [metadata] The runnable metadata. */ handle: async (event, data, metadata) => { + aggregateContent({ event, data }); if (checkIfLastAgent(metadata?.last_agent_id, metadata?.langgraph_node)) { await emitEvent(res, streamId, { event, data }); } else if (!metadata?.hide_sequential_outputs) { await emitEvent(res, streamId, { event, data }); } - aggregateContent({ event, data }); }, }, }; diff --git a/api/server/controllers/agents/client.js b/api/server/controllers/agents/client.js index c7aadc6d87..49240a6b3b 100644 --- a/api/server/controllers/agents/client.js +++ b/api/server/controllers/agents/client.js @@ -6,18 +6,22 @@ const { Tokenizer, checkAccess, buildToolSet, - logAxiosError, sanitizeTitle, + logToolError, + payloadParser, resolveHeaders, createSafeUser, initializeAgent, getBalanceConfig, getProviderConfig, + omitTitleOptions, memoryInstructions, applyContextToAgent, + createTokenCounter, GenerationJobManager, getTransactionsConfig, createMemoryProcessor, + createMultiAgentMapper, filterMalformedContentParts, } = require('@librechat/api'); const { @@ -25,9 +29,7 @@ const { Providers, TitleMethod, formatMessage, - labelContentByAgent, formatAgentMessages, - getTokenCountForMessage, createMetadataAggregator, } = require('@librechat/agents'); const { @@ -39,7 +41,6 @@ const { PermissionTypes, isAgentsEndpoint, isEphemeralAgentId, - bedrockInputSchema, removeNullishValues, } = require('librechat-data-provider'); const { spendTokens, spendStructuredTokens } = require('~/models/spendTokens'); @@ -52,183 +53,6 @@ const { loadAgent } = require('~/models/Agent'); const { getMCPManager } = require('~/config'); const db = require('~/models'); -const omitTitleOptions = new Set([ - 'stream', - 'thinking', - 'streaming', - 'clientOptions', - 'thinkingConfig', - 'thinkingBudget', - 'includeThoughts', - 'maxOutputTokens', - 'additionalModelRequestFields', -]); - -/** - * @param {ServerRequest} req - * @param {Agent} agent - * @param {string} endpoint - */ -const payloadParser = ({ req, agent, endpoint }) => { - if (isAgentsEndpoint(endpoint)) { - return { model: undefined }; - } else if (endpoint === EModelEndpoint.bedrock) { - const parsedValues = bedrockInputSchema.parse(agent.model_parameters); - if (parsedValues.thinking == null) { - parsedValues.thinking = false; - } - return parsedValues; - } - return req.body.endpointOption.model_parameters; -}; - -function createTokenCounter(encoding) { - return function (message) { - const countTokens = (text) => Tokenizer.getTokenCount(text, encoding); - return getTokenCountForMessage(message, countTokens); - }; -} - -function logToolError(graph, error, toolId) { - logAxiosError({ - error, - message: `[api/server/controllers/agents/client.js #chatCompletion] Tool Error "${toolId}"`, - }); -} - -/** Regex pattern to match agent ID suffix (____N) */ -const AGENT_SUFFIX_PATTERN = /____(\d+)$/; - -/** - * Finds the primary agent ID within a set of agent IDs. - * Primary = no suffix (____N) or lowest suffix number. - * @param {Set} agentIds - * @returns {string | null} - */ -function findPrimaryAgentId(agentIds) { - let primaryAgentId = null; - let lowestSuffixIndex = Infinity; - - for (const agentId of agentIds) { - const suffixMatch = agentId.match(AGENT_SUFFIX_PATTERN); - if (!suffixMatch) { - return agentId; - } - const suffixIndex = parseInt(suffixMatch[1], 10); - if (suffixIndex < lowestSuffixIndex) { - lowestSuffixIndex = suffixIndex; - primaryAgentId = agentId; - } - } - - return primaryAgentId; -} - -/** - * Creates a mapMethod for getMessagesForConversation that processes agent content. - * - Strips agentId/groupId metadata from all content - * - For parallel agents (addedConvo with groupId): filters each group to its primary agent - * - For handoffs (agentId without groupId): keeps all content from all agents - * - For multi-agent: applies agent labels to content - * - * The key distinction: - * - Parallel execution (addedConvo): Parts have both agentId AND groupId - * - Handoffs: Parts only have agentId, no groupId - * - * @param {Agent} primaryAgent - Primary agent configuration - * @param {Map} [agentConfigs] - Additional agent configurations - * @returns {(message: TMessage) => TMessage} Map method for processing messages - */ -function createMultiAgentMapper(primaryAgent, agentConfigs) { - const hasMultipleAgents = (primaryAgent.edges?.length ?? 0) > 0 || (agentConfigs?.size ?? 0) > 0; - - /** @type {Record | null} */ - let agentNames = null; - if (hasMultipleAgents) { - agentNames = { [primaryAgent.id]: primaryAgent.name || 'Assistant' }; - if (agentConfigs) { - for (const [agentId, agentConfig] of agentConfigs.entries()) { - agentNames[agentId] = agentConfig.name || agentConfig.id; - } - } - } - - return (message) => { - if (message.isCreatedByUser || !Array.isArray(message.content)) { - return message; - } - - // Check for metadata - const hasAgentMetadata = message.content.some((part) => part?.agentId || part?.groupId != null); - if (!hasAgentMetadata) { - return message; - } - - try { - // Build a map of groupId -> Set of agentIds, to find primary per group - /** @type {Map>} */ - const groupAgentMap = new Map(); - - for (const part of message.content) { - const groupId = part?.groupId; - const agentId = part?.agentId; - if (groupId != null && agentId) { - if (!groupAgentMap.has(groupId)) { - groupAgentMap.set(groupId, new Set()); - } - groupAgentMap.get(groupId).add(agentId); - } - } - - // For each group, find the primary agent - /** @type {Map} */ - const groupPrimaryMap = new Map(); - for (const [groupId, agentIds] of groupAgentMap) { - const primary = findPrimaryAgentId(agentIds); - if (primary) { - groupPrimaryMap.set(groupId, primary); - } - } - - /** @type {Array} */ - const filteredContent = []; - /** @type {Record} */ - const agentIdMap = {}; - - for (const part of message.content) { - const agentId = part?.agentId; - const groupId = part?.groupId; - - // Filtering logic: - // - No groupId (handoffs): always include - // - Has groupId (parallel): only include if it's the primary for that group - const isParallelPart = groupId != null; - const groupPrimary = isParallelPart ? groupPrimaryMap.get(groupId) : null; - const shouldInclude = !isParallelPart || !agentId || agentId === groupPrimary; - - if (shouldInclude) { - const newIndex = filteredContent.length; - const { agentId: _a, groupId: _g, ...cleanPart } = part; - filteredContent.push(cleanPart); - if (agentId && hasMultipleAgents) { - agentIdMap[newIndex] = agentId; - } - } - } - - const finalContent = - Object.keys(agentIdMap).length > 0 && agentNames - ? labelContentByAgent(filteredContent, agentIdMap, agentNames) - : filteredContent; - - return { ...message, content: finalContent }; - } catch (error) { - logger.error('[AgentClient] Error processing multi-agent message:', error); - return message; - } - }; -} - class AgentClient extends BaseClient { constructor(options = {}) { super(null, options); @@ -296,14 +120,9 @@ class AgentClient extends BaseClient { checkVisionRequest() {} getSaveOptions() { - // TODO: - // would need to be override settings; otherwise, model needs to be undefined - // model: this.override.model, - // instructions: this.override.instructions, - // additional_instructions: this.override.additional_instructions, let runOptions = {}; try { - runOptions = payloadParser(this.options); + runOptions = payloadParser(this.options) ?? {}; } catch (error) { logger.error( '[api/server/controllers/agents/client.js #getSaveOptions] Error parsing options', @@ -314,14 +133,14 @@ class AgentClient extends BaseClient { return removeNullishValues( Object.assign( { + spec: this.options.spec, + iconURL: this.options.iconURL, endpoint: this.options.endpoint, agent_id: this.options.agent.id, modelLabel: this.options.modelLabel, - maxContextTokens: this.options.maxContextTokens, resendFiles: this.options.resendFiles, imageDetail: this.options.imageDetail, - spec: this.options.spec, - iconURL: this.options.iconURL, + maxContextTokens: this.maxContextTokens, }, // TODO: PARSE OPTIONS BY PROVIDER, MAY CONTAIN SENSITIVE DATA runOptions, @@ -969,7 +788,7 @@ class AgentClient extends BaseClient { }, user: createSafeUser(this.options.req.user), }, - recursionLimit: agentsEConfig?.recursionLimit ?? 25, + recursionLimit: agentsEConfig?.recursionLimit ?? 50, signal: abortController.signal, streamMode: 'values', version: 'v2', diff --git a/api/server/controllers/agents/openai.js b/api/server/controllers/agents/openai.js index d4dc82174d..b334580eb1 100644 --- a/api/server/controllers/agents/openai.js +++ b/api/server/controllers/agents/openai.js @@ -1,12 +1,7 @@ const { nanoid } = require('nanoid'); const { logger } = require('@librechat/data-schemas'); +const { Callback, ToolEndHandler, formatAgentMessages } = require('@librechat/agents'); const { EModelEndpoint, ResourceType, PermissionBits } = require('librechat-data-provider'); -const { - Callback, - ToolEndHandler, - formatAgentMessages, - ChatModelStreamHandler, -} = require('@librechat/agents'); const { writeSSE, createRun, @@ -325,18 +320,8 @@ const OpenAIChatCompletionController = async (req, res) => { } }; - // Built-in handler for processing raw model stream chunks - const chatModelStreamHandler = new ChatModelStreamHandler(); - // Event handlers for OpenAI-compatible streaming const handlers = { - // Process raw model chunks and dispatch message/reasoning deltas - on_chat_model_stream: { - handle: async (event, data, metadata, graph) => { - await chatModelStreamHandler.handle(event, data, metadata, graph); - }, - }, - // Text content streaming on_message_delta: createHandler((data) => { const content = data?.delta?.content; @@ -577,7 +562,14 @@ const OpenAIChatCompletionController = async (req, res) => { writeSSE(res, '[DONE]'); res.end(); } else { - sendErrorResponse(res, 500, errorMessage, 'server_error'); + // Forward upstream provider status codes (e.g., Anthropic 400s) instead of masking as 500 + const statusCode = + typeof error?.status === 'number' && error.status >= 400 && error.status < 600 + ? error.status + : 500; + const errorType = + statusCode >= 400 && statusCode < 500 ? 'invalid_request_error' : 'server_error'; + sendErrorResponse(res, statusCode, errorMessage, errorType); } } }; diff --git a/api/server/controllers/agents/responses.js b/api/server/controllers/agents/responses.js index 3cd1dff5eb..afdb96be9f 100644 --- a/api/server/controllers/agents/responses.js +++ b/api/server/controllers/agents/responses.js @@ -1,13 +1,8 @@ const { nanoid } = require('nanoid'); const { v4: uuidv4 } = require('uuid'); const { logger } = require('@librechat/data-schemas'); +const { Callback, ToolEndHandler, formatAgentMessages } = require('@librechat/agents'); const { EModelEndpoint, ResourceType, PermissionBits } = require('librechat-data-provider'); -const { - Callback, - ToolEndHandler, - formatAgentMessages, - ChatModelStreamHandler, -} = require('@librechat/agents'); const { createRun, buildToolSet, @@ -410,9 +405,6 @@ const createResponse = async (req, res) => { // Collect usage for balance tracking const collectedUsage = []; - // Built-in handler for processing raw model stream chunks - const chatModelStreamHandler = new ChatModelStreamHandler(); - // Artifact promises for processing tool outputs /** @type {Promise[]} */ const artifactPromises = []; @@ -443,11 +435,6 @@ const createResponse = async (req, res) => { // Combine handlers const handlers = { - on_chat_model_stream: { - handle: async (event, data, metadata, graph) => { - await chatModelStreamHandler.handle(event, data, metadata, graph); - }, - }, on_message_delta: responsesHandlers.on_message_delta, on_reasoning_delta: responsesHandlers.on_reasoning_delta, on_run_step: responsesHandlers.on_run_step, @@ -570,8 +557,6 @@ const createResponse = async (req, res) => { } else { const aggregatorHandlers = createAggregatorEventHandlers(aggregator); - const chatModelStreamHandler = new ChatModelStreamHandler(); - // Collect usage for balance tracking const collectedUsage = []; @@ -596,11 +581,6 @@ const createResponse = async (req, res) => { }; const handlers = { - on_chat_model_stream: { - handle: async (event, data, metadata, graph) => { - await chatModelStreamHandler.handle(event, data, metadata, graph); - }, - }, on_message_delta: aggregatorHandlers.on_message_delta, on_reasoning_delta: aggregatorHandlers.on_reasoning_delta, on_run_step: aggregatorHandlers.on_run_step, @@ -727,7 +707,13 @@ const createResponse = async (req, res) => { writeDone(res); res.end(); } else { - sendResponsesErrorResponse(res, 500, errorMessage, 'server_error'); + // Forward upstream provider status codes (e.g., Anthropic 400s) instead of masking as 500 + const statusCode = + typeof error?.status === 'number' && error.status >= 400 && error.status < 600 + ? error.status + : 500; + const errorType = statusCode >= 400 && statusCode < 500 ? 'invalid_request' : 'server_error'; + sendResponsesErrorResponse(res, statusCode, errorMessage, errorType); } } }; diff --git a/api/server/controllers/auth/LogoutController.js b/api/server/controllers/auth/LogoutController.js index ec66316285..0b3cf262b8 100644 --- a/api/server/controllers/auth/LogoutController.js +++ b/api/server/controllers/auth/LogoutController.js @@ -22,6 +22,7 @@ const logoutController = async (req, res) => { res.clearCookie('refreshToken'); res.clearCookie('openid_access_token'); + res.clearCookie('openid_id_token'); res.clearCookie('openid_user_id'); res.clearCookie('token_provider'); const response = { message }; diff --git a/api/server/index.js b/api/server/index.js index fcd0229c9f..193eb423ad 100644 --- a/api/server/index.js +++ b/api/server/index.js @@ -251,6 +251,15 @@ process.on('uncaughtException', (err) => { return; } + if (isEnabled(process.env.CONTINUE_ON_UNCAUGHT_EXCEPTION)) { + logger.error('Unhandled error encountered. The app will continue running.', { + name: err?.name, + message: err?.message, + stack: err?.stack, + }); + return; + } + process.exit(1); }); diff --git a/api/server/middleware/buildEndpointOption.js b/api/server/middleware/buildEndpointOption.js index f56d850120..64ed8e7466 100644 --- a/api/server/middleware/buildEndpointOption.js +++ b/api/server/middleware/buildEndpointOption.js @@ -5,9 +5,11 @@ const { EModelEndpoint, isAgentsEndpoint, parseCompactConvo, + getDefaultParamsEndpoint, } = require('librechat-data-provider'); const azureAssistants = require('~/server/services/Endpoints/azureAssistants'); const assistants = require('~/server/services/Endpoints/assistants'); +const { getEndpointsConfig } = require('~/server/services/Config'); const agents = require('~/server/services/Endpoints/agents'); const { updateFilesUsage } = require('~/models'); @@ -19,9 +21,24 @@ const buildFunction = { async function buildEndpointOption(req, res, next) { const { endpoint, endpointType } = req.body; + + let endpointsConfig; + try { + endpointsConfig = await getEndpointsConfig(req); + } catch (error) { + logger.error('Error fetching endpoints config in buildEndpointOption', error); + } + + const defaultParamsEndpoint = getDefaultParamsEndpoint(endpointsConfig, endpoint); + let parsedBody; try { - parsedBody = parseCompactConvo({ endpoint, endpointType, conversation: req.body }); + parsedBody = parseCompactConvo({ + endpoint, + endpointType, + conversation: req.body, + defaultParamsEndpoint, + }); } catch (error) { logger.error(`Error parsing compact conversation for endpoint ${endpoint}`, error); logger.debug({ @@ -55,6 +72,7 @@ async function buildEndpointOption(req, res, next) { endpoint, endpointType, conversation: currentModelSpec.preset, + defaultParamsEndpoint, }); if (currentModelSpec.iconURL != null && currentModelSpec.iconURL !== '') { parsedBody.iconURL = currentModelSpec.iconURL; diff --git a/api/server/middleware/buildEndpointOption.spec.js b/api/server/middleware/buildEndpointOption.spec.js new file mode 100644 index 0000000000..eab5e2666b --- /dev/null +++ b/api/server/middleware/buildEndpointOption.spec.js @@ -0,0 +1,237 @@ +/** + * Wrap parseCompactConvo: the REAL function runs, but jest can observe + * calls and return values. Must be declared before require('./buildEndpointOption') + * so the destructured reference in the middleware captures the wrapper. + */ +jest.mock('librechat-data-provider', () => { + const actual = jest.requireActual('librechat-data-provider'); + return { + ...actual, + parseCompactConvo: jest.fn((...args) => actual.parseCompactConvo(...args)), + }; +}); + +const { EModelEndpoint, parseCompactConvo } = require('librechat-data-provider'); + +const mockBuildOptions = jest.fn((_endpoint, parsedBody) => ({ + ...parsedBody, + endpoint: _endpoint, +})); + +jest.mock('~/server/services/Endpoints/azureAssistants', () => ({ + buildOptions: mockBuildOptions, +})); +jest.mock('~/server/services/Endpoints/assistants', () => ({ + buildOptions: mockBuildOptions, +})); +jest.mock('~/server/services/Endpoints/agents', () => ({ + buildOptions: mockBuildOptions, +})); + +jest.mock('~/models', () => ({ + updateFilesUsage: jest.fn(), +})); + +const mockGetEndpointsConfig = jest.fn(); +jest.mock('~/server/services/Config', () => ({ + getEndpointsConfig: (...args) => mockGetEndpointsConfig(...args), +})); + +jest.mock('@librechat/api', () => ({ + handleError: jest.fn(), +})); + +const buildEndpointOption = require('./buildEndpointOption'); + +const createReq = (body, config = {}) => ({ + body, + config, + baseUrl: '/api/chat', +}); + +const createRes = () => ({ + status: jest.fn().mockReturnThis(), + json: jest.fn().mockReturnThis(), +}); + +describe('buildEndpointOption - defaultParamsEndpoint parsing', () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + + it('should pass defaultParamsEndpoint to parseCompactConvo and preserve maxOutputTokens', async () => { + mockGetEndpointsConfig.mockResolvedValue({ + AnthropicClaude: { + type: EModelEndpoint.custom, + customParams: { + defaultParamsEndpoint: EModelEndpoint.anthropic, + }, + }, + }); + + const req = createReq( + { + endpoint: 'AnthropicClaude', + endpointType: EModelEndpoint.custom, + model: 'anthropic/claude-opus-4.5', + temperature: 0.7, + maxOutputTokens: 8192, + topP: 0.9, + maxContextTokens: 50000, + }, + { modelSpecs: null }, + ); + + await buildEndpointOption(req, createRes(), jest.fn()); + + expect(parseCompactConvo).toHaveBeenCalledWith( + expect.objectContaining({ + defaultParamsEndpoint: EModelEndpoint.anthropic, + }), + ); + + const parsedResult = parseCompactConvo.mock.results[0].value; + expect(parsedResult.maxOutputTokens).toBe(8192); + expect(parsedResult.topP).toBe(0.9); + expect(parsedResult.temperature).toBe(0.7); + expect(parsedResult.maxContextTokens).toBe(50000); + }); + + it('should strip maxOutputTokens when no defaultParamsEndpoint is configured', async () => { + mockGetEndpointsConfig.mockResolvedValue({ + MyOpenRouter: { + type: EModelEndpoint.custom, + }, + }); + + const req = createReq( + { + endpoint: 'MyOpenRouter', + endpointType: EModelEndpoint.custom, + model: 'gpt-4o', + temperature: 0.7, + maxOutputTokens: 8192, + max_tokens: 4096, + }, + { modelSpecs: null }, + ); + + await buildEndpointOption(req, createRes(), jest.fn()); + + expect(parseCompactConvo).toHaveBeenCalledWith( + expect.objectContaining({ + defaultParamsEndpoint: undefined, + }), + ); + + const parsedResult = parseCompactConvo.mock.results[0].value; + expect(parsedResult.maxOutputTokens).toBeUndefined(); + expect(parsedResult.max_tokens).toBe(4096); + expect(parsedResult.temperature).toBe(0.7); + }); + + it('should strip bedrock region from custom endpoint without defaultParamsEndpoint', async () => { + mockGetEndpointsConfig.mockResolvedValue({ + MyEndpoint: { + type: EModelEndpoint.custom, + }, + }); + + const req = createReq( + { + endpoint: 'MyEndpoint', + endpointType: EModelEndpoint.custom, + model: 'gpt-4o', + temperature: 0.7, + region: 'us-east-1', + }, + { modelSpecs: null }, + ); + + await buildEndpointOption(req, createRes(), jest.fn()); + + const parsedResult = parseCompactConvo.mock.results[0].value; + expect(parsedResult.region).toBeUndefined(); + expect(parsedResult.temperature).toBe(0.7); + }); + + it('should pass defaultParamsEndpoint when re-parsing enforced model spec', async () => { + mockGetEndpointsConfig.mockResolvedValue({ + AnthropicClaude: { + type: EModelEndpoint.custom, + customParams: { + defaultParamsEndpoint: EModelEndpoint.anthropic, + }, + }, + }); + + const modelSpec = { + name: 'claude-opus-4.5', + preset: { + endpoint: 'AnthropicClaude', + endpointType: EModelEndpoint.custom, + model: 'anthropic/claude-opus-4.5', + temperature: 0.7, + maxOutputTokens: 8192, + maxContextTokens: 50000, + }, + }; + + const req = createReq( + { + endpoint: 'AnthropicClaude', + endpointType: EModelEndpoint.custom, + spec: 'claude-opus-4.5', + model: 'anthropic/claude-opus-4.5', + }, + { + modelSpecs: { + enforce: true, + list: [modelSpec], + }, + }, + ); + + await buildEndpointOption(req, createRes(), jest.fn()); + + const enforcedCall = parseCompactConvo.mock.calls[1]; + expect(enforcedCall[0]).toEqual( + expect.objectContaining({ + defaultParamsEndpoint: EModelEndpoint.anthropic, + }), + ); + + const enforcedResult = parseCompactConvo.mock.results[1].value; + expect(enforcedResult.maxOutputTokens).toBe(8192); + expect(enforcedResult.temperature).toBe(0.7); + expect(enforcedResult.maxContextTokens).toBe(50000); + }); + + it('should fall back to OpenAI schema when getEndpointsConfig fails', async () => { + mockGetEndpointsConfig.mockRejectedValue(new Error('Config unavailable')); + + const req = createReq( + { + endpoint: 'AnthropicClaude', + endpointType: EModelEndpoint.custom, + model: 'anthropic/claude-opus-4.5', + temperature: 0.7, + maxOutputTokens: 8192, + max_tokens: 4096, + }, + { modelSpecs: null }, + ); + + await buildEndpointOption(req, createRes(), jest.fn()); + + expect(parseCompactConvo).toHaveBeenCalledWith( + expect.objectContaining({ + defaultParamsEndpoint: undefined, + }), + ); + + const parsedResult = parseCompactConvo.mock.results[0].value; + expect(parsedResult.maxOutputTokens).toBeUndefined(); + expect(parsedResult.max_tokens).toBe(4096); + }); +}); diff --git a/api/server/middleware/requireJwtAuth.js b/api/server/middleware/requireJwtAuth.js index ed83c4773e..16b107aefc 100644 --- a/api/server/middleware/requireJwtAuth.js +++ b/api/server/middleware/requireJwtAuth.js @@ -7,16 +7,13 @@ const { isEnabled } = require('@librechat/api'); * Switches between JWT and OpenID authentication based on cookies and environment settings */ const requireJwtAuth = (req, res, next) => { - // Check if token provider is specified in cookies const cookieHeader = req.headers.cookie; const tokenProvider = cookieHeader ? cookies.parse(cookieHeader).token_provider : null; - // Use OpenID authentication if token provider is OpenID and OPENID_REUSE_TOKENS is enabled if (tokenProvider === 'openid' && isEnabled(process.env.OPENID_REUSE_TOKENS)) { return passport.authenticate('openidJwt', { session: false })(req, res, next); } - // Default to standard JWT authentication return passport.authenticate('jwt', { session: false })(req, res, next); }; diff --git a/api/server/routes/__tests__/convos.spec.js b/api/server/routes/__tests__/convos.spec.js index ef11b3cbbb..931ef006d0 100644 --- a/api/server/routes/__tests__/convos.spec.js +++ b/api/server/routes/__tests__/convos.spec.js @@ -385,6 +385,40 @@ describe('Convos Routes', () => { expect(deleteConvoSharedLink).not.toHaveBeenCalled(); }); + it('should return 400 when request body is empty (DoS prevention)', async () => { + const response = await request(app).delete('/api/convos').send({}); + + expect(response.status).toBe(400); + expect(response.body).toEqual({ error: 'no parameters provided' }); + expect(deleteConvos).not.toHaveBeenCalled(); + }); + + it('should return 400 when arg is null (DoS prevention)', async () => { + const response = await request(app).delete('/api/convos').send({ arg: null }); + + expect(response.status).toBe(400); + expect(response.body).toEqual({ error: 'no parameters provided' }); + expect(deleteConvos).not.toHaveBeenCalled(); + }); + + it('should return 400 when arg is undefined (DoS prevention)', async () => { + const response = await request(app).delete('/api/convos').send({ arg: undefined }); + + expect(response.status).toBe(400); + expect(response.body).toEqual({ error: 'no parameters provided' }); + expect(deleteConvos).not.toHaveBeenCalled(); + }); + + it('should return 400 when request body is null (DoS prevention)', async () => { + const response = await request(app) + .delete('/api/convos') + .set('Content-Type', 'application/json') + .send('null'); + + expect(response.status).toBe(400); + expect(deleteConvos).not.toHaveBeenCalled(); + }); + it('should return 500 if deleteConvoSharedLink fails', async () => { const mockConversationId = 'conv-error'; diff --git a/api/server/routes/__tests__/keys.spec.js b/api/server/routes/__tests__/keys.spec.js new file mode 100644 index 0000000000..0c96dd3bcb --- /dev/null +++ b/api/server/routes/__tests__/keys.spec.js @@ -0,0 +1,174 @@ +const express = require('express'); +const request = require('supertest'); + +jest.mock('~/models', () => ({ + updateUserKey: jest.fn(), + deleteUserKey: jest.fn(), + getUserKeyExpiry: jest.fn(), +})); + +jest.mock('~/server/middleware/requireJwtAuth', () => (req, res, next) => next()); + +jest.mock('~/server/middleware', () => ({ + requireJwtAuth: (req, res, next) => next(), +})); + +describe('Keys Routes', () => { + let app; + const { updateUserKey, deleteUserKey, getUserKeyExpiry } = require('~/models'); + + beforeAll(() => { + const keysRouter = require('../keys'); + + app = express(); + app.use(express.json()); + + app.use((req, res, next) => { + req.user = { id: 'test-user-123' }; + next(); + }); + + app.use('/api/keys', keysRouter); + }); + + beforeEach(() => { + jest.clearAllMocks(); + }); + + describe('PUT /', () => { + it('should update a user key with the authenticated user ID', async () => { + updateUserKey.mockResolvedValue({}); + + const response = await request(app) + .put('/api/keys') + .send({ name: 'openAI', value: 'sk-test-key-123', expiresAt: '2026-12-31' }); + + expect(response.status).toBe(201); + expect(updateUserKey).toHaveBeenCalledWith({ + userId: 'test-user-123', + name: 'openAI', + value: 'sk-test-key-123', + expiresAt: '2026-12-31', + }); + expect(updateUserKey).toHaveBeenCalledTimes(1); + }); + + it('should not allow userId override via request body (IDOR prevention)', async () => { + updateUserKey.mockResolvedValue({}); + + const response = await request(app).put('/api/keys').send({ + userId: 'attacker-injected-id', + name: 'openAI', + value: 'sk-attacker-key', + }); + + expect(response.status).toBe(201); + expect(updateUserKey).toHaveBeenCalledWith({ + userId: 'test-user-123', + name: 'openAI', + value: 'sk-attacker-key', + expiresAt: undefined, + }); + }); + + it('should ignore extraneous fields from request body', async () => { + updateUserKey.mockResolvedValue({}); + + const response = await request(app).put('/api/keys').send({ + name: 'openAI', + value: 'sk-test-key', + expiresAt: '2026-12-31', + _id: 'injected-mongo-id', + __v: 99, + extra: 'should-be-ignored', + }); + + expect(response.status).toBe(201); + expect(updateUserKey).toHaveBeenCalledWith({ + userId: 'test-user-123', + name: 'openAI', + value: 'sk-test-key', + expiresAt: '2026-12-31', + }); + }); + + it('should handle missing optional fields', async () => { + updateUserKey.mockResolvedValue({}); + + const response = await request(app) + .put('/api/keys') + .send({ name: 'anthropic', value: 'sk-ant-key' }); + + expect(response.status).toBe(201); + expect(updateUserKey).toHaveBeenCalledWith({ + userId: 'test-user-123', + name: 'anthropic', + value: 'sk-ant-key', + expiresAt: undefined, + }); + }); + + it('should return 400 when request body is null', async () => { + const response = await request(app) + .put('/api/keys') + .set('Content-Type', 'application/json') + .send('null'); + + expect(response.status).toBe(400); + expect(updateUserKey).not.toHaveBeenCalled(); + }); + }); + + describe('DELETE /:name', () => { + it('should delete a user key by name', async () => { + deleteUserKey.mockResolvedValue({}); + + const response = await request(app).delete('/api/keys/openAI'); + + expect(response.status).toBe(204); + expect(deleteUserKey).toHaveBeenCalledWith({ + userId: 'test-user-123', + name: 'openAI', + }); + expect(deleteUserKey).toHaveBeenCalledTimes(1); + }); + }); + + describe('DELETE /', () => { + it('should delete all keys when all=true', async () => { + deleteUserKey.mockResolvedValue({}); + + const response = await request(app).delete('/api/keys?all=true'); + + expect(response.status).toBe(204); + expect(deleteUserKey).toHaveBeenCalledWith({ + userId: 'test-user-123', + all: true, + }); + }); + + it('should return 400 when all query param is not true', async () => { + const response = await request(app).delete('/api/keys'); + + expect(response.status).toBe(400); + expect(response.body).toEqual({ error: 'Specify either all=true to delete.' }); + expect(deleteUserKey).not.toHaveBeenCalled(); + }); + }); + + describe('GET /', () => { + it('should return key expiry for a given key name', async () => { + const mockExpiry = { expiresAt: '2026-12-31' }; + getUserKeyExpiry.mockResolvedValue(mockExpiry); + + const response = await request(app).get('/api/keys?name=openAI'); + + expect(response.status).toBe(200); + expect(response.body).toEqual(mockExpiry); + expect(getUserKeyExpiry).toHaveBeenCalledWith({ + userId: 'test-user-123', + name: 'openAI', + }); + }); + }); +}); diff --git a/api/server/routes/__tests__/mcp.spec.js b/api/server/routes/__tests__/mcp.spec.js index 26d7988f0a..e87fcf8f15 100644 --- a/api/server/routes/__tests__/mcp.spec.js +++ b/api/server/routes/__tests__/mcp.spec.js @@ -1,8 +1,18 @@ +const crypto = require('crypto'); const express = require('express'); const request = require('supertest'); const mongoose = require('mongoose'); -const { MongoMemoryServer } = require('mongodb-memory-server'); +const cookieParser = require('cookie-parser'); const { getBasePath } = require('@librechat/api'); +const { MongoMemoryServer } = require('mongodb-memory-server'); + +function generateTestCsrfToken(flowId) { + return crypto + .createHmac('sha256', process.env.JWT_SECRET) + .update(flowId) + .digest('hex') + .slice(0, 32); +} const mockRegistryInstance = { getServerConfig: jest.fn(), @@ -130,6 +140,7 @@ describe('MCP Routes', () => { app = express(); app.use(express.json()); + app.use(cookieParser()); app.use((req, res, next) => { req.user = { id: 'test-user-id' }; @@ -168,12 +179,12 @@ describe('MCP Routes', () => { MCPOAuthHandler.initiateOAuthFlow.mockResolvedValue({ authorizationUrl: 'https://oauth.example.com/auth', - flowId: 'test-flow-id', + flowId: 'test-user-id:test-server', }); const response = await request(app).get('/api/mcp/test-server/oauth/initiate').query({ userId: 'test-user-id', - flowId: 'test-flow-id', + flowId: 'test-user-id:test-server', }); expect(response.status).toBe(302); @@ -190,7 +201,7 @@ describe('MCP Routes', () => { it('should return 403 when userId does not match authenticated user', async () => { const response = await request(app).get('/api/mcp/test-server/oauth/initiate').query({ userId: 'different-user-id', - flowId: 'test-flow-id', + flowId: 'test-user-id:test-server', }); expect(response.status).toBe(403); @@ -228,7 +239,7 @@ describe('MCP Routes', () => { const response = await request(app).get('/api/mcp/test-server/oauth/initiate').query({ userId: 'test-user-id', - flowId: 'test-flow-id', + flowId: 'test-user-id:test-server', }); expect(response.status).toBe(400); @@ -245,7 +256,7 @@ describe('MCP Routes', () => { const response = await request(app).get('/api/mcp/test-server/oauth/initiate').query({ userId: 'test-user-id', - flowId: 'test-flow-id', + flowId: 'test-user-id:test-server', }); expect(response.status).toBe(500); @@ -255,7 +266,7 @@ describe('MCP Routes', () => { it('should return 400 when flow state metadata is null', async () => { const mockFlowManager = { getFlowState: jest.fn().mockResolvedValue({ - id: 'test-flow-id', + id: 'test-user-id:test-server', metadata: null, }), }; @@ -265,7 +276,7 @@ describe('MCP Routes', () => { const response = await request(app).get('/api/mcp/test-server/oauth/initiate').query({ userId: 'test-user-id', - flowId: 'test-flow-id', + flowId: 'test-user-id:test-server', }); expect(response.status).toBe(400); @@ -280,7 +291,7 @@ describe('MCP Routes', () => { it('should redirect to error page when OAuth error is received', async () => { const response = await request(app).get('/api/mcp/test-server/oauth/callback').query({ error: 'access_denied', - state: 'test-flow-id', + state: 'test-user-id:test-server', }); const basePath = getBasePath(); @@ -290,7 +301,7 @@ describe('MCP Routes', () => { it('should redirect to error page when code is missing', async () => { const response = await request(app).get('/api/mcp/test-server/oauth/callback').query({ - state: 'test-flow-id', + state: 'test-user-id:test-server', }); const basePath = getBasePath(); @@ -308,15 +319,50 @@ describe('MCP Routes', () => { expect(response.headers.location).toBe(`${basePath}/oauth/error?error=missing_state`); }); - it('should redirect to error page when flow state is not found', async () => { - MCPOAuthHandler.getFlowState.mockResolvedValue(null); - + it('should redirect to error page when CSRF cookie is missing', async () => { const response = await request(app).get('/api/mcp/test-server/oauth/callback').query({ code: 'test-auth-code', - state: 'invalid-flow-id', + state: 'test-user-id:test-server', }); const basePath = getBasePath(); + expect(response.status).toBe(302); + expect(response.headers.location).toBe( + `${basePath}/oauth/error?error=csrf_validation_failed`, + ); + }); + + it('should redirect to error page when CSRF cookie does not match state', async () => { + const csrfToken = generateTestCsrfToken('different-flow-id'); + const response = await request(app) + .get('/api/mcp/test-server/oauth/callback') + .set('Cookie', [`oauth_csrf=${csrfToken}`]) + .query({ + code: 'test-auth-code', + state: 'test-user-id:test-server', + }); + const basePath = getBasePath(); + + expect(response.status).toBe(302); + expect(response.headers.location).toBe( + `${basePath}/oauth/error?error=csrf_validation_failed`, + ); + }); + + it('should redirect to error page when flow state is not found', async () => { + MCPOAuthHandler.getFlowState.mockResolvedValue(null); + const flowId = 'invalid-flow:id'; + const csrfToken = generateTestCsrfToken(flowId); + + const response = await request(app) + .get('/api/mcp/test-server/oauth/callback') + .set('Cookie', [`oauth_csrf=${csrfToken}`]) + .query({ + code: 'test-auth-code', + state: flowId, + }); + const basePath = getBasePath(); + expect(response.status).toBe(302); expect(response.headers.location).toBe(`${basePath}/oauth/error?error=invalid_state`); }); @@ -369,16 +415,22 @@ describe('MCP Routes', () => { }); setCachedTools.mockResolvedValue(); - const response = await request(app).get('/api/mcp/test-server/oauth/callback').query({ - code: 'test-auth-code', - state: 'test-flow-id', - }); + const flowId = 'test-user-id:test-server'; + const csrfToken = generateTestCsrfToken(flowId); + + const response = await request(app) + .get('/api/mcp/test-server/oauth/callback') + .set('Cookie', [`oauth_csrf=${csrfToken}`]) + .query({ + code: 'test-auth-code', + state: flowId, + }); const basePath = getBasePath(); expect(response.status).toBe(302); expect(response.headers.location).toBe(`${basePath}/oauth/success?serverName=test-server`); expect(MCPOAuthHandler.completeOAuthFlow).toHaveBeenCalledWith( - 'test-flow-id', + flowId, 'test-auth-code', mockFlowManager, {}, @@ -400,16 +452,24 @@ describe('MCP Routes', () => { 'mcp_oauth', mockTokens, ); - expect(mockFlowManager.deleteFlow).toHaveBeenCalledWith('test-flow-id', 'mcp_get_tokens'); + expect(mockFlowManager.deleteFlow).toHaveBeenCalledWith( + 'test-user-id:test-server', + 'mcp_get_tokens', + ); }); it('should redirect to error page when callback processing fails', async () => { MCPOAuthHandler.getFlowState.mockRejectedValue(new Error('Callback error')); + const flowId = 'test-user-id:test-server'; + const csrfToken = generateTestCsrfToken(flowId); - const response = await request(app).get('/api/mcp/test-server/oauth/callback').query({ - code: 'test-auth-code', - state: 'test-flow-id', - }); + const response = await request(app) + .get('/api/mcp/test-server/oauth/callback') + .set('Cookie', [`oauth_csrf=${csrfToken}`]) + .query({ + code: 'test-auth-code', + state: flowId, + }); const basePath = getBasePath(); expect(response.status).toBe(302); @@ -442,15 +502,21 @@ describe('MCP Routes', () => { getLogStores.mockReturnValue({}); require('~/config').getFlowStateManager.mockReturnValue(mockFlowManager); - const response = await request(app).get('/api/mcp/test-server/oauth/callback').query({ - code: 'test-auth-code', - state: 'test-flow-id', - }); + const flowId = 'test-user-id:test-server'; + const csrfToken = generateTestCsrfToken(flowId); + + const response = await request(app) + .get('/api/mcp/test-server/oauth/callback') + .set('Cookie', [`oauth_csrf=${csrfToken}`]) + .query({ + code: 'test-auth-code', + state: flowId, + }); const basePath = getBasePath(); expect(response.status).toBe(302); expect(response.headers.location).toBe(`${basePath}/oauth/success?serverName=test-server`); - expect(mockFlowManager.deleteFlow).toHaveBeenCalledWith('test-flow-id', 'mcp_get_tokens'); + expect(mockFlowManager.deleteFlow).toHaveBeenCalledWith(flowId, 'mcp_get_tokens'); }); it('should handle reconnection failure after OAuth', async () => { @@ -488,16 +554,22 @@ describe('MCP Routes', () => { getCachedTools.mockResolvedValue({}); setCachedTools.mockResolvedValue(); - const response = await request(app).get('/api/mcp/test-server/oauth/callback').query({ - code: 'test-auth-code', - state: 'test-flow-id', - }); + const flowId = 'test-user-id:test-server'; + const csrfToken = generateTestCsrfToken(flowId); + + const response = await request(app) + .get('/api/mcp/test-server/oauth/callback') + .set('Cookie', [`oauth_csrf=${csrfToken}`]) + .query({ + code: 'test-auth-code', + state: flowId, + }); const basePath = getBasePath(); expect(response.status).toBe(302); expect(response.headers.location).toBe(`${basePath}/oauth/success?serverName=test-server`); expect(MCPTokenStorage.storeTokens).toHaveBeenCalled(); - expect(mockFlowManager.deleteFlow).toHaveBeenCalledWith('test-flow-id', 'mcp_get_tokens'); + expect(mockFlowManager.deleteFlow).toHaveBeenCalledWith(flowId, 'mcp_get_tokens'); }); it('should redirect to error page if token storage fails', async () => { @@ -530,10 +602,16 @@ describe('MCP Routes', () => { }; require('~/config').getMCPManager.mockReturnValue(mockMcpManager); - const response = await request(app).get('/api/mcp/test-server/oauth/callback').query({ - code: 'test-auth-code', - state: 'test-flow-id', - }); + const flowId = 'test-user-id:test-server'; + const csrfToken = generateTestCsrfToken(flowId); + + const response = await request(app) + .get('/api/mcp/test-server/oauth/callback') + .set('Cookie', [`oauth_csrf=${csrfToken}`]) + .query({ + code: 'test-auth-code', + state: flowId, + }); const basePath = getBasePath(); expect(response.status).toBe(302); @@ -589,22 +667,27 @@ describe('MCP Routes', () => { clearReconnection: jest.fn(), }); - const response = await request(app).get('/api/mcp/test-server/oauth/callback').query({ - code: 'test-auth-code', - state: 'test-flow-id', - }); + const flowId = 'test-user-id:test-server'; + const csrfToken = generateTestCsrfToken(flowId); + + const response = await request(app) + .get('/api/mcp/test-server/oauth/callback') + .set('Cookie', [`oauth_csrf=${csrfToken}`]) + .query({ + code: 'test-auth-code', + state: flowId, + }); const basePath = getBasePath(); expect(response.status).toBe(302); expect(response.headers.location).toBe(`${basePath}/oauth/success?serverName=test-server`); - // Verify storeTokens was called with ORIGINAL flow state credentials expect(MCPTokenStorage.storeTokens).toHaveBeenCalledWith( expect.objectContaining({ userId: 'test-user-id', serverName: 'test-server', tokens: mockTokens, - clientInfo: clientInfo, // Uses original flow state, not any "updated" credentials + clientInfo: clientInfo, metadata: flowState.metadata, }), ); @@ -631,16 +714,21 @@ describe('MCP Routes', () => { getLogStores.mockReturnValue({}); require('~/config').getFlowStateManager.mockReturnValue(mockFlowManager); - const response = await request(app).get('/api/mcp/test-server/oauth/callback').query({ - code: 'test-auth-code', - state: 'test-flow-id', - }); + const flowId = 'test-user-id:test-server'; + const csrfToken = generateTestCsrfToken(flowId); + + const response = await request(app) + .get('/api/mcp/test-server/oauth/callback') + .set('Cookie', [`oauth_csrf=${csrfToken}`]) + .query({ + code: 'test-auth-code', + state: flowId, + }); const basePath = getBasePath(); expect(response.status).toBe(302); expect(response.headers.location).toBe(`${basePath}/oauth/success?serverName=test-server`); - // Verify completeOAuthFlow was NOT called (prevented duplicate) expect(MCPOAuthHandler.completeOAuthFlow).not.toHaveBeenCalled(); expect(MCPTokenStorage.storeTokens).not.toHaveBeenCalled(); }); @@ -755,7 +843,7 @@ describe('MCP Routes', () => { getLogStores.mockReturnValue({}); require('~/config').getFlowStateManager.mockReturnValue(mockFlowManager); - const response = await request(app).get('/api/mcp/oauth/status/test-flow-id'); + const response = await request(app).get('/api/mcp/oauth/status/test-user-id:test-server'); expect(response.status).toBe(200); expect(response.body).toEqual({ @@ -766,6 +854,13 @@ describe('MCP Routes', () => { }); }); + it('should return 403 when flowId does not match authenticated user', async () => { + const response = await request(app).get('/api/mcp/oauth/status/other-user-id:test-server'); + + expect(response.status).toBe(403); + expect(response.body).toEqual({ error: 'Access denied' }); + }); + it('should return 404 when flow is not found', async () => { const mockFlowManager = { getFlowState: jest.fn().mockResolvedValue(null), @@ -774,7 +869,7 @@ describe('MCP Routes', () => { getLogStores.mockReturnValue({}); require('~/config').getFlowStateManager.mockReturnValue(mockFlowManager); - const response = await request(app).get('/api/mcp/oauth/status/non-existent-flow'); + const response = await request(app).get('/api/mcp/oauth/status/test-user-id:non-existent'); expect(response.status).toBe(404); expect(response.body).toEqual({ error: 'Flow not found' }); @@ -788,7 +883,7 @@ describe('MCP Routes', () => { getLogStores.mockReturnValue({}); require('~/config').getFlowStateManager.mockReturnValue(mockFlowManager); - const response = await request(app).get('/api/mcp/oauth/status/error-flow-id'); + const response = await request(app).get('/api/mcp/oauth/status/test-user-id:error-server'); expect(response.status).toBe(500); expect(response.body).toEqual({ error: 'Failed to get flow status' }); @@ -1375,7 +1470,7 @@ describe('MCP Routes', () => { refresh_token: 'edge-refresh-token', }; MCPOAuthHandler.getFlowState = jest.fn().mockResolvedValue({ - id: 'test-flow-id', + id: 'test-user-id:test-server', userId: 'test-user-id', metadata: { serverUrl: 'https://example.com', @@ -1403,8 +1498,12 @@ describe('MCP Routes', () => { }; require('~/config').getMCPManager.mockReturnValue(mockMcpManager); + const flowId = 'test-user-id:test-server'; + const csrfToken = generateTestCsrfToken(flowId); + const response = await request(app) - .get('/api/mcp/test-server/oauth/callback?code=test-code&state=test-flow-id') + .get(`/api/mcp/test-server/oauth/callback?code=test-code&state=${flowId}`) + .set('Cookie', [`oauth_csrf=${csrfToken}`]) .expect(302); const basePath = getBasePath(); @@ -1424,7 +1523,7 @@ describe('MCP Routes', () => { const mockFlowManager = { getFlowState: jest.fn().mockResolvedValue({ - id: 'test-flow-id', + id: 'test-user-id:test-server', userId: 'test-user-id', metadata: { serverUrl: 'https://example.com', oauth: {} }, clientInfo: {}, @@ -1453,8 +1552,12 @@ describe('MCP Routes', () => { }; require('~/config').getMCPManager.mockReturnValue(mockMcpManager); + const flowId = 'test-user-id:test-server'; + const csrfToken = generateTestCsrfToken(flowId); + const response = await request(app) - .get('/api/mcp/test-server/oauth/callback?code=test-code&state=test-flow-id') + .get(`/api/mcp/test-server/oauth/callback?code=test-code&state=${flowId}`) + .set('Cookie', [`oauth_csrf=${csrfToken}`]) .expect(302); const basePath = getBasePath(); diff --git a/api/server/routes/actions.js b/api/server/routes/actions.js index 14474a53d3..806edc66cc 100644 --- a/api/server/routes/actions.js +++ b/api/server/routes/actions.js @@ -1,14 +1,47 @@ const express = require('express'); const jwt = require('jsonwebtoken'); -const { getAccessToken, getBasePath } = require('@librechat/api'); const { logger } = require('@librechat/data-schemas'); const { CacheKeys } = require('librechat-data-provider'); +const { + getBasePath, + getAccessToken, + setOAuthSession, + validateOAuthCsrf, + OAUTH_CSRF_COOKIE, + setOAuthCsrfCookie, + validateOAuthSession, + OAUTH_SESSION_COOKIE, +} = require('@librechat/api'); const { findToken, updateToken, createToken } = require('~/models'); +const { requireJwtAuth } = require('~/server/middleware'); const { getFlowStateManager } = require('~/config'); const { getLogStores } = require('~/cache'); const router = express.Router(); const JWT_SECRET = process.env.JWT_SECRET; +const OAUTH_CSRF_COOKIE_PATH = '/api/actions'; + +/** + * Sets a CSRF cookie binding the action OAuth flow to the current browser session. + * Must be called before the user opens the IdP authorization URL. + * + * @route POST /actions/:action_id/oauth/bind + */ +router.post('/:action_id/oauth/bind', requireJwtAuth, setOAuthSession, async (req, res) => { + try { + const { action_id } = req.params; + const user = req.user; + if (!user?.id) { + return res.status(401).json({ error: 'User not authenticated' }); + } + const flowId = `${user.id}:${action_id}`; + setOAuthCsrfCookie(res, flowId, OAUTH_CSRF_COOKIE_PATH); + res.json({ success: true }); + } catch (error) { + logger.error('[Action OAuth] Failed to set CSRF binding cookie', error); + res.status(500).json({ error: 'Failed to bind OAuth flow' }); + } +}); /** * Handles the OAuth callback and exchanges the authorization code for tokens. @@ -45,7 +78,22 @@ router.get('/:action_id/oauth/callback', async (req, res) => { await flowManager.failFlow(identifier, 'oauth', 'Invalid user ID in state parameter'); return res.redirect(`${basePath}/oauth/error?error=invalid_state`); } + identifier = `${decodedState.user}:${action_id}`; + + if ( + !validateOAuthCsrf(req, res, identifier, OAUTH_CSRF_COOKIE_PATH) && + !validateOAuthSession(req, decodedState.user) + ) { + logger.error('[Action OAuth] CSRF validation failed: no valid CSRF or session cookie', { + identifier, + hasCsrfCookie: !!req.cookies?.[OAUTH_CSRF_COOKIE], + hasSessionCookie: !!req.cookies?.[OAUTH_SESSION_COOKIE], + }); + await flowManager.failFlow(identifier, 'oauth', 'CSRF validation failed'); + return res.redirect(`${basePath}/oauth/error?error=csrf_validation_failed`); + } + const flowState = await flowManager.getFlowState(identifier, 'oauth'); if (!flowState) { throw new Error('OAuth flow not found'); @@ -71,7 +119,6 @@ router.get('/:action_id/oauth/callback', async (req, res) => { ); await flowManager.completeFlow(identifier, 'oauth', tokenData); - /** Redirect to React success page */ const serverName = flowState.metadata?.action_name || `Action ${action_id}`; const redirectUrl = `${basePath}/oauth/success?serverName=${encodeURIComponent(serverName)}`; res.redirect(redirectUrl); diff --git a/api/server/routes/convos.js b/api/server/routes/convos.js index 75b3656f59..bb9c4ebea9 100644 --- a/api/server/routes/convos.js +++ b/api/server/routes/convos.js @@ -98,7 +98,7 @@ router.get('/gen_title/:conversationId', async (req, res) => { router.delete('/', async (req, res) => { let filter = {}; - const { conversationId, source, thread_id, endpoint } = req.body.arg; + const { conversationId, source, thread_id, endpoint } = req.body?.arg ?? {}; // Prevent deletion of all conversations if (!conversationId && !source && !thread_id && !endpoint) { @@ -160,7 +160,7 @@ router.delete('/all', async (req, res) => { * @returns {object} 200 - The updated conversation object. */ router.post('/archive', validateConvoAccess, async (req, res) => { - const { conversationId, isArchived } = req.body.arg ?? {}; + const { conversationId, isArchived } = req.body?.arg ?? {}; if (!conversationId) { return res.status(400).json({ error: 'conversationId is required' }); @@ -194,7 +194,7 @@ const MAX_CONVO_TITLE_LENGTH = 1024; * @returns {object} 201 - The updated conversation object. */ router.post('/update', validateConvoAccess, async (req, res) => { - const { conversationId, title } = req.body.arg ?? {}; + const { conversationId, title } = req.body?.arg ?? {}; if (!conversationId) { return res.status(400).json({ error: 'conversationId is required' }); diff --git a/api/server/routes/keys.js b/api/server/routes/keys.js index 620e4d234b..dfd68f69c4 100644 --- a/api/server/routes/keys.js +++ b/api/server/routes/keys.js @@ -5,7 +5,11 @@ const { requireJwtAuth } = require('~/server/middleware'); const router = express.Router(); router.put('/', requireJwtAuth, async (req, res) => { - await updateUserKey({ userId: req.user.id, ...req.body }); + if (req.body == null || typeof req.body !== 'object') { + return res.status(400).send({ error: 'Invalid request body.' }); + } + const { name, value, expiresAt } = req.body; + await updateUserKey({ userId: req.user.id, name, value, expiresAt }); res.status(201).send(); }); diff --git a/api/server/routes/mcp.js b/api/server/routes/mcp.js index f01c7ff71c..2db8c2c462 100644 --- a/api/server/routes/mcp.js +++ b/api/server/routes/mcp.js @@ -8,18 +8,32 @@ const { Permissions, } = require('librechat-data-provider'); const { + getBasePath, createSafeUser, MCPOAuthHandler, MCPTokenStorage, - getBasePath, + setOAuthSession, getUserMCPAuthMap, + validateOAuthCsrf, + OAUTH_CSRF_COOKIE, + setOAuthCsrfCookie, generateCheckAccess, + validateOAuthSession, + OAUTH_SESSION_COOKIE, } = require('@librechat/api'); const { - getMCPManager, - getFlowStateManager, + createMCPServerController, + updateMCPServerController, + deleteMCPServerController, + getMCPServersList, + getMCPServerById, + getMCPTools, +} = require('~/server/controllers/mcp'); +const { getOAuthReconnectionManager, getMCPServersRegistry, + getFlowStateManager, + getMCPManager, } = require('~/config'); const { getMCPSetupData, getServerConnectionStatus } = require('~/server/services/MCP'); const { requireJwtAuth, canAccessMCPServerResource } = require('~/server/middleware'); @@ -27,20 +41,14 @@ const { findToken, updateToken, createToken, deleteTokens } = require('~/models' const { getUserPluginAuthValue } = require('~/server/services/PluginService'); const { updateMCPServerTools } = require('~/server/services/Config/mcp'); const { reinitMCPServer } = require('~/server/services/Tools/mcp'); -const { getMCPTools } = require('~/server/controllers/mcp'); const { findPluginAuthsByKeys } = require('~/models'); const { getRoleByName } = require('~/models/Role'); const { getLogStores } = require('~/cache'); -const { - createMCPServerController, - getMCPServerById, - getMCPServersList, - updateMCPServerController, - deleteMCPServerController, -} = require('~/server/controllers/mcp'); const router = Router(); +const OAUTH_CSRF_COOKIE_PATH = '/api/mcp'; + /** * Get all MCP tools available to the user * Returns only MCP tools, completely decoupled from regular LibreChat tools @@ -53,7 +61,7 @@ router.get('/tools', requireJwtAuth, async (req, res) => { * Initiate OAuth flow * This endpoint is called when the user clicks the auth link in the UI */ -router.get('/:serverName/oauth/initiate', requireJwtAuth, async (req, res) => { +router.get('/:serverName/oauth/initiate', requireJwtAuth, setOAuthSession, async (req, res) => { try { const { serverName } = req.params; const { userId, flowId } = req.query; @@ -93,7 +101,7 @@ router.get('/:serverName/oauth/initiate', requireJwtAuth, async (req, res) => { logger.debug('[MCP OAuth] OAuth flow initiated', { oauthFlowId, authorizationUrl }); - // Redirect user to the authorization URL + setOAuthCsrfCookie(res, oauthFlowId, OAUTH_CSRF_COOKIE_PATH); res.redirect(authorizationUrl); } catch (error) { logger.error('[MCP OAuth] Failed to initiate OAuth', error); @@ -138,6 +146,25 @@ router.get('/:serverName/oauth/callback', async (req, res) => { const flowId = state; logger.debug('[MCP OAuth] Using flow ID from state', { flowId }); + const flowParts = flowId.split(':'); + if (flowParts.length < 2 || !flowParts[0] || !flowParts[1]) { + logger.error('[MCP OAuth] Invalid flow ID format in state', { flowId }); + return res.redirect(`${basePath}/oauth/error?error=invalid_state`); + } + + const [flowUserId] = flowParts; + if ( + !validateOAuthCsrf(req, res, flowId, OAUTH_CSRF_COOKIE_PATH) && + !validateOAuthSession(req, flowUserId) + ) { + logger.error('[MCP OAuth] CSRF validation failed: no valid CSRF or session cookie', { + flowId, + hasCsrfCookie: !!req.cookies?.[OAUTH_CSRF_COOKIE], + hasSessionCookie: !!req.cookies?.[OAUTH_SESSION_COOKIE], + }); + return res.redirect(`${basePath}/oauth/error?error=csrf_validation_failed`); + } + const flowsCache = getLogStores(CacheKeys.FLOWS); const flowManager = getFlowStateManager(flowsCache); @@ -302,13 +329,47 @@ router.get('/oauth/tokens/:flowId', requireJwtAuth, async (req, res) => { } }); +/** + * Set CSRF binding cookie for OAuth flows initiated outside of HTTP request/response + * (e.g. during chat via SSE). The frontend should call this before opening the OAuth URL + * so the callback can verify the browser matches the flow initiator. + */ +router.post('/:serverName/oauth/bind', requireJwtAuth, setOAuthSession, async (req, res) => { + try { + const { serverName } = req.params; + const user = req.user; + + if (!user?.id) { + return res.status(401).json({ error: 'User not authenticated' }); + } + + const flowId = MCPOAuthHandler.generateFlowId(user.id, serverName); + setOAuthCsrfCookie(res, flowId, OAUTH_CSRF_COOKIE_PATH); + + res.json({ success: true }); + } catch (error) { + logger.error('[MCP OAuth] Failed to set CSRF binding cookie', error); + res.status(500).json({ error: 'Failed to bind OAuth flow' }); + } +}); + /** * Check OAuth flow status * This endpoint can be used to poll the status of an OAuth flow */ -router.get('/oauth/status/:flowId', async (req, res) => { +router.get('/oauth/status/:flowId', requireJwtAuth, async (req, res) => { try { const { flowId } = req.params; + const user = req.user; + + if (!user?.id) { + return res.status(401).json({ error: 'User not authenticated' }); + } + + if (!flowId.startsWith(`${user.id}:`) && !flowId.startsWith('system:')) { + return res.status(403).json({ error: 'Access denied' }); + } + const flowsCache = getLogStores(CacheKeys.FLOWS); const flowManager = getFlowStateManager(flowsCache); @@ -375,7 +436,7 @@ router.post('/oauth/cancel/:serverName', requireJwtAuth, async (req, res) => { * Reinitialize MCP server * This endpoint allows reinitializing a specific MCP server */ -router.post('/:serverName/reinitialize', requireJwtAuth, async (req, res) => { +router.post('/:serverName/reinitialize', requireJwtAuth, setOAuthSession, async (req, res) => { try { const { serverName } = req.params; const user = createSafeUser(req.user); @@ -421,6 +482,11 @@ router.post('/:serverName/reinitialize', requireJwtAuth, async (req, res) => { const { success, message, oauthRequired, oauthUrl } = result; + if (oauthRequired) { + const flowId = MCPOAuthHandler.generateFlowId(user.id, serverName); + setOAuthCsrfCookie(res, flowId, OAUTH_CSRF_COOKIE_PATH); + } + res.json({ success, message, diff --git a/api/server/routes/oauth.js b/api/server/routes/oauth.js index 4a2e2f70c6..f4bb5b6026 100644 --- a/api/server/routes/oauth.js +++ b/api/server/routes/oauth.js @@ -29,7 +29,7 @@ const oauthHandler = createOAuthHandler(); router.get('/error', (req, res) => { /** A single error message is pushed by passport when authentication fails. */ - const errorMessage = req.session?.messages?.pop() || 'Unknown error'; + const errorMessage = req.session?.messages?.pop() || 'Unknown OAuth error'; logger.error('Error in OAuth authentication:', { message: errorMessage, }); diff --git a/api/server/services/ActionService.js b/api/server/services/ActionService.js index 132f6f4686..5e96726a46 100644 --- a/api/server/services/ActionService.js +++ b/api/server/services/ActionService.js @@ -8,6 +8,7 @@ const { logAxiosError, refreshAccessToken, GenerationJobManager, + createSSRFSafeAgents, } = require('@librechat/api'); const { Time, @@ -133,6 +134,7 @@ async function loadActionSets(searchParams) { * @param {import('zod').ZodTypeAny | undefined} [params.zodSchema] - The Zod schema for tool input validation/definition * @param {{ oauth_client_id?: string; oauth_client_secret?: string; }} params.encrypted - The encrypted values for the action. * @param {string | null} [params.streamId] - The stream ID for resumable streams. + * @param {boolean} [params.useSSRFProtection] - When true, uses SSRF-safe HTTP agents that validate resolved IPs at connect time. * @returns { Promise unknown}> } An object with `_call` method to execute the tool input. */ async function createActionTool({ @@ -145,7 +147,9 @@ async function createActionTool({ description, encrypted, streamId = null, + useSSRFProtection = false, }) { + const ssrfAgents = useSSRFProtection ? createSSRFSafeAgents() : undefined; /** @type {(toolInput: Object | string, config: GraphRunnableConfig) => Promise} */ const _call = async (toolInput, config) => { try { @@ -324,7 +328,7 @@ async function createActionTool({ } } - const response = await preparedExecutor.execute(); + const response = await preparedExecutor.execute(ssrfAgents); if (typeof response.data === 'object') { return JSON.stringify(response.data); diff --git a/api/server/services/AuthService.js b/api/server/services/AuthService.js index a400bce8b7..ef50a365b9 100644 --- a/api/server/services/AuthService.js +++ b/api/server/services/AuthService.js @@ -7,7 +7,13 @@ const { DEFAULT_REFRESH_TOKEN_EXPIRY, } = require('@librechat/data-schemas'); const { ErrorTypes, SystemRoles, errorsToString } = require('librechat-data-provider'); -const { isEnabled, checkEmailConfig, isEmailDomainAllowed, math } = require('@librechat/api'); +const { + math, + isEnabled, + checkEmailConfig, + isEmailDomainAllowed, + shouldUseSecureCookie, +} = require('@librechat/api'); const { findUser, findToken, @@ -33,7 +39,6 @@ const domains = { server: process.env.DOMAIN_SERVER, }; -const isProduction = process.env.NODE_ENV === 'production'; const genericVerificationMessage = 'Please check your email to verify your email address.'; /** @@ -392,13 +397,13 @@ const setAuthTokens = async (userId, res, _session = null) => { res.cookie('refreshToken', refreshToken, { expires: new Date(refreshTokenExpires), httpOnly: true, - secure: isProduction, + secure: shouldUseSecureCookie(), sameSite: 'strict', }); res.cookie('token_provider', 'librechat', { expires: new Date(refreshTokenExpires), httpOnly: true, - secure: isProduction, + secure: shouldUseSecureCookie(), sameSite: 'strict', }); return token; @@ -419,7 +424,7 @@ const setAuthTokens = async (userId, res, _session = null) => { * @param {Object} req - request object (for session access) * @param {Object} res - response object * @param {string} [userId] - Optional MongoDB user ID for image path validation - * @returns {String} - access token + * @returns {String} - id_token (preferred) or access_token as the app auth token */ const setOpenIDAuthTokens = (tokenset, req, res, userId, existingRefreshToken) => { try { @@ -448,34 +453,62 @@ const setOpenIDAuthTokens = (tokenset, req, res, userId, existingRefreshToken) = return; } + /** + * Use id_token as the app authentication token (Bearer token for JWKS validation). + * The id_token is always a standard JWT signed by the IdP's JWKS keys with the app's + * client_id as audience. The access_token may be opaque or intended for a different + * audience (e.g., Microsoft Graph API), which fails JWKS validation. + * Falls back to access_token for providers where id_token is not available. + */ + const appAuthToken = tokenset.id_token || tokenset.access_token; + + /** + * Always set refresh token cookie so it survives express session expiry. + * The session cookie maxAge (SESSION_EXPIRY, default 15 min) is typically shorter + * than the OIDC token lifetime (~1 hour). Without this cookie fallback, the refresh + * token stored only in the session is lost when the session expires, causing the user + * to be signed out on the next token refresh attempt. + * The refresh token is small (opaque string) so it doesn't hit the HTTP/2 header + * size limits that motivated session storage for the larger access_token/id_token. + */ + res.cookie('refreshToken', refreshToken, { + expires: expirationDate, + httpOnly: true, + secure: shouldUseSecureCookie(), + sameSite: 'strict', + }); + /** Store tokens server-side in session to avoid large cookies */ if (req.session) { req.session.openidTokens = { accessToken: tokenset.access_token, + idToken: tokenset.id_token, refreshToken: refreshToken, expiresAt: expirationDate.getTime(), }; } else { logger.warn('[setOpenIDAuthTokens] No session available, falling back to cookies'); - res.cookie('refreshToken', refreshToken, { - expires: expirationDate, - httpOnly: true, - secure: isProduction, - sameSite: 'strict', - }); res.cookie('openid_access_token', tokenset.access_token, { expires: expirationDate, httpOnly: true, - secure: isProduction, + secure: shouldUseSecureCookie(), sameSite: 'strict', }); + if (tokenset.id_token) { + res.cookie('openid_id_token', tokenset.id_token, { + expires: expirationDate, + httpOnly: true, + secure: shouldUseSecureCookie(), + sameSite: 'strict', + }); + } } /** Small cookie to indicate token provider (required for auth middleware) */ res.cookie('token_provider', 'openid', { expires: expirationDate, httpOnly: true, - secure: isProduction, + secure: shouldUseSecureCookie(), sameSite: 'strict', }); if (userId && isEnabled(process.env.OPENID_REUSE_TOKENS)) { @@ -486,11 +519,11 @@ const setOpenIDAuthTokens = (tokenset, req, res, userId, existingRefreshToken) = res.cookie('openid_user_id', signedUserId, { expires: expirationDate, httpOnly: true, - secure: isProduction, + secure: shouldUseSecureCookie(), sameSite: 'strict', }); } - return tokenset.access_token; + return appAuthToken; } catch (error) { logger.error('[setOpenIDAuthTokens] Error in setting authentication tokens:', error); throw error; diff --git a/api/server/services/AuthService.spec.js b/api/server/services/AuthService.spec.js new file mode 100644 index 0000000000..da78f8d775 --- /dev/null +++ b/api/server/services/AuthService.spec.js @@ -0,0 +1,269 @@ +jest.mock('@librechat/data-schemas', () => ({ + logger: { info: jest.fn(), warn: jest.fn(), debug: jest.fn(), error: jest.fn() }, + DEFAULT_SESSION_EXPIRY: 900000, + DEFAULT_REFRESH_TOKEN_EXPIRY: 604800000, +})); +jest.mock('librechat-data-provider', () => ({ + ErrorTypes: {}, + SystemRoles: { USER: 'USER', ADMIN: 'ADMIN' }, + errorsToString: jest.fn(), +})); +jest.mock('@librechat/api', () => ({ + isEnabled: jest.fn((val) => val === 'true' || val === true), + checkEmailConfig: jest.fn(), + isEmailDomainAllowed: jest.fn(), + math: jest.fn((val, fallback) => (val ? Number(val) : fallback)), + shouldUseSecureCookie: jest.fn(() => false), +})); +jest.mock('~/models', () => ({ + findUser: jest.fn(), + findToken: jest.fn(), + createUser: jest.fn(), + updateUser: jest.fn(), + countUsers: jest.fn(), + getUserById: jest.fn(), + findSession: jest.fn(), + createToken: jest.fn(), + deleteTokens: jest.fn(), + deleteSession: jest.fn(), + createSession: jest.fn(), + generateToken: jest.fn(), + deleteUserById: jest.fn(), + generateRefreshToken: jest.fn(), +})); +jest.mock('~/strategies/validators', () => ({ registerSchema: { parse: jest.fn() } })); +jest.mock('~/server/services/Config', () => ({ getAppConfig: jest.fn() })); +jest.mock('~/server/utils', () => ({ sendEmail: jest.fn() })); + +const { shouldUseSecureCookie } = require('@librechat/api'); +const { setOpenIDAuthTokens } = require('./AuthService'); + +/** Helper to build a mock Express response */ +function mockResponse() { + const cookies = {}; + const res = { + cookie: jest.fn((name, value, options) => { + cookies[name] = { value, options }; + }), + _cookies: cookies, + }; + return res; +} + +/** Helper to build a mock Express request with session */ +function mockRequest(sessionData = {}) { + return { + session: { openidTokens: null, ...sessionData }, + }; +} + +describe('setOpenIDAuthTokens', () => { + const env = process.env; + + beforeEach(() => { + jest.clearAllMocks(); + process.env = { + ...env, + JWT_REFRESH_SECRET: 'test-refresh-secret', + OPENID_REUSE_TOKENS: 'true', + }; + }); + + afterAll(() => { + process.env = env; + }); + + describe('token selection (id_token vs access_token)', () => { + it('should return id_token when both id_token and access_token are present', () => { + const tokenset = { + id_token: 'the-id-token', + access_token: 'the-access-token', + refresh_token: 'the-refresh-token', + }; + const req = mockRequest(); + const res = mockResponse(); + + const result = setOpenIDAuthTokens(tokenset, req, res, 'user-123'); + expect(result).toBe('the-id-token'); + }); + + it('should return access_token when id_token is not available', () => { + const tokenset = { + access_token: 'the-access-token', + refresh_token: 'the-refresh-token', + }; + const req = mockRequest(); + const res = mockResponse(); + + const result = setOpenIDAuthTokens(tokenset, req, res, 'user-123'); + expect(result).toBe('the-access-token'); + }); + + it('should return access_token when id_token is undefined', () => { + const tokenset = { + id_token: undefined, + access_token: 'the-access-token', + refresh_token: 'the-refresh-token', + }; + const req = mockRequest(); + const res = mockResponse(); + + const result = setOpenIDAuthTokens(tokenset, req, res, 'user-123'); + expect(result).toBe('the-access-token'); + }); + + it('should return access_token when id_token is null', () => { + const tokenset = { + id_token: null, + access_token: 'the-access-token', + refresh_token: 'the-refresh-token', + }; + const req = mockRequest(); + const res = mockResponse(); + + const result = setOpenIDAuthTokens(tokenset, req, res, 'user-123'); + expect(result).toBe('the-access-token'); + }); + + it('should return id_token even when id_token and access_token differ', () => { + const tokenset = { + id_token: 'id-token-jwt-signed-by-idp', + access_token: 'opaque-graph-api-token', + refresh_token: 'refresh-token', + }; + const req = mockRequest(); + const res = mockResponse(); + + const result = setOpenIDAuthTokens(tokenset, req, res, 'user-123'); + expect(result).toBe('id-token-jwt-signed-by-idp'); + expect(result).not.toBe('opaque-graph-api-token'); + }); + }); + + describe('session token storage', () => { + it('should store the original access_token in session (not id_token)', () => { + const tokenset = { + id_token: 'the-id-token', + access_token: 'the-access-token', + refresh_token: 'the-refresh-token', + }; + const req = mockRequest(); + const res = mockResponse(); + + setOpenIDAuthTokens(tokenset, req, res, 'user-123'); + + expect(req.session.openidTokens.accessToken).toBe('the-access-token'); + expect(req.session.openidTokens.refreshToken).toBe('the-refresh-token'); + }); + }); + + describe('cookie secure flag', () => { + it('should call shouldUseSecureCookie for every cookie set', () => { + const tokenset = { + id_token: 'the-id-token', + access_token: 'the-access-token', + refresh_token: 'the-refresh-token', + }; + const req = mockRequest(); + const res = mockResponse(); + + setOpenIDAuthTokens(tokenset, req, res, 'user-123'); + + // token_provider + openid_user_id (session path, so no refreshToken/openid_access_token cookies) + const secureCalls = shouldUseSecureCookie.mock.calls.length; + expect(secureCalls).toBeGreaterThanOrEqual(2); + + // Verify all cookies use the result of shouldUseSecureCookie + for (const [, cookie] of Object.entries(res._cookies)) { + expect(cookie.options.secure).toBe(false); + } + }); + + it('should set secure: true when shouldUseSecureCookie returns true', () => { + shouldUseSecureCookie.mockReturnValue(true); + + const tokenset = { + id_token: 'the-id-token', + access_token: 'the-access-token', + refresh_token: 'the-refresh-token', + }; + const req = mockRequest(); + const res = mockResponse(); + + setOpenIDAuthTokens(tokenset, req, res, 'user-123'); + + for (const [, cookie] of Object.entries(res._cookies)) { + expect(cookie.options.secure).toBe(true); + } + }); + + it('should use shouldUseSecureCookie for cookie fallback path (no session)', () => { + shouldUseSecureCookie.mockReturnValue(false); + + const tokenset = { + id_token: 'the-id-token', + access_token: 'the-access-token', + refresh_token: 'the-refresh-token', + }; + const req = { session: null }; + const res = mockResponse(); + + setOpenIDAuthTokens(tokenset, req, res, 'user-123'); + + // In the cookie fallback path, we get: refreshToken, openid_access_token, token_provider, openid_user_id + expect(res.cookie).toHaveBeenCalledWith( + 'refreshToken', + expect.any(String), + expect.objectContaining({ secure: false }), + ); + expect(res.cookie).toHaveBeenCalledWith( + 'openid_access_token', + expect.any(String), + expect.objectContaining({ secure: false }), + ); + expect(res.cookie).toHaveBeenCalledWith( + 'token_provider', + 'openid', + expect.objectContaining({ secure: false }), + ); + }); + }); + + describe('edge cases', () => { + it('should return undefined when tokenset is null', () => { + const req = mockRequest(); + const res = mockResponse(); + const result = setOpenIDAuthTokens(null, req, res, 'user-123'); + expect(result).toBeUndefined(); + }); + + it('should return undefined when access_token is missing', () => { + const tokenset = { refresh_token: 'refresh' }; + const req = mockRequest(); + const res = mockResponse(); + const result = setOpenIDAuthTokens(tokenset, req, res, 'user-123'); + expect(result).toBeUndefined(); + }); + + it('should return undefined when no refresh token is available', () => { + const tokenset = { access_token: 'access', id_token: 'id' }; + const req = mockRequest(); + const res = mockResponse(); + const result = setOpenIDAuthTokens(tokenset, req, res, 'user-123'); + expect(result).toBeUndefined(); + }); + + it('should use existingRefreshToken when tokenset has no refresh_token', () => { + const tokenset = { + id_token: 'the-id-token', + access_token: 'the-access-token', + }; + const req = mockRequest(); + const res = mockResponse(); + + const result = setOpenIDAuthTokens(tokenset, req, res, 'user-123', 'existing-refresh'); + expect(result).toBe('the-id-token'); + expect(req.session.openidTokens.refreshToken).toBe('existing-refresh'); + }); + }); +}); diff --git a/api/server/services/Config/__tests__/getCachedTools.spec.js b/api/server/services/Config/__tests__/getCachedTools.spec.js index 48ab6e0737..38d488ed38 100644 --- a/api/server/services/Config/__tests__/getCachedTools.spec.js +++ b/api/server/services/Config/__tests__/getCachedTools.spec.js @@ -1,10 +1,92 @@ -const { ToolCacheKeys } = require('../getCachedTools'); +const { CacheKeys } = require('librechat-data-provider'); + +jest.mock('~/cache/getLogStores'); +const getLogStores = require('~/cache/getLogStores'); + +const mockCache = { get: jest.fn(), set: jest.fn(), delete: jest.fn() }; +getLogStores.mockReturnValue(mockCache); + +const { + ToolCacheKeys, + getCachedTools, + setCachedTools, + getMCPServerTools, + invalidateCachedTools, +} = require('../getCachedTools'); + +describe('getCachedTools', () => { + beforeEach(() => { + jest.clearAllMocks(); + getLogStores.mockReturnValue(mockCache); + }); -describe('getCachedTools - Cache Isolation Security', () => { describe('ToolCacheKeys.MCP_SERVER', () => { it('should generate cache keys that include userId', () => { const key = ToolCacheKeys.MCP_SERVER('user123', 'github'); expect(key).toBe('tools:mcp:user123:github'); }); }); + + describe('TOOL_CACHE namespace usage', () => { + it('getCachedTools should use TOOL_CACHE namespace', async () => { + mockCache.get.mockResolvedValue(null); + await getCachedTools(); + expect(getLogStores).toHaveBeenCalledWith(CacheKeys.TOOL_CACHE); + }); + + it('getCachedTools with MCP server options should use TOOL_CACHE namespace', async () => { + mockCache.get.mockResolvedValue({ tool1: {} }); + await getCachedTools({ userId: 'user1', serverName: 'github' }); + expect(getLogStores).toHaveBeenCalledWith(CacheKeys.TOOL_CACHE); + expect(mockCache.get).toHaveBeenCalledWith(ToolCacheKeys.MCP_SERVER('user1', 'github')); + }); + + it('setCachedTools should use TOOL_CACHE namespace', async () => { + mockCache.set.mockResolvedValue(true); + const tools = { tool1: { type: 'function' } }; + await setCachedTools(tools); + expect(getLogStores).toHaveBeenCalledWith(CacheKeys.TOOL_CACHE); + expect(mockCache.set).toHaveBeenCalledWith(ToolCacheKeys.GLOBAL, tools, expect.any(Number)); + }); + + it('setCachedTools with MCP server options should use TOOL_CACHE namespace', async () => { + mockCache.set.mockResolvedValue(true); + const tools = { tool1: { type: 'function' } }; + await setCachedTools(tools, { userId: 'user1', serverName: 'github' }); + expect(getLogStores).toHaveBeenCalledWith(CacheKeys.TOOL_CACHE); + expect(mockCache.set).toHaveBeenCalledWith( + ToolCacheKeys.MCP_SERVER('user1', 'github'), + tools, + expect.any(Number), + ); + }); + + it('invalidateCachedTools should use TOOL_CACHE namespace', async () => { + mockCache.delete.mockResolvedValue(true); + await invalidateCachedTools({ invalidateGlobal: true }); + expect(getLogStores).toHaveBeenCalledWith(CacheKeys.TOOL_CACHE); + expect(mockCache.delete).toHaveBeenCalledWith(ToolCacheKeys.GLOBAL); + }); + + it('getMCPServerTools should use TOOL_CACHE namespace', async () => { + mockCache.get.mockResolvedValue(null); + await getMCPServerTools('user1', 'github'); + expect(getLogStores).toHaveBeenCalledWith(CacheKeys.TOOL_CACHE); + expect(mockCache.get).toHaveBeenCalledWith(ToolCacheKeys.MCP_SERVER('user1', 'github')); + }); + + it('should NOT use CONFIG_STORE namespace', async () => { + mockCache.get.mockResolvedValue(null); + await getCachedTools(); + await getMCPServerTools('user1', 'github'); + mockCache.set.mockResolvedValue(true); + await setCachedTools({ tool1: {} }); + mockCache.delete.mockResolvedValue(true); + await invalidateCachedTools({ invalidateGlobal: true }); + + const allCalls = getLogStores.mock.calls.flat(); + expect(allCalls).not.toContain(CacheKeys.CONFIG_STORE); + expect(allCalls.every((key) => key === CacheKeys.TOOL_CACHE)).toBe(true); + }); + }); }); diff --git a/api/server/services/Config/getCachedTools.js b/api/server/services/Config/getCachedTools.js index cf1618a646..eb7a08305a 100644 --- a/api/server/services/Config/getCachedTools.js +++ b/api/server/services/Config/getCachedTools.js @@ -20,7 +20,7 @@ const ToolCacheKeys = { * @returns {Promise} The available tools object or null if not cached */ async function getCachedTools(options = {}) { - const cache = getLogStores(CacheKeys.CONFIG_STORE); + const cache = getLogStores(CacheKeys.TOOL_CACHE); const { userId, serverName } = options; // Return MCP server-specific tools if requested @@ -43,7 +43,7 @@ async function getCachedTools(options = {}) { * @returns {Promise} Whether the operation was successful */ async function setCachedTools(tools, options = {}) { - const cache = getLogStores(CacheKeys.CONFIG_STORE); + const cache = getLogStores(CacheKeys.TOOL_CACHE); const { userId, serverName, ttl = Time.TWELVE_HOURS } = options; // Cache by MCP server if specified (requires userId) @@ -65,7 +65,7 @@ async function setCachedTools(tools, options = {}) { * @returns {Promise} */ async function invalidateCachedTools(options = {}) { - const cache = getLogStores(CacheKeys.CONFIG_STORE); + const cache = getLogStores(CacheKeys.TOOL_CACHE); const { userId, serverName, invalidateGlobal = false } = options; const keysToDelete = []; @@ -89,7 +89,7 @@ async function invalidateCachedTools(options = {}) { * @returns {Promise} The available tools for the server */ async function getMCPServerTools(userId, serverName) { - const cache = getLogStores(CacheKeys.CONFIG_STORE); + const cache = getLogStores(CacheKeys.TOOL_CACHE); const serverTools = await cache.get(ToolCacheKeys.MCP_SERVER(userId, serverName)); if (serverTools) { diff --git a/api/server/services/Config/mcp.js b/api/server/services/Config/mcp.js index 15ea62a028..cc4e98b59e 100644 --- a/api/server/services/Config/mcp.js +++ b/api/server/services/Config/mcp.js @@ -35,7 +35,7 @@ async function updateMCPServerTools({ userId, serverName, tools }) { await setCachedTools(serverTools, { userId, serverName }); - const cache = getLogStores(CacheKeys.CONFIG_STORE); + const cache = getLogStores(CacheKeys.TOOL_CACHE); await cache.delete(CacheKeys.TOOLS); logger.debug( `[MCP Cache] Updated ${tools.length} tools for server ${serverName} (user: ${userId})`, @@ -61,7 +61,7 @@ async function mergeAppTools(appTools) { const cachedTools = await getCachedTools(); const mergedTools = { ...cachedTools, ...appTools }; await setCachedTools(mergedTools); - const cache = getLogStores(CacheKeys.CONFIG_STORE); + const cache = getLogStores(CacheKeys.TOOL_CACHE); await cache.delete(CacheKeys.TOOLS); logger.debug(`Merged ${count} app-level tools`); } catch (error) { diff --git a/api/server/services/Endpoints/agents/title.js b/api/server/services/Endpoints/agents/title.js index 1d6d359bd6..e31cdeea11 100644 --- a/api/server/services/Endpoints/agents/title.js +++ b/api/server/services/Endpoints/agents/title.js @@ -71,7 +71,7 @@ const addTitle = async (req, { text, response, client }) => { conversationId: response.conversationId, title, }, - { context: 'api/server/services/Endpoints/agents/title.js' }, + { context: 'api/server/services/Endpoints/agents/title.js', noUpsert: true }, ); } catch (error) { logger.error('Error generating title:', error); diff --git a/api/server/services/Endpoints/assistants/title.js b/api/server/services/Endpoints/assistants/title.js index a34de4d1af..1fae68cf54 100644 --- a/api/server/services/Endpoints/assistants/title.js +++ b/api/server/services/Endpoints/assistants/title.js @@ -69,7 +69,7 @@ const addTitle = async (req, { text, responseText, conversationId }) => { conversationId, title, }, - { context: 'api/server/services/Endpoints/assistants/addTitle.js' }, + { context: 'api/server/services/Endpoints/assistants/addTitle.js', noUpsert: true }, ); } catch (error) { logger.error('[addTitle] Error generating title:', error); @@ -81,7 +81,7 @@ const addTitle = async (req, { text, responseText, conversationId }) => { conversationId, title: fallbackTitle, }, - { context: 'api/server/services/Endpoints/assistants/addTitle.js' }, + { context: 'api/server/services/Endpoints/assistants/addTitle.js', noUpsert: true }, ); } }; diff --git a/api/server/services/Files/Azure/crud.js b/api/server/services/Files/Azure/crud.js index 25bd749276..8f681bd06c 100644 --- a/api/server/services/Files/Azure/crud.js +++ b/api/server/services/Files/Azure/crud.js @@ -4,7 +4,7 @@ const mime = require('mime'); const axios = require('axios'); const fetch = require('node-fetch'); const { logger } = require('@librechat/data-schemas'); -const { getAzureContainerClient } = require('@librechat/api'); +const { getAzureContainerClient, deleteRagFile } = require('@librechat/api'); const defaultBasePath = 'images'; const { AZURE_STORAGE_PUBLIC_ACCESS = 'true', AZURE_CONTAINER_NAME = 'files' } = process.env; @@ -102,6 +102,8 @@ async function getAzureURL({ fileName, basePath = defaultBasePath, userId, conta * @param {MongoFile} params.file - The file object. */ async function deleteFileFromAzure(req, file) { + await deleteRagFile({ userId: req.user.id, file }); + try { const containerClient = await getAzureContainerClient(AZURE_CONTAINER_NAME); const blobPath = file.filepath.split(`${AZURE_CONTAINER_NAME}/`)[1]; diff --git a/api/server/services/Files/Firebase/crud.js b/api/server/services/Files/Firebase/crud.js index 170df45677..d5e5a409bf 100644 --- a/api/server/services/Files/Firebase/crud.js +++ b/api/server/services/Files/Firebase/crud.js @@ -3,7 +3,7 @@ const path = require('path'); const axios = require('axios'); const fetch = require('node-fetch'); const { logger } = require('@librechat/data-schemas'); -const { getFirebaseStorage } = require('@librechat/api'); +const { getFirebaseStorage, deleteRagFile } = require('@librechat/api'); const { ref, uploadBytes, getDownloadURL, deleteObject } = require('firebase/storage'); const { getBufferMetadata } = require('~/server/utils'); @@ -167,27 +167,7 @@ function extractFirebaseFilePath(urlString) { * Throws an error if there is an issue with deletion. */ const deleteFirebaseFile = async (req, file) => { - if (file.embedded && process.env.RAG_API_URL) { - const jwtToken = req.headers.authorization.split(' ')[1]; - try { - await axios.delete(`${process.env.RAG_API_URL}/documents`, { - headers: { - Authorization: `Bearer ${jwtToken}`, - 'Content-Type': 'application/json', - accept: 'application/json', - }, - data: [file.file_id], - }); - } catch (error) { - if (error.response?.status === 404) { - logger.warn( - `[deleteFirebaseFile] Document ${file.file_id} not found in RAG API, may have been deleted already`, - ); - } else { - logger.error('[deleteFirebaseFile] Error deleting document from RAG API:', error); - } - } - } + await deleteRagFile({ userId: req.user.id, file }); const fileName = extractFirebaseFilePath(file.filepath); if (!fileName.includes(req.user.id)) { diff --git a/api/server/services/Files/Local/crud.js b/api/server/services/Files/Local/crud.js index b43ab75326..1f38a01f83 100644 --- a/api/server/services/Files/Local/crud.js +++ b/api/server/services/Files/Local/crud.js @@ -1,9 +1,9 @@ const fs = require('fs'); const path = require('path'); const axios = require('axios'); +const { deleteRagFile } = require('@librechat/api'); const { logger } = require('@librechat/data-schemas'); const { EModelEndpoint } = require('librechat-data-provider'); -const { generateShortLivedToken } = require('@librechat/api'); const { resizeImageBuffer } = require('~/server/services/Files/images/resize'); const { getBufferMetadata } = require('~/server/utils'); const paths = require('~/config/paths'); @@ -213,27 +213,7 @@ const deleteLocalFile = async (req, file) => { /** Filepath stripped of query parameters (e.g., ?manual=true) */ const cleanFilepath = file.filepath.split('?')[0]; - if (file.embedded && process.env.RAG_API_URL) { - const jwtToken = generateShortLivedToken(req.user.id); - try { - await axios.delete(`${process.env.RAG_API_URL}/documents`, { - headers: { - Authorization: `Bearer ${jwtToken}`, - 'Content-Type': 'application/json', - accept: 'application/json', - }, - data: [file.file_id], - }); - } catch (error) { - if (error.response?.status === 404) { - logger.warn( - `[deleteLocalFile] Document ${file.file_id} not found in RAG API, may have been deleted already`, - ); - } else { - logger.error('[deleteLocalFile] Error deleting document from RAG API:', error); - } - } - } + await deleteRagFile({ userId: req.user.id, file }); if (cleanFilepath.startsWith(`/uploads/${req.user.id}`)) { const userUploadDir = path.join(uploads, req.user.id); diff --git a/api/server/services/Files/S3/crud.js b/api/server/services/Files/S3/crud.js index 8dac767aa2..0721e33b29 100644 --- a/api/server/services/Files/S3/crud.js +++ b/api/server/services/Files/S3/crud.js @@ -1,9 +1,9 @@ const fs = require('fs'); const fetch = require('node-fetch'); -const { initializeS3 } = require('@librechat/api'); const { logger } = require('@librechat/data-schemas'); const { FileSources } = require('librechat-data-provider'); const { getSignedUrl } = require('@aws-sdk/s3-request-presigner'); +const { initializeS3, deleteRagFile } = require('@librechat/api'); const { PutObjectCommand, GetObjectCommand, @@ -142,6 +142,8 @@ async function saveURLToS3({ userId, URL, fileName, basePath = defaultBasePath } * @returns {Promise} */ async function deleteFileFromS3(req, file) { + await deleteRagFile({ userId: req.user.id, file }); + const key = extractKeyFromS3Url(file.filepath); const params = { Bucket: bucketName, Key: key }; if (!key.includes(req.user.id)) { diff --git a/api/server/services/MCP.js b/api/server/services/MCP.js index 8cb9932097..ad1f9f5cc3 100644 --- a/api/server/services/MCP.js +++ b/api/server/services/MCP.js @@ -11,8 +11,9 @@ const { MCPOAuthHandler, isMCPDomainAllowed, normalizeServerName, - resolveJsonSchemaRefs, + normalizeJsonSchema, GenerationJobManager, + resolveJsonSchemaRefs, } = require('@librechat/api'); const { Time, @@ -443,7 +444,7 @@ function createToolInstance({ const { description, parameters } = toolDefinition; const isGoogle = _provider === Providers.VERTEXAI || _provider === Providers.GOOGLE; - let schema = parameters ? resolveJsonSchemaRefs(parameters) : null; + let schema = parameters ? normalizeJsonSchema(resolveJsonSchemaRefs(parameters)) : null; if (!schema || (isGoogle && isEmptyObjectSchema(schema))) { schema = { diff --git a/api/server/services/MCP.spec.js b/api/server/services/MCP.spec.js index 84ec3013dd..b2caebc91e 100644 --- a/api/server/services/MCP.spec.js +++ b/api/server/services/MCP.spec.js @@ -9,30 +9,6 @@ jest.mock('@librechat/data-schemas', () => ({ }, })); -jest.mock('@langchain/core/tools', () => ({ - tool: jest.fn((fn, config) => { - const toolInstance = { _call: fn, ...config }; - return toolInstance; - }), -})); - -jest.mock('@librechat/agents', () => ({ - Providers: { - VERTEXAI: 'vertexai', - GOOGLE: 'google', - }, - StepTypes: { - TOOL_CALLS: 'tool_calls', - }, - GraphEvents: { - ON_RUN_STEP_DELTA: 'on_run_step_delta', - ON_RUN_STEP: 'on_run_step', - }, - Constants: { - CONTENT_AND_ARTIFACT: 'content_and_artifact', - }, -})); - // Create mock registry instance const mockRegistryInstance = { getOAuthServers: jest.fn(() => Promise.resolve(new Set())), @@ -46,26 +22,23 @@ const mockIsMCPDomainAllowed = jest.fn(() => Promise.resolve(true)); const mockGetAppConfig = jest.fn(() => Promise.resolve({})); jest.mock('@librechat/api', () => { - // Access mock via getter to avoid hoisting issues + const actual = jest.requireActual('@librechat/api'); return { - MCPOAuthHandler: { - generateFlowId: jest.fn(), - }, + ...actual, sendEvent: jest.fn(), - normalizeServerName: jest.fn((name) => name), - resolveJsonSchemaRefs: jest.fn((params) => params), get isMCPDomainAllowed() { return mockIsMCPDomainAllowed; }, - MCPServersRegistry: { - getInstance: () => mockRegistryInstance, + GenerationJobManager: { + emitChunk: jest.fn(), }, }; }); const { logger } = require('@librechat/data-schemas'); const { MCPOAuthHandler } = require('@librechat/api'); -const { CacheKeys } = require('librechat-data-provider'); +const { CacheKeys, Constants } = require('librechat-data-provider'); +const D = Constants.mcp_delimiter; const { createMCPTool, createMCPTools, @@ -74,24 +47,6 @@ const { getServerConnectionStatus, } = require('./MCP'); -jest.mock('librechat-data-provider', () => ({ - CacheKeys: { - FLOWS: 'flows', - }, - Constants: { - USE_PRELIM_RESPONSE_MESSAGE_ID: 'prelim_response_id', - mcp_delimiter: '::', - mcp_prefix: 'mcp_', - }, - ContentTypes: { - TEXT: 'text', - }, - isAssistantsEndpoint: jest.fn(() => false), - Time: { - TWO_MINUTES: 120000, - }, -})); - jest.mock('./Config', () => ({ loadCustomConfig: jest.fn(), get getAppConfig() { @@ -132,6 +87,7 @@ describe('tests for the new helper functions used by the MCP connection status e beforeEach(() => { jest.clearAllMocks(); + jest.spyOn(MCPOAuthHandler, 'generateFlowId'); mockGetMCPManager = require('~/config').getMCPManager; mockGetFlowStateManager = require('~/config').getFlowStateManager; @@ -735,7 +691,7 @@ describe('User parameter passing tests', () => { mockReinitMCPServer.mockResolvedValue({ tools: [{ name: 'test-tool' }], availableTools: { - 'test-tool::test-server': { + [`test-tool${D}test-server`]: { function: { description: 'Test tool', parameters: { type: 'object', properties: {} }, @@ -795,7 +751,7 @@ describe('User parameter passing tests', () => { mockReinitMCPServer.mockResolvedValue({ availableTools: { - 'test-tool::test-server': { + [`test-tool${D}test-server`]: { function: { description: 'Test tool', parameters: { type: 'object', properties: {} }, @@ -808,7 +764,7 @@ describe('User parameter passing tests', () => { await createMCPTool({ res: mockRes, user: mockUser, - toolKey: 'test-tool::test-server', + toolKey: `test-tool${D}test-server`, provider: 'openai', signal: mockSignal, userMCPAuthMap: {}, @@ -830,7 +786,7 @@ describe('User parameter passing tests', () => { const mockRes = { write: jest.fn(), flush: jest.fn() }; const availableTools = { - 'test-tool::test-server': { + [`test-tool${D}test-server`]: { function: { description: 'Cached tool', parameters: { type: 'object', properties: {} }, @@ -841,7 +797,7 @@ describe('User parameter passing tests', () => { await createMCPTool({ res: mockRes, user: mockUser, - toolKey: 'test-tool::test-server', + toolKey: `test-tool${D}test-server`, provider: 'openai', userMCPAuthMap: {}, availableTools: availableTools, @@ -864,8 +820,8 @@ describe('User parameter passing tests', () => { return Promise.resolve({ tools: [{ name: 'tool1' }, { name: 'tool2' }], availableTools: { - 'tool1::server1': { function: { description: 'Tool 1', parameters: {} } }, - 'tool2::server1': { function: { description: 'Tool 2', parameters: {} } }, + [`tool1${D}server1`]: { function: { description: 'Tool 1', parameters: {} } }, + [`tool2${D}server1`]: { function: { description: 'Tool 2', parameters: {} } }, }, }); }); @@ -896,7 +852,7 @@ describe('User parameter passing tests', () => { reinitCalls.push(params); return Promise.resolve({ availableTools: { - 'my-tool::my-server': { + [`my-tool${D}my-server`]: { function: { description: 'My Tool', parameters: {} }, }, }, @@ -906,7 +862,7 @@ describe('User parameter passing tests', () => { await createMCPTool({ res: mockRes, user: mockUser, - toolKey: 'my-tool::my-server', + toolKey: `my-tool${D}my-server`, provider: 'google', userMCPAuthMap: {}, availableTools: undefined, // Force reinit @@ -940,11 +896,11 @@ describe('User parameter passing tests', () => { const result = await createMCPTool({ res: mockRes, user: mockUser, - toolKey: 'test-tool::test-server', + toolKey: `test-tool${D}test-server`, provider: 'openai', userMCPAuthMap: {}, availableTools: { - 'test-tool::test-server': { + [`test-tool${D}test-server`]: { function: { description: 'Test tool', parameters: { type: 'object', properties: {} }, @@ -987,7 +943,7 @@ describe('User parameter passing tests', () => { mockIsMCPDomainAllowed.mockResolvedValueOnce(true); const availableTools = { - 'test-tool::test-server': { + [`test-tool${D}test-server`]: { function: { description: 'Test tool', parameters: { type: 'object', properties: {} }, @@ -998,7 +954,7 @@ describe('User parameter passing tests', () => { const result = await createMCPTool({ res: mockRes, user: mockUser, - toolKey: 'test-tool::test-server', + toolKey: `test-tool${D}test-server`, provider: 'openai', userMCPAuthMap: {}, availableTools, @@ -1027,7 +983,7 @@ describe('User parameter passing tests', () => { }); const availableTools = { - 'test-tool::test-server': { + [`test-tool${D}test-server`]: { function: { description: 'Test tool', parameters: { type: 'object', properties: {} }, @@ -1038,7 +994,7 @@ describe('User parameter passing tests', () => { const result = await createMCPTool({ res: mockRes, user: mockUser, - toolKey: 'test-tool::test-server', + toolKey: `test-tool${D}test-server`, provider: 'openai', userMCPAuthMap: {}, availableTools, @@ -1104,7 +1060,7 @@ describe('User parameter passing tests', () => { mockIsMCPDomainAllowed.mockResolvedValue(true); const availableTools = { - 'test-tool::test-server': { + [`test-tool${D}test-server`]: { function: { description: 'Test tool', parameters: { type: 'object', properties: {} }, @@ -1116,7 +1072,7 @@ describe('User parameter passing tests', () => { await createMCPTool({ res: mockRes, user: adminUser, - toolKey: 'test-tool::test-server', + toolKey: `test-tool${D}test-server`, provider: 'openai', userMCPAuthMap: {}, availableTools, @@ -1130,7 +1086,7 @@ describe('User parameter passing tests', () => { await createMCPTool({ res: mockRes, user: regularUser, - toolKey: 'test-tool::test-server', + toolKey: `test-tool${D}test-server`, provider: 'openai', userMCPAuthMap: {}, availableTools, @@ -1158,7 +1114,7 @@ describe('User parameter passing tests', () => { return Promise.resolve({ tools: [{ name: 'test' }], availableTools: { - 'test::server': { function: { description: 'Test', parameters: {} } }, + [`test${D}server`]: { function: { description: 'Test', parameters: {} } }, }, }); }); diff --git a/api/server/services/ToolService.js b/api/server/services/ToolService.js index fe7a0f40c2..eedb95bd4d 100644 --- a/api/server/services/ToolService.js +++ b/api/server/services/ToolService.js @@ -338,6 +338,7 @@ async function processRequiredActions(client, requiredActions) { } // We've already decrypted the metadata, so we can pass it directly + const _allowedDomains = appConfig?.actions?.allowedDomains; tool = await createActionTool({ userId: client.req.user.id, res: client.res, @@ -345,6 +346,7 @@ async function processRequiredActions(client, requiredActions) { requestBuilder, // Note: intentionally not passing zodSchema, name, and description for assistants API encrypted, // Pass the encrypted values for OAuth flow + useSSRFProtection: !Array.isArray(_allowedDomains) || _allowedDomains.length === 0, }); if (!tool) { logger.warn( @@ -1064,6 +1066,7 @@ async function loadAgentTools({ const zodSchema = zodSchemas[functionName]; if (requestBuilder) { + const _allowedDomains = appConfig?.actions?.allowedDomains; const tool = await createActionTool({ userId: req.user.id, res, @@ -1074,6 +1077,7 @@ async function loadAgentTools({ name: toolName, description: functionSig.description, streamId, + useSSRFProtection: !Array.isArray(_allowedDomains) || _allowedDomains.length === 0, }); if (!tool) { @@ -1335,6 +1339,7 @@ async function loadActionToolsForExecution({ }); } + const domainSeparatorRegex = new RegExp(actionDomainSeparator, 'g'); for (const toolName of actionToolNames) { let currentDomain = ''; for (const domain of domainMap.keys()) { @@ -1351,7 +1356,6 @@ async function loadActionToolsForExecution({ const { action, encrypted, zodSchemas, requestBuilders, functionSignatures } = processedActionSets.get(currentDomain); - const domainSeparatorRegex = new RegExp(actionDomainSeparator, 'g'); const normalizedDomain = currentDomain.replace(domainSeparatorRegex, '_'); const functionName = toolName.replace(`${actionDelimiter}${normalizedDomain}`, ''); const functionSig = functionSignatures.find((sig) => sig.name === functionName); @@ -1372,6 +1376,7 @@ async function loadActionToolsForExecution({ requestBuilder, name: toolName, description: functionSig?.description ?? '', + useSSRFProtection: !Array.isArray(allowedDomains) || allowedDomains.length === 0, }); if (!tool) { diff --git a/api/server/socialLogins.js b/api/server/socialLogins.js index cf67fa9436..a84c33bd52 100644 --- a/api/server/socialLogins.js +++ b/api/server/socialLogins.js @@ -1,7 +1,7 @@ const passport = require('passport'); const session = require('express-session'); -const { isEnabled } = require('@librechat/api'); const { CacheKeys } = require('librechat-data-provider'); +const { isEnabled, shouldUseSecureCookie } = require('@librechat/api'); const { logger, DEFAULT_SESSION_EXPIRY } = require('@librechat/data-schemas'); const { openIdJwtLogin, @@ -15,38 +15,6 @@ const { } = require('~/strategies'); const { getLogStores } = require('~/cache'); -/** - * Determines if secure cookies should be used. - * Only use secure cookies in production when not on localhost. - * @returns {boolean} - */ -function shouldUseSecureCookie() { - const isProduction = process.env.NODE_ENV === 'production'; - const domainServer = process.env.DOMAIN_SERVER || ''; - - let hostname = ''; - if (domainServer) { - try { - const normalized = /^https?:\/\//i.test(domainServer) - ? domainServer - : `http://${domainServer}`; - const url = new URL(normalized); - hostname = (url.hostname || '').toLowerCase(); - } catch { - // Fallback: treat DOMAIN_SERVER directly as a hostname-like string - hostname = domainServer.toLowerCase(); - } - } - - const isLocalhost = - hostname === 'localhost' || - hostname === '127.0.0.1' || - hostname === '::1' || - hostname.endsWith('.localhost'); - - return isProduction && !isLocalhost; -} - /** * Configures OpenID Connect for the application. * @param {Express.Application} app - The Express application instance. diff --git a/api/strategies/openIdJwtStrategy.js b/api/strategies/openIdJwtStrategy.js index df318ca30e..997dcec397 100644 --- a/api/strategies/openIdJwtStrategy.js +++ b/api/strategies/openIdJwtStrategy.js @@ -84,19 +84,21 @@ const openIdJwtLogin = (openIdConfig) => { /** Read tokens from session (server-side) to avoid large cookie issues */ const sessionTokens = req.session?.openidTokens; let accessToken = sessionTokens?.accessToken; + let idToken = sessionTokens?.idToken; let refreshToken = sessionTokens?.refreshToken; /** Fallback to cookies for backward compatibility */ - if (!accessToken || !refreshToken) { + if (!accessToken || !refreshToken || !idToken) { const cookieHeader = req.headers.cookie; const parsedCookies = cookieHeader ? cookies.parse(cookieHeader) : {}; accessToken = accessToken || parsedCookies.openid_access_token; + idToken = idToken || parsedCookies.openid_id_token; refreshToken = refreshToken || parsedCookies.refreshToken; } user.federatedTokens = { access_token: accessToken || rawToken, - id_token: rawToken, + id_token: idToken, refresh_token: refreshToken, expires_at: payload.exp, }; diff --git a/api/strategies/openIdJwtStrategy.spec.js b/api/strategies/openIdJwtStrategy.spec.js new file mode 100644 index 0000000000..566afe5a90 --- /dev/null +++ b/api/strategies/openIdJwtStrategy.spec.js @@ -0,0 +1,183 @@ +const { SystemRoles } = require('librechat-data-provider'); + +// --- Capture the verify callback from JwtStrategy --- +let capturedVerifyCallback; +jest.mock('passport-jwt', () => ({ + Strategy: jest.fn((_opts, verifyCallback) => { + capturedVerifyCallback = verifyCallback; + return { name: 'jwt' }; + }), + ExtractJwt: { + fromAuthHeaderAsBearerToken: jest.fn(() => 'mock-extractor'), + }, +})); +jest.mock('jwks-rsa', () => ({ + passportJwtSecret: jest.fn(() => 'mock-secret-provider'), +})); +jest.mock('https-proxy-agent', () => ({ + HttpsProxyAgent: jest.fn(), +})); +jest.mock('@librechat/data-schemas', () => ({ + logger: { info: jest.fn(), warn: jest.fn(), debug: jest.fn(), error: jest.fn() }, +})); +jest.mock('@librechat/api', () => ({ + isEnabled: jest.fn(() => false), + findOpenIDUser: jest.fn(), + math: jest.fn((val, fallback) => fallback), +})); +jest.mock('~/models', () => ({ + findUser: jest.fn(), + updateUser: jest.fn(), +})); + +const { findOpenIDUser } = require('@librechat/api'); +const { updateUser } = require('~/models'); +const openIdJwtLogin = require('./openIdJwtStrategy'); + +// Helper: build a mock openIdConfig +const mockOpenIdConfig = { + serverMetadata: () => ({ jwks_uri: 'https://example.com/.well-known/jwks.json' }), +}; + +// Helper: invoke the captured verify callback +async function invokeVerify(req, payload) { + return new Promise((resolve, reject) => { + capturedVerifyCallback(req, payload, (err, user, info) => { + if (err) { + return reject(err); + } + resolve({ user, info }); + }); + }); +} + +describe('openIdJwtStrategy – token source handling', () => { + const baseUser = { + _id: { toString: () => 'user-abc' }, + role: SystemRoles.USER, + provider: 'openid', + }; + + const payload = { sub: 'oidc-123', email: 'test@example.com', exp: 9999999999 }; + + beforeEach(() => { + jest.clearAllMocks(); + findOpenIDUser.mockResolvedValue({ user: { ...baseUser }, error: null, migration: false }); + updateUser.mockResolvedValue({}); + + // Initialize the strategy so capturedVerifyCallback is set + openIdJwtLogin(mockOpenIdConfig); + }); + + it('should read all tokens from session when available', async () => { + const req = { + headers: { authorization: 'Bearer raw-bearer-token' }, + session: { + openidTokens: { + accessToken: 'session-access', + idToken: 'session-id', + refreshToken: 'session-refresh', + }, + }, + }; + + const { user } = await invokeVerify(req, payload); + + expect(user.federatedTokens).toEqual({ + access_token: 'session-access', + id_token: 'session-id', + refresh_token: 'session-refresh', + expires_at: payload.exp, + }); + }); + + it('should fall back to cookies when session is absent', async () => { + const req = { + headers: { + authorization: 'Bearer raw-bearer-token', + cookie: + 'openid_access_token=cookie-access; openid_id_token=cookie-id; refreshToken=cookie-refresh', + }, + }; + + const { user } = await invokeVerify(req, payload); + + expect(user.federatedTokens).toEqual({ + access_token: 'cookie-access', + id_token: 'cookie-id', + refresh_token: 'cookie-refresh', + expires_at: payload.exp, + }); + }); + + it('should fall back to cookie for idToken only when session lacks it', async () => { + const req = { + headers: { + authorization: 'Bearer raw-bearer-token', + cookie: 'openid_id_token=cookie-id', + }, + session: { + openidTokens: { + accessToken: 'session-access', + // idToken intentionally missing + refreshToken: 'session-refresh', + }, + }, + }; + + const { user } = await invokeVerify(req, payload); + + expect(user.federatedTokens).toEqual({ + access_token: 'session-access', + id_token: 'cookie-id', + refresh_token: 'session-refresh', + expires_at: payload.exp, + }); + }); + + it('should use raw Bearer token as access_token fallback when neither session nor cookie has one', async () => { + const req = { + headers: { + authorization: 'Bearer raw-bearer-token', + cookie: 'openid_id_token=cookie-id; refreshToken=cookie-refresh', + }, + }; + + const { user } = await invokeVerify(req, payload); + + expect(user.federatedTokens.access_token).toBe('raw-bearer-token'); + expect(user.federatedTokens.id_token).toBe('cookie-id'); + expect(user.federatedTokens.refresh_token).toBe('cookie-refresh'); + }); + + it('should set id_token to undefined when not available in session or cookies', async () => { + const req = { + headers: { + authorization: 'Bearer raw-bearer-token', + cookie: 'openid_access_token=cookie-access; refreshToken=cookie-refresh', + }, + }; + + const { user } = await invokeVerify(req, payload); + + expect(user.federatedTokens.access_token).toBe('cookie-access'); + expect(user.federatedTokens.id_token).toBeUndefined(); + expect(user.federatedTokens.refresh_token).toBe('cookie-refresh'); + }); + + it('should keep id_token and access_token as distinct values from cookies', async () => { + const req = { + headers: { + authorization: 'Bearer raw-bearer-token', + cookie: + 'openid_access_token=the-access-token; openid_id_token=the-id-token; refreshToken=the-refresh', + }, + }; + + const { user } = await invokeVerify(req, payload); + + expect(user.federatedTokens.access_token).toBe('the-access-token'); + expect(user.federatedTokens.id_token).toBe('the-id-token'); + expect(user.federatedTokens.access_token).not.toBe(user.federatedTokens.id_token); + }); +}); diff --git a/api/strategies/openidStrategy.js b/api/strategies/openidStrategy.js index 84458ce992..198c8735ae 100644 --- a/api/strategies/openidStrategy.js +++ b/api/strategies/openidStrategy.js @@ -287,6 +287,77 @@ function convertToUsername(input, defaultValue = '') { return defaultValue; } +/** + * Resolve Azure AD groups when group overage is in effect (groups moved to _claim_names/_claim_sources). + * + * NOTE: Microsoft recommends treating _claim_names/_claim_sources as a signal only and using Microsoft Graph + * to resolve group membership instead of calling the endpoint in _claim_sources directly. + * + * @param {string} accessToken - Access token with Microsoft Graph permissions + * @returns {Promise} Resolved group IDs or null on failure + * @see https://learn.microsoft.com/en-us/entra/identity-platform/access-token-claims-reference#groups-overage-claim + * @see https://learn.microsoft.com/en-us/graph/api/directoryobject-getmemberobjects + */ +async function resolveGroupsFromOverage(accessToken) { + try { + if (!accessToken) { + logger.error('[openidStrategy] Access token missing; cannot resolve group overage'); + return null; + } + + // Use /me/getMemberObjects so least-privileged delegated permission User.Read is sufficient + // when resolving the signed-in user's group membership. + const url = 'https://graph.microsoft.com/v1.0/me/getMemberObjects'; + + logger.debug( + `[openidStrategy] Detected group overage, resolving groups via Microsoft Graph getMemberObjects: ${url}`, + ); + + const fetchOptions = { + method: 'POST', + headers: { + Authorization: `Bearer ${accessToken}`, + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ securityEnabledOnly: false }), + }; + + if (process.env.PROXY) { + const { ProxyAgent } = undici; + fetchOptions.dispatcher = new ProxyAgent(process.env.PROXY); + } + + const response = await undici.fetch(url, fetchOptions); + if (!response.ok) { + logger.error( + `[openidStrategy] Failed to resolve groups via Microsoft Graph getMemberObjects: HTTP ${response.status} ${response.statusText}`, + ); + return null; + } + + const data = await response.json(); + const values = Array.isArray(data?.value) ? data.value : null; + if (!values) { + logger.error( + '[openidStrategy] Unexpected response format when resolving groups via Microsoft Graph getMemberObjects', + ); + return null; + } + const groupIds = values.filter((id) => typeof id === 'string'); + + logger.debug( + `[openidStrategy] Successfully resolved ${groupIds.length} groups via Microsoft Graph getMemberObjects`, + ); + return groupIds; + } catch (err) { + logger.error( + '[openidStrategy] Error resolving groups via Microsoft Graph getMemberObjects:', + err, + ); + return null; + } +} + /** * Process OpenID authentication tokenset and userinfo * This is the core logic extracted from the passport strategy callback @@ -350,6 +421,25 @@ async function processOpenIDAuth(tokenset, existingUsersOnly = false) { } let roles = get(decodedToken, requiredRoleParameterPath); + + // Handle Azure AD group overage for ID token groups: when hasgroups or _claim_* indicate overage, + // resolve groups via Microsoft Graph instead of relying on token group values. + if ( + !Array.isArray(roles) && + typeof roles !== 'string' && + requiredRoleTokenKind === 'id' && + requiredRoleParameterPath === 'groups' && + decodedToken && + (decodedToken.hasgroups || + (decodedToken._claim_names?.groups && + decodedToken._claim_sources?.[decodedToken._claim_names.groups])) + ) { + const overageGroups = await resolveGroupsFromOverage(tokenset.access_token); + if (overageGroups) { + roles = overageGroups; + } + } + if (!roles || (!Array.isArray(roles) && typeof roles !== 'string')) { logger.error( `[openidStrategy] Key '${requiredRoleParameterPath}' not found in ${requiredRoleTokenKind} token!`, @@ -361,7 +451,9 @@ async function processOpenIDAuth(tokenset, existingUsersOnly = false) { throw new Error(`You must have ${rolesList} role to log in.`); } - if (!requiredRoles.some((role) => roles.includes(role))) { + const roleValues = Array.isArray(roles) ? roles : [roles]; + + if (!requiredRoles.some((role) => roleValues.includes(role))) { const rolesList = requiredRoles.length === 1 ? `"${requiredRoles[0]}"` @@ -498,6 +590,7 @@ async function processOpenIDAuth(tokenset, existingUsersOnly = false) { tokenset, federatedTokens: { access_token: tokenset.access_token, + id_token: tokenset.id_token, refresh_token: tokenset.refresh_token, expires_at: tokenset.expires_at, }, diff --git a/api/strategies/openidStrategy.spec.js b/api/strategies/openidStrategy.spec.js index ada27cca17..b1dc54d77b 100644 --- a/api/strategies/openidStrategy.spec.js +++ b/api/strategies/openidStrategy.spec.js @@ -1,5 +1,6 @@ const fetch = require('node-fetch'); const jwtDecode = require('jsonwebtoken/decode'); +const undici = require('undici'); const { ErrorTypes } = require('librechat-data-provider'); const { findUser, createUser, updateUser } = require('~/models'); const { setupOpenId } = require('./openidStrategy'); @@ -7,6 +8,10 @@ const { setupOpenId } = require('./openidStrategy'); // --- Mocks --- jest.mock('node-fetch'); jest.mock('jsonwebtoken/decode'); +jest.mock('undici', () => ({ + fetch: jest.fn(), + ProxyAgent: jest.fn(), +})); jest.mock('~/server/services/Files/strategies', () => ({ getStrategyFunctions: jest.fn(() => ({ saveBuffer: jest.fn().mockResolvedValue('/fake/path/to/avatar.png'), @@ -360,6 +365,25 @@ describe('setupOpenId', () => { expect(details.message).toBe('You must have "requiredRole" role to log in.'); }); + it('should not treat substring matches in string roles as satisfying required role', async () => { + // Arrange – override required role to "read" then re-setup + process.env.OPENID_REQUIRED_ROLE = 'read'; + await setupOpenId(); + verifyCallback = require('openid-client/passport').__getVerifyCallbackByName('openid'); + + // Token contains "bread" which *contains* "read" as a substring + jwtDecode.mockReturnValue({ + roles: 'bread', + }); + + // Act + const { user, details } = await validate(tokenset); + + // Assert – verify that substring match does not grant access + expect(user).toBe(false); + expect(details.message).toBe('You must have "read" role to log in.'); + }); + it('should allow login when single required role is present (backward compatibility)', async () => { // Arrange – ensure single role configuration (as set in beforeEach) // OPENID_REQUIRED_ROLE = 'requiredRole' @@ -378,6 +402,292 @@ describe('setupOpenId', () => { expect(createUser).toHaveBeenCalled(); }); + describe('group overage and groups handling', () => { + it.each([ + ['groups array contains required group', ['group-required', 'other-group'], true, undefined], + [ + 'groups array missing required group', + ['other-group'], + false, + 'You must have "group-required" role to log in.', + ], + ['groups string equals required group', 'group-required', true, undefined], + [ + 'groups string is other group', + 'other-group', + false, + 'You must have "group-required" role to log in.', + ], + ])( + 'uses groups claim directly when %s (no overage)', + async (_label, groupsClaim, expectedAllowed, expectedMessage) => { + process.env.OPENID_REQUIRED_ROLE = 'group-required'; + process.env.OPENID_REQUIRED_ROLE_PARAMETER_PATH = 'groups'; + process.env.OPENID_REQUIRED_ROLE_TOKEN_KIND = 'id'; + + jwtDecode.mockReturnValue({ + groups: groupsClaim, + permissions: ['admin'], + }); + + await setupOpenId(); + verifyCallback = require('openid-client/passport').__getVerifyCallbackByName('openid'); + + const { user, details } = await validate(tokenset); + + expect(undici.fetch).not.toHaveBeenCalled(); + expect(Boolean(user)).toBe(expectedAllowed); + expect(details?.message).toBe(expectedMessage); + }, + ); + + it.each([ + ['token kind is not id', { kind: 'access', path: 'groups', decoded: { hasgroups: true } }], + ['parameter path is not groups', { kind: 'id', path: 'roles', decoded: { hasgroups: true } }], + ['decoded token is falsy', { kind: 'id', path: 'groups', decoded: null }], + [ + 'no overage indicators in decoded token', + { + kind: 'id', + path: 'groups', + decoded: { + permissions: ['admin'], + }, + }, + ], + [ + 'only _claim_names present (no _claim_sources)', + { + kind: 'id', + path: 'groups', + decoded: { + _claim_names: { groups: 'src1' }, + permissions: ['admin'], + }, + }, + ], + [ + 'only _claim_sources present (no _claim_names)', + { + kind: 'id', + path: 'groups', + decoded: { + _claim_sources: { src1: { endpoint: 'https://graph.windows.net/ignored' } }, + permissions: ['admin'], + }, + }, + ], + ])('does not attempt overage resolution when %s', async (_label, cfg) => { + process.env.OPENID_REQUIRED_ROLE = 'group-required'; + process.env.OPENID_REQUIRED_ROLE_PARAMETER_PATH = cfg.path; + process.env.OPENID_REQUIRED_ROLE_TOKEN_KIND = cfg.kind; + + jwtDecode.mockReturnValue(cfg.decoded); + + await setupOpenId(); + verifyCallback = require('openid-client/passport').__getVerifyCallbackByName('openid'); + + const { user, details } = await validate(tokenset); + + expect(undici.fetch).not.toHaveBeenCalled(); + expect(user).toBe(false); + expect(details.message).toBe('You must have "group-required" role to log in.'); + const { logger } = require('@librechat/data-schemas'); + const expectedTokenKind = cfg.kind === 'access' ? 'access token' : 'id token'; + expect(logger.error).toHaveBeenCalledWith( + expect.stringContaining(`Key '${cfg.path}' not found in ${expectedTokenKind}!`), + ); + }); + }); + + describe('resolving groups via Microsoft Graph', () => { + it('denies login and does not call Graph when access token is missing', async () => { + process.env.OPENID_REQUIRED_ROLE = 'group-required'; + process.env.OPENID_REQUIRED_ROLE_PARAMETER_PATH = 'groups'; + process.env.OPENID_REQUIRED_ROLE_TOKEN_KIND = 'id'; + + const { logger } = require('@librechat/data-schemas'); + + jwtDecode.mockReturnValue({ + hasgroups: true, + permissions: ['admin'], + }); + + await setupOpenId(); + verifyCallback = require('openid-client/passport').__getVerifyCallbackByName('openid'); + + const tokensetWithoutAccess = { + ...tokenset, + access_token: undefined, + }; + + const { user, details } = await validate(tokensetWithoutAccess); + + expect(user).toBe(false); + expect(details.message).toBe('You must have "group-required" role to log in.'); + + expect(undici.fetch).not.toHaveBeenCalled(); + expect(logger.error).toHaveBeenCalledWith( + expect.stringContaining('Access token missing; cannot resolve group overage'), + ); + }); + + it.each([ + [ + 'Graph returns HTTP error', + async () => ({ + ok: false, + status: 403, + statusText: 'Forbidden', + json: async () => ({}), + }), + [ + '[openidStrategy] Failed to resolve groups via Microsoft Graph getMemberObjects: HTTP 403 Forbidden', + ], + ], + [ + 'Graph network error', + async () => { + throw new Error('network error'); + }, + [ + '[openidStrategy] Error resolving groups via Microsoft Graph getMemberObjects:', + expect.any(Error), + ], + ], + [ + 'Graph returns unexpected shape (no value)', + async () => ({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({}), + }), + [ + '[openidStrategy] Unexpected response format when resolving groups via Microsoft Graph getMemberObjects', + ], + ], + [ + 'Graph returns invalid value type', + async () => ({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ value: 'not-an-array' }), + }), + [ + '[openidStrategy] Unexpected response format when resolving groups via Microsoft Graph getMemberObjects', + ], + ], + ])( + 'denies login when overage resolution fails because %s', + async (_label, setupFetch, expectedErrorArgs) => { + process.env.OPENID_REQUIRED_ROLE = 'group-required'; + process.env.OPENID_REQUIRED_ROLE_PARAMETER_PATH = 'groups'; + process.env.OPENID_REQUIRED_ROLE_TOKEN_KIND = 'id'; + + const { logger } = require('@librechat/data-schemas'); + + jwtDecode.mockReturnValue({ + hasgroups: true, + permissions: ['admin'], + }); + + await setupOpenId(); + verifyCallback = require('openid-client/passport').__getVerifyCallbackByName('openid'); + + undici.fetch.mockImplementation(setupFetch); + + const { user, details } = await validate(tokenset); + + expect(undici.fetch).toHaveBeenCalled(); + expect(user).toBe(false); + expect(details.message).toBe('You must have "group-required" role to log in.'); + + expect(logger.error).toHaveBeenCalledWith(...expectedErrorArgs); + }, + ); + + it.each([ + [ + 'hasgroups overage and Graph contains required group', + { + hasgroups: true, + }, + ['group-required', 'some-other-group'], + true, + ], + [ + '_claim_* overage and Graph contains required group', + { + _claim_names: { groups: 'src1' }, + _claim_sources: { src1: { endpoint: 'https://graph.windows.net/ignored' } }, + }, + ['group-required', 'some-other-group'], + true, + ], + [ + 'hasgroups overage and Graph does NOT contain required group', + { + hasgroups: true, + }, + ['some-other-group'], + false, + ], + [ + '_claim_* overage and Graph does NOT contain required group', + { + _claim_names: { groups: 'src1' }, + _claim_sources: { src1: { endpoint: 'https://graph.windows.net/ignored' } }, + }, + ['some-other-group'], + false, + ], + ])( + 'resolves groups via Microsoft Graph when %s', + async (_label, decodedTokenValue, graphGroups, expectedAllowed) => { + process.env.OPENID_REQUIRED_ROLE = 'group-required'; + process.env.OPENID_REQUIRED_ROLE_PARAMETER_PATH = 'groups'; + process.env.OPENID_REQUIRED_ROLE_TOKEN_KIND = 'id'; + + const { logger } = require('@librechat/data-schemas'); + + jwtDecode.mockReturnValue(decodedTokenValue); + + await setupOpenId(); + verifyCallback = require('openid-client/passport').__getVerifyCallbackByName('openid'); + + undici.fetch.mockResolvedValue({ + ok: true, + status: 200, + statusText: 'OK', + json: async () => ({ + value: graphGroups, + }), + }); + + const { user } = await validate(tokenset); + + expect(undici.fetch).toHaveBeenCalledWith( + 'https://graph.microsoft.com/v1.0/me/getMemberObjects', + expect.objectContaining({ + method: 'POST', + headers: expect.objectContaining({ + Authorization: `Bearer ${tokenset.access_token}`, + }), + }), + ); + expect(Boolean(user)).toBe(expectedAllowed); + + expect(logger.debug).toHaveBeenCalledWith( + expect.stringContaining( + `Successfully resolved ${graphGroups.length} groups via Microsoft Graph getMemberObjects`, + ), + ); + }, + ); + }); + it('should attempt to download and save the avatar if picture is provided', async () => { // Act const { user } = await validate(tokenset); @@ -465,10 +775,11 @@ describe('setupOpenId', () => { }); it('should attach federatedTokens to user object for token propagation', async () => { - // Arrange - setup tokenset with access token, refresh token, and expiration + // Arrange - setup tokenset with access token, id token, refresh token, and expiration const tokensetWithTokens = { ...tokenset, access_token: 'mock_access_token_abc123', + id_token: 'mock_id_token_def456', refresh_token: 'mock_refresh_token_xyz789', expires_at: 1234567890, }; @@ -480,16 +791,37 @@ describe('setupOpenId', () => { expect(user.federatedTokens).toBeDefined(); expect(user.federatedTokens).toEqual({ access_token: 'mock_access_token_abc123', + id_token: 'mock_id_token_def456', refresh_token: 'mock_refresh_token_xyz789', expires_at: 1234567890, }); }); + it('should include id_token in federatedTokens distinct from access_token', async () => { + // Arrange - use different values for access_token and id_token + const tokensetWithTokens = { + ...tokenset, + access_token: 'the_access_token', + id_token: 'the_id_token', + refresh_token: 'the_refresh_token', + expires_at: 9999999999, + }; + + // Act + const { user } = await validate(tokensetWithTokens); + + // Assert - id_token and access_token must be different values + expect(user.federatedTokens.access_token).toBe('the_access_token'); + expect(user.federatedTokens.id_token).toBe('the_id_token'); + expect(user.federatedTokens.id_token).not.toBe(user.federatedTokens.access_token); + }); + it('should include tokenset along with federatedTokens', async () => { // Arrange const tokensetWithTokens = { ...tokenset, access_token: 'test_access_token', + id_token: 'test_id_token', refresh_token: 'test_refresh_token', expires_at: 9999999999, }; @@ -501,7 +833,9 @@ describe('setupOpenId', () => { expect(user.tokenset).toBeDefined(); expect(user.federatedTokens).toBeDefined(); expect(user.tokenset.access_token).toBe('test_access_token'); + expect(user.tokenset.id_token).toBe('test_id_token'); expect(user.federatedTokens.access_token).toBe('test_access_token'); + expect(user.federatedTokens.id_token).toBe('test_id_token'); }); it('should set role to "ADMIN" if OPENID_ADMIN_ROLE is set and user has that role', async () => { diff --git a/api/utils/tokens.spec.js b/api/utils/tokens.spec.js index 0cfdc30227..18905d6d18 100644 --- a/api/utils/tokens.spec.js +++ b/api/utils/tokens.spec.js @@ -1162,6 +1162,56 @@ describe('Claude Model Tests', () => { expect(matchModelName(model, EModelEndpoint.anthropic)).toBe('claude-opus-4-6'); }); }); + + it('should return correct context length for Claude Sonnet 4.6 (1M)', () => { + expect(getModelMaxTokens('claude-sonnet-4-6', EModelEndpoint.anthropic)).toBe( + maxTokensMap[EModelEndpoint.anthropic]['claude-sonnet-4-6'], + ); + expect(getModelMaxTokens('claude-sonnet-4-6')).toBe( + maxTokensMap[EModelEndpoint.anthropic]['claude-sonnet-4-6'], + ); + }); + + it('should return correct max output tokens for Claude Sonnet 4.6 (64K)', () => { + const { getModelMaxOutputTokens } = require('@librechat/api'); + expect(getModelMaxOutputTokens('claude-sonnet-4-6', EModelEndpoint.anthropic)).toBe( + maxOutputTokensMap[EModelEndpoint.anthropic]['claude-sonnet-4-6'], + ); + }); + + it('should handle Claude Sonnet 4.6 model name variations', () => { + const modelVariations = [ + 'claude-sonnet-4-6', + 'claude-sonnet-4-6-20260101', + 'claude-sonnet-4-6-latest', + 'anthropic/claude-sonnet-4-6', + 'claude-sonnet-4-6/anthropic', + 'claude-sonnet-4-6-preview', + ]; + + modelVariations.forEach((model) => { + const modelKey = findMatchingPattern(model, maxTokensMap[EModelEndpoint.anthropic]); + expect(modelKey).toBe('claude-sonnet-4-6'); + expect(getModelMaxTokens(model, EModelEndpoint.anthropic)).toBe( + maxTokensMap[EModelEndpoint.anthropic]['claude-sonnet-4-6'], + ); + }); + }); + + it('should match model names correctly for Claude Sonnet 4.6', () => { + const modelVariations = [ + 'claude-sonnet-4-6', + 'claude-sonnet-4-6-20260101', + 'claude-sonnet-4-6-latest', + 'anthropic/claude-sonnet-4-6', + 'claude-sonnet-4-6/anthropic', + 'claude-sonnet-4-6-preview', + ]; + + modelVariations.forEach((model) => { + expect(matchModelName(model, EModelEndpoint.anthropic)).toBe('claude-sonnet-4-6'); + }); + }); }); describe('Moonshot/Kimi Model Tests', () => { diff --git a/client/package.json b/client/package.json index cd40c7c7c6..f6838f5091 100644 --- a/client/package.json +++ b/client/package.json @@ -80,7 +80,7 @@ "lodash": "^4.17.23", "lucide-react": "^0.394.0", "match-sorter": "^8.1.0", - "mermaid": "^11.12.2", + "mermaid": "^11.12.3", "micromark-extension-llm-math": "^3.1.0", "qrcode.react": "^4.2.0", "rc-input-number": "^7.4.2", diff --git a/client/src/Providers/BadgeRowContext.tsx b/client/src/Providers/BadgeRowContext.tsx index 40df795aba..dce1c38a78 100644 --- a/client/src/Providers/BadgeRowContext.tsx +++ b/client/src/Providers/BadgeRowContext.tsx @@ -1,4 +1,4 @@ -import React, { createContext, useContext, useEffect, useRef } from 'react'; +import React, { createContext, useContext, useEffect, useMemo, useRef } from 'react'; import { useSetRecoilState } from 'recoil'; import { Tools, Constants, LocalStorageKeys, AgentCapabilities } from 'librechat-data-provider'; import type { TAgentsEndpoint } from 'librechat-data-provider'; @@ -9,11 +9,13 @@ import { useCodeApiKeyForm, useToolToggle, } from '~/hooks'; -import { getTimestampedValue, setTimestamp } from '~/utils/timestamps'; +import { getTimestampedValue } from '~/utils/timestamps'; +import { useGetStartupConfig } from '~/data-provider'; import { ephemeralAgentByConvoId } from '~/store'; interface BadgeRowContextType { conversationId?: string | null; + storageContextKey?: string; agentsConfig?: TAgentsEndpoint | null; webSearch: ReturnType; artifacts: ReturnType; @@ -38,34 +40,70 @@ interface BadgeRowProviderProps { children: React.ReactNode; isSubmitting?: boolean; conversationId?: string | null; + specName?: string | null; } export default function BadgeRowProvider({ children, isSubmitting, conversationId, + specName, }: BadgeRowProviderProps) { - const lastKeyRef = useRef(''); + const lastContextKeyRef = useRef(''); const hasInitializedRef = useRef(false); const { agentsConfig } = useGetAgentsConfig(); + const { data: startupConfig } = useGetStartupConfig(); const key = conversationId ?? Constants.NEW_CONVO; + const hasModelSpecs = (startupConfig?.modelSpecs?.list?.length ?? 0) > 0; + + /** + * Compute the storage context key for non-spec persistence: + * - `__defaults__`: specs configured but none active → shared defaults key + * - undefined: spec active (no persistence) or no specs configured (original behavior) + * + * When a spec is active, tool/MCP state is NOT persisted — the admin's spec + * configuration is always applied fresh. Only non-spec user preferences persist. + */ + const storageContextKey = useMemo(() => { + if (!specName && hasModelSpecs) { + return Constants.spec_defaults_key as string; + } + return undefined; + }, [specName, hasModelSpecs]); + + /** + * Compute the storage suffix for reading localStorage defaults: + * - New conversations read from environment key (spec or non-spec defaults) + * - Existing conversations read from conversation key (per-conversation state) + */ + const isNewConvo = key === Constants.NEW_CONVO; + const storageSuffix = isNewConvo && storageContextKey ? storageContextKey : key; const setEphemeralAgent = useSetRecoilState(ephemeralAgentByConvoId(key)); - /** Initialize ephemeralAgent from localStorage on mount and when conversation changes */ + /** Initialize ephemeralAgent from localStorage on mount and when conversation/spec changes. + * Skipped when a spec is active — applyModelSpecEphemeralAgent handles both new conversations + * (pure spec values) and existing conversations (spec values + localStorage overrides). */ useEffect(() => { if (isSubmitting) { return; } - // Check if this is a new conversation or the first load - if (!hasInitializedRef.current || lastKeyRef.current !== key) { + if (specName) { + // Spec active: applyModelSpecEphemeralAgent handles all state (spec base + localStorage + // overrides for existing conversations). Reset init flag so switching back to non-spec + // triggers a fresh re-init. + hasInitializedRef.current = false; + return; + } + // Check if this is a new conversation/spec or the first load + if (!hasInitializedRef.current || lastContextKeyRef.current !== storageSuffix) { hasInitializedRef.current = true; - lastKeyRef.current = key; + lastContextKeyRef.current = storageSuffix; - const codeToggleKey = `${LocalStorageKeys.LAST_CODE_TOGGLE_}${key}`; - const webSearchToggleKey = `${LocalStorageKeys.LAST_WEB_SEARCH_TOGGLE_}${key}`; - const fileSearchToggleKey = `${LocalStorageKeys.LAST_FILE_SEARCH_TOGGLE_}${key}`; - const artifactsToggleKey = `${LocalStorageKeys.LAST_ARTIFACTS_TOGGLE_}${key}`; + const codeToggleKey = `${LocalStorageKeys.LAST_CODE_TOGGLE_}${storageSuffix}`; + const webSearchToggleKey = `${LocalStorageKeys.LAST_WEB_SEARCH_TOGGLE_}${storageSuffix}`; + const fileSearchToggleKey = `${LocalStorageKeys.LAST_FILE_SEARCH_TOGGLE_}${storageSuffix}`; + const artifactsToggleKey = `${LocalStorageKeys.LAST_ARTIFACTS_TOGGLE_}${storageSuffix}`; const codeToggleValue = getTimestampedValue(codeToggleKey); const webSearchToggleValue = getTimestampedValue(webSearchToggleKey); @@ -106,39 +144,53 @@ export default function BadgeRowProvider({ } } - /** - * Always set values for all tools (use defaults if not in `localStorage`) - * If `ephemeralAgent` is `null`, create a new object with just our tool values - */ - const finalValues = { - [Tools.execute_code]: initialValues[Tools.execute_code] ?? false, - [Tools.web_search]: initialValues[Tools.web_search] ?? false, - [Tools.file_search]: initialValues[Tools.file_search] ?? false, - [AgentCapabilities.artifacts]: initialValues[AgentCapabilities.artifacts] ?? false, - }; + const hasOverrides = Object.keys(initialValues).length > 0; - setEphemeralAgent((prev) => ({ - ...(prev || {}), - ...finalValues, - })); - - Object.entries(finalValues).forEach(([toolKey, value]) => { - if (value !== false) { - let storageKey = artifactsToggleKey; - if (toolKey === Tools.execute_code) { - storageKey = codeToggleKey; - } else if (toolKey === Tools.web_search) { - storageKey = webSearchToggleKey; - } else if (toolKey === Tools.file_search) { - storageKey = fileSearchToggleKey; + /** Read persisted MCP values from localStorage */ + let mcpOverrides: string[] | null = null; + const mcpStorageKey = `${LocalStorageKeys.LAST_MCP_}${storageSuffix}`; + const mcpRaw = localStorage.getItem(mcpStorageKey); + if (mcpRaw !== null) { + try { + const parsed = JSON.parse(mcpRaw); + if (Array.isArray(parsed) && parsed.length > 0) { + mcpOverrides = parsed; } - // Store the value and set timestamp for existing values - localStorage.setItem(storageKey, JSON.stringify(value)); - setTimestamp(storageKey); + } catch (e) { + console.error('Failed to parse MCP values:', e); } + } + + setEphemeralAgent((prev) => { + if (prev == null) { + /** ephemeralAgent is null — use localStorage defaults */ + if (hasOverrides || mcpOverrides) { + const result = { ...initialValues }; + if (mcpOverrides) { + result.mcp = mcpOverrides; + } + return result; + } + return prev; + } + /** ephemeralAgent already has values (from prior state). + * Only fill in undefined keys from localStorage. */ + let changed = false; + const result = { ...prev }; + for (const [toolKey, value] of Object.entries(initialValues)) { + if (result[toolKey] === undefined) { + result[toolKey] = value; + changed = true; + } + } + if (mcpOverrides && result.mcp === undefined) { + result.mcp = mcpOverrides; + changed = true; + } + return changed ? result : prev; }); } - }, [key, isSubmitting, setEphemeralAgent]); + }, [storageSuffix, specName, isSubmitting, setEphemeralAgent]); /** CodeInterpreter hooks */ const codeApiKeyForm = useCodeApiKeyForm({}); @@ -146,6 +198,7 @@ export default function BadgeRowProvider({ const codeInterpreter = useToolToggle({ conversationId, + storageContextKey, setIsDialogOpen: setCodeDialogOpen, toolKey: Tools.execute_code, localStorageKey: LocalStorageKeys.LAST_CODE_TOGGLE_, @@ -161,6 +214,7 @@ export default function BadgeRowProvider({ const webSearch = useToolToggle({ conversationId, + storageContextKey, toolKey: Tools.web_search, localStorageKey: LocalStorageKeys.LAST_WEB_SEARCH_TOGGLE_, setIsDialogOpen: setWebSearchDialogOpen, @@ -173,6 +227,7 @@ export default function BadgeRowProvider({ /** FileSearch hook */ const fileSearch = useToolToggle({ conversationId, + storageContextKey, toolKey: Tools.file_search, localStorageKey: LocalStorageKeys.LAST_FILE_SEARCH_TOGGLE_, isAuthenticated: true, @@ -181,12 +236,13 @@ export default function BadgeRowProvider({ /** Artifacts hook - using a custom key since it's not a Tool but a capability */ const artifacts = useToolToggle({ conversationId, + storageContextKey, toolKey: AgentCapabilities.artifacts, localStorageKey: LocalStorageKeys.LAST_ARTIFACTS_TOGGLE_, isAuthenticated: true, }); - const mcpServerManager = useMCPServerManager({ conversationId }); + const mcpServerManager = useMCPServerManager({ conversationId, storageContextKey }); const value: BadgeRowContextType = { webSearch, @@ -194,6 +250,7 @@ export default function BadgeRowProvider({ fileSearch, agentsConfig, conversationId, + storageContextKey, codeApiKeyForm, codeInterpreter, searchApiKeyForm, diff --git a/client/src/common/agents-types.ts b/client/src/common/agents-types.ts index c3832b7ff8..c3ea06f890 100644 --- a/client/src/common/agents-types.ts +++ b/client/src/common/agents-types.ts @@ -9,6 +9,8 @@ import type { } from 'librechat-data-provider'; import type { OptionWithIcon, ExtendedFile } from './types'; +export type AgentQueryResult = { found: true; agent: Agent } | { found: false }; + export type TAgentOption = OptionWithIcon & Agent & { knowledge_files?: Array<[string, ExtendedFile]>; diff --git a/client/src/components/Chat/Input/BadgeRow.tsx b/client/src/components/Chat/Input/BadgeRow.tsx index 5036dcd5e4..6fea6b0d58 100644 --- a/client/src/components/Chat/Input/BadgeRow.tsx +++ b/client/src/components/Chat/Input/BadgeRow.tsx @@ -28,6 +28,7 @@ interface BadgeRowProps { onChange: (badges: Pick[]) => void; onToggle?: (badgeId: string, currentActive: boolean) => void; conversationId?: string | null; + specName?: string | null; isSubmitting?: boolean; isInChat: boolean; } @@ -142,6 +143,7 @@ const dragReducer = (state: DragState, action: DragAction): DragState => { function BadgeRow({ showEphemeralBadges, conversationId, + specName, isSubmitting, onChange, onToggle, @@ -320,7 +322,11 @@ function BadgeRow({ }, [dragState.draggedBadge, handleMouseMove, handleMouseUp]); return ( - +
{showEphemeralBadges === true && } {tempBadges.map((badge, index) => ( diff --git a/client/src/components/Chat/Input/ChatForm.tsx b/client/src/components/Chat/Input/ChatForm.tsx index f8f0fbb40b..45277e5b9c 100644 --- a/client/src/components/Chat/Input/ChatForm.tsx +++ b/client/src/components/Chat/Input/ChatForm.tsx @@ -325,6 +325,7 @@ const ChatForm = memo(({ index = 0 }: { index?: number }) => { } isSubmitting={isSubmitting} conversationId={conversationId} + specName={conversation?.spec} onChange={setBadges} isInChat={ Array.isArray(conversation?.messages) && conversation.messages.length >= 1 diff --git a/client/src/components/Chat/Input/Files/ImagePreview.tsx b/client/src/components/Chat/Input/Files/ImagePreview.tsx index c675c9326c..2714c3677f 100644 --- a/client/src/components/Chat/Input/Files/ImagePreview.tsx +++ b/client/src/components/Chat/Input/Files/ImagePreview.tsx @@ -158,11 +158,11 @@ const ImagePreview = ({ { e.preventDefault(); closeButtonRef.current?.focus(); diff --git a/client/src/components/Chat/Input/MCPSelect.tsx b/client/src/components/Chat/Input/MCPSelect.tsx index 278e603db0..a5356f5094 100644 --- a/client/src/components/Chat/Input/MCPSelect.tsx +++ b/client/src/components/Chat/Input/MCPSelect.tsx @@ -11,7 +11,7 @@ import { useHasAccess } from '~/hooks'; import { cn } from '~/utils'; function MCPSelectContent() { - const { conversationId, mcpServerManager } = useBadgeRowContext(); + const { conversationId, storageContextKey, mcpServerManager } = useBadgeRowContext(); const { localize, isPinned, @@ -128,7 +128,11 @@ function MCPSelectContent() { {configDialogProps && ( - + )} ); diff --git a/client/src/components/Chat/Input/MCPSubMenu.tsx b/client/src/components/Chat/Input/MCPSubMenu.tsx index ca547ca1f7..b0b8fad1bb 100644 --- a/client/src/components/Chat/Input/MCPSubMenu.tsx +++ b/client/src/components/Chat/Input/MCPSubMenu.tsx @@ -15,7 +15,7 @@ interface MCPSubMenuProps { const MCPSubMenu = React.forwardRef( ({ placeholder, ...props }, ref) => { const localize = useLocalize(); - const { mcpServerManager } = useBadgeRowContext(); + const { storageContextKey, mcpServerManager } = useBadgeRowContext(); const { isPinned, mcpValues, @@ -106,7 +106,9 @@ const MCPSubMenu = React.forwardRef(
- {configDialogProps && } + {configDialogProps && ( + + )} ); }, diff --git a/client/src/components/Chat/Input/SendButton.tsx b/client/src/components/Chat/Input/SendButton.tsx index 14c21f0586..a07e574928 100644 --- a/client/src/components/Chat/Input/SendButton.tsx +++ b/client/src/components/Chat/Input/SendButton.tsx @@ -41,7 +41,8 @@ const SubmitButton = React.memo( const SendButton = React.memo( forwardRef((props: SendButtonProps, ref: React.ForwardedRef) => { const data = useWatch({ control: props.control }); - return ; + const content = data?.text?.trim(); + return ; }), ); diff --git a/client/src/components/Chat/Menus/Endpoints/components/EndpointItem.tsx b/client/src/components/Chat/Menus/Endpoints/components/EndpointItem.tsx index 27c1236cb2..6f73f76d79 100644 --- a/client/src/components/Chat/Menus/Endpoints/components/EndpointItem.tsx +++ b/client/src/components/Chat/Menus/Endpoints/components/EndpointItem.tsx @@ -80,12 +80,76 @@ const SettingsButton = ({ ); }; +/** + * Lazily-rendered content for an endpoint submenu. By extracting this into a + * separate component, the expensive model-list rendering (and per-item hooks + * such as MutationObservers in EndpointModelItem) only runs when the submenu + * is actually mounted — which Ariakit defers via `unmountOnHide`. + */ +function EndpointMenuContent({ + endpoint, + endpointIndex, +}: { + endpoint: Endpoint; + endpointIndex: number; +}) { + const localize = useLocalize(); + const { agentsMap, assistantsMap, modelSpecs, selectedValues, endpointSearchValues } = + useModelSelectorContext(); + const { model: selectedModel, modelSpec: selectedSpec } = selectedValues; + const searchValue = endpointSearchValues[endpoint.value] || ''; + + const endpointSpecs = useMemo(() => { + if (!modelSpecs || !modelSpecs.length) { + return []; + } + return modelSpecs.filter((spec: TModelSpec) => spec.group === endpoint.value); + }, [modelSpecs, endpoint.value]); + + if (isAssistantsEndpoint(endpoint.value) && endpoint.models === undefined) { + return ( +
+
+ ); + } + + const filteredModels = searchValue + ? filterModels( + endpoint, + (endpoint.models || []).map((model) => model.name), + searchValue, + agentsMap, + assistantsMap, + ) + : null; + + return ( + <> + {endpointSpecs.map((spec: TModelSpec) => ( + + ))} + {filteredModels + ? renderEndpointModels( + endpoint, + endpoint.models || [], + selectedModel, + filteredModels, + endpointIndex, + ) + : endpoint.models && + renderEndpointModels(endpoint, endpoint.models, selectedModel, undefined, endpointIndex)} + + ); +} + export function EndpointItem({ endpoint, endpointIndex }: EndpointItemProps) { const localize = useLocalize(); const { - agentsMap, - assistantsMap, - modelSpecs, selectedValues, handleOpenKeyDialog, handleSelectEndpoint, @@ -93,19 +157,7 @@ export function EndpointItem({ endpoint, endpointIndex }: EndpointItemProps) { setEndpointSearchValue, endpointRequiresUserKey, } = useModelSelectorContext(); - const { - model: selectedModel, - endpoint: selectedEndpoint, - modelSpec: selectedSpec, - } = selectedValues; - - // Filter modelSpecs for this endpoint (by group matching endpoint value) - const endpointSpecs = useMemo(() => { - if (!modelSpecs || !modelSpecs.length) { - return []; - } - return modelSpecs.filter((spec: TModelSpec) => spec.group === endpoint.value); - }, [modelSpecs, endpoint.value]); + const { endpoint: selectedEndpoint } = selectedValues; const searchValue = endpointSearchValues[endpoint.value] || ''; const isUserProvided = useMemo( @@ -130,15 +182,6 @@ export function EndpointItem({ endpoint, endpointIndex }: EndpointItemProps) { const isEndpointSelected = selectedEndpoint === endpoint.value; if (endpoint.hasModels) { - const filteredModels = searchValue - ? filterModels( - endpoint, - (endpoint.models || []).map((model) => model.name), - searchValue, - agentsMap, - assistantsMap, - ) - : null; const placeholder = isAgentsEndpoint(endpoint.value) || isAssistantsEndpoint(endpoint.value) ? localize('com_endpoint_search_var', { 0: endpoint.label }) @@ -147,7 +190,6 @@ export function EndpointItem({ endpoint, endpointIndex }: EndpointItemProps) { setEndpointSearchValue(endpoint.value, value)} combobox={} @@ -170,39 +212,7 @@ export function EndpointItem({ endpoint, endpointIndex }: EndpointItemProps) { } > - {isAssistantsEndpoint(endpoint.value) && endpoint.models === undefined ? ( -
-
- ) : ( - <> - {/* Render modelSpecs for this endpoint */} - {endpointSpecs.map((spec: TModelSpec) => ( - - ))} - {/* Render endpoint models */} - {filteredModels - ? renderEndpointModels( - endpoint, - endpoint.models || [], - selectedModel, - filteredModels, - endpointIndex, - ) - : endpoint.models && - renderEndpointModels( - endpoint, - endpoint.models, - selectedModel, - undefined, - endpointIndex, - )} - - )} +
); } else { diff --git a/client/src/components/Chat/Messages/Content/MarkdownComponents.tsx b/client/src/components/Chat/Messages/Content/MarkdownComponents.tsx index 7db3fa668a..d647147151 100644 --- a/client/src/components/Chat/Messages/Content/MarkdownComponents.tsx +++ b/client/src/components/Chat/Messages/Content/MarkdownComponents.tsx @@ -111,7 +111,7 @@ export const a: React.ElementType = memo(({ href, children }: TAnchorProps) => { }, [user?.id, href]); const { refetch: downloadFile } = useFileDownload(user?.id ?? '', file_id); - const props: { target?: string; onClick?: React.MouseEventHandler } = { target: '_new' }; + const props: { target?: string; onClick?: React.MouseEventHandler } = { target: '_blank' }; if (!file_id || !filename) { return ( diff --git a/client/src/components/Chat/Messages/Content/MarkdownLite.tsx b/client/src/components/Chat/Messages/Content/MarkdownLite.tsx index 65efe2f256..24980d8a90 100644 --- a/client/src/components/Chat/Messages/Content/MarkdownLite.tsx +++ b/client/src/components/Chat/Messages/Content/MarkdownLite.tsx @@ -38,7 +38,6 @@ const MarkdownLite = memo( ]} /** @ts-ignore */ rehypePlugins={rehypePlugins} - // linkTarget="_new" components={ { code: codeExecution ? code : codeNoExecution, diff --git a/client/src/components/Chat/Messages/Content/Part.tsx b/client/src/components/Chat/Messages/Content/Part.tsx index 4a74e3606f..f97d1343b9 100644 --- a/client/src/components/Chat/Messages/Content/Part.tsx +++ b/client/src/components/Chat/Messages/Content/Part.tsx @@ -67,9 +67,20 @@ const Part = memo( if (part.tool_call_ids != null && !text) { return null; } - /** Skip rendering if text is only whitespace to avoid empty Container */ - if (!isLast && text.length > 0 && /^\s*$/.test(text)) { - return null; + /** Handle whitespace-only text to avoid layout shift */ + if (text.length > 0 && /^\s*$/.test(text)) { + /** Show placeholder for whitespace-only last part during streaming */ + if (isLast && showCursor) { + return ( + + + + ); + } + /** Skip rendering non-last whitespace-only parts to avoid empty Container */ + if (!isLast) { + return null; + } } return ( diff --git a/client/src/components/Chat/Messages/Content/ToolCall.tsx b/client/src/components/Chat/Messages/Content/ToolCall.tsx index b9feef1bad..c807288b46 100644 --- a/client/src/components/Chat/Messages/Content/ToolCall.tsx +++ b/client/src/components/Chat/Messages/Content/ToolCall.tsx @@ -1,7 +1,12 @@ -import { useMemo, useState, useEffect, useRef, useLayoutEffect } from 'react'; +import { useMemo, useState, useEffect, useRef, useCallback, useLayoutEffect } from 'react'; import { Button } from '@librechat/client'; import { TriangleAlert } from 'lucide-react'; -import { actionDelimiter, actionDomainSeparator, Constants } from 'librechat-data-provider'; +import { + Constants, + dataService, + actionDelimiter, + actionDomainSeparator, +} from 'librechat-data-provider'; import type { TAttachment } from 'librechat-data-provider'; import { useLocalize, useProgress } from '~/hooks'; import { AttachmentGroup } from './Parts'; @@ -36,9 +41,9 @@ export default function ToolCall({ const [isAnimating, setIsAnimating] = useState(false); const prevShowInfoRef = useRef(showInfo); - const { function_name, domain, isMCPToolCall } = useMemo(() => { + const { function_name, domain, isMCPToolCall, mcpServerName } = useMemo(() => { if (typeof name !== 'string') { - return { function_name: '', domain: null, isMCPToolCall: false }; + return { function_name: '', domain: null, isMCPToolCall: false, mcpServerName: '' }; } if (name.includes(Constants.mcp_delimiter)) { const [func, server] = name.split(Constants.mcp_delimiter); @@ -46,6 +51,7 @@ export default function ToolCall({ function_name: func || '', domain: server && (server.replaceAll(actionDomainSeparator, '.') || null), isMCPToolCall: true, + mcpServerName: server || '', }; } const [func, _domain] = name.includes(actionDelimiter) @@ -55,9 +61,40 @@ export default function ToolCall({ function_name: func || '', domain: _domain && (_domain.replaceAll(actionDomainSeparator, '.') || null), isMCPToolCall: false, + mcpServerName: '', }; }, [name]); + const actionId = useMemo(() => { + if (isMCPToolCall || !auth) { + return ''; + } + try { + const url = new URL(auth); + const redirectUri = url.searchParams.get('redirect_uri') || ''; + const match = redirectUri.match(/\/api\/actions\/([^/]+)\/oauth\/callback/); + return match?.[1] || ''; + } catch { + return ''; + } + }, [auth, isMCPToolCall]); + + const handleOAuthClick = useCallback(async () => { + if (!auth) { + return; + } + try { + if (isMCPToolCall && mcpServerName) { + await dataService.bindMCPOAuth(mcpServerName); + } else if (actionId) { + await dataService.bindActionOAuth(actionId); + } + } catch (e) { + logger.error('Failed to bind OAuth CSRF cookie', e); + } + window.open(auth, '_blank', 'noopener,noreferrer'); + }, [auth, isMCPToolCall, mcpServerName, actionId]); + const error = typeof output === 'string' && output.toLowerCase().includes('error processing tool'); @@ -230,7 +267,7 @@ export default function ToolCall({ className="font-mediu inline-flex items-center justify-center rounded-xl px-4 py-2 text-sm" variant="default" rel="noopener noreferrer" - onClick={() => window.open(auth, '_blank', 'noopener,noreferrer')} + onClick={handleOAuthClick} > {localize('com_ui_sign_in_to_domain', { 0: authDomain })} diff --git a/client/src/components/MCP/MCPConfigDialog.tsx b/client/src/components/MCP/MCPConfigDialog.tsx index a3727971e9..f1079c2799 100644 --- a/client/src/components/MCP/MCPConfigDialog.tsx +++ b/client/src/components/MCP/MCPConfigDialog.tsx @@ -24,6 +24,7 @@ interface MCPConfigDialogProps { serverName: string; serverStatus?: MCPServerStatus; conversationId?: string | null; + storageContextKey?: string; } export default function MCPConfigDialog({ @@ -36,6 +37,7 @@ export default function MCPConfigDialog({ serverName, serverStatus, conversationId, + storageContextKey, }: MCPConfigDialogProps) { const localize = useLocalize(); @@ -167,6 +169,7 @@ export default function MCPConfigDialog({ 0} /> diff --git a/client/src/components/MCP/ServerInitializationSection.tsx b/client/src/components/MCP/ServerInitializationSection.tsx index b5f71335d7..c080866b3d 100644 --- a/client/src/components/MCP/ServerInitializationSection.tsx +++ b/client/src/components/MCP/ServerInitializationSection.tsx @@ -9,12 +9,14 @@ interface ServerInitializationSectionProps { requiresOAuth: boolean; hasCustomUserVars?: boolean; conversationId?: string | null; + storageContextKey?: string; } export default function ServerInitializationSection({ serverName, requiresOAuth, conversationId, + storageContextKey, sidePanel = false, hasCustomUserVars = false, }: ServerInitializationSectionProps) { @@ -28,7 +30,7 @@ export default function ServerInitializationSection({ initializeServer, availableMCPServers, revokeOAuthForServer, - } = useMCPServerManager({ conversationId }); + } = useMCPServerManager({ conversationId, storageContextKey }); const { connectionStatus } = useMCPConnectionStatus({ enabled: !!availableMCPServers && availableMCPServers.length > 0, diff --git a/client/src/components/Nav/Favorites/FavoritesList.tsx b/client/src/components/Nav/Favorites/FavoritesList.tsx index b142b0cfc3..86fe4a793f 100644 --- a/client/src/components/Nav/Favorites/FavoritesList.tsx +++ b/client/src/components/Nav/Favorites/FavoritesList.tsx @@ -9,6 +9,7 @@ import { QueryKeys, dataService } from 'librechat-data-provider'; import type t from 'librechat-data-provider'; import { useFavorites, useLocalize, useShowMarketplace, useNewConvo } from '~/hooks'; import { useAssistantsMapContext, useAgentsMapContext } from '~/Providers'; +import type { AgentQueryResult } from '~/common'; import useSelectMention from '~/hooks/Input/useSelectMention'; import { useGetEndpointsQuery } from '~/data-provider'; import FavoriteItem from './FavoriteItem'; @@ -184,7 +185,20 @@ export default function FavoritesList({ const missingAgentQueries = useQueries({ queries: missingAgentIds.map((agentId) => ({ queryKey: [QueryKeys.agent, agentId], - queryFn: () => dataService.getAgentById({ agent_id: agentId }), + queryFn: async (): Promise => { + try { + const agent = await dataService.getAgentById({ agent_id: agentId }); + return { found: true, agent }; + } catch (error) { + if (error && typeof error === 'object' && 'response' in error) { + const axiosError = error as { response?: { status?: number } }; + if (axiosError.response?.status === 404) { + return { found: false }; + } + } + throw error; + } + }, staleTime: 1000 * 60 * 5, enabled: missingAgentIds.length > 0, })), @@ -201,8 +215,8 @@ export default function FavoritesList({ } } missingAgentQueries.forEach((query) => { - if (query.data) { - combined[query.data.id] = query.data; + if (query.data?.found) { + combined[query.data.agent.id] = query.data.agent; } }); return combined; diff --git a/client/src/components/Nav/Favorites/tests/FavoritesList.spec.tsx b/client/src/components/Nav/Favorites/tests/FavoritesList.spec.tsx new file mode 100644 index 0000000000..8318b94698 --- /dev/null +++ b/client/src/components/Nav/Favorites/tests/FavoritesList.spec.tsx @@ -0,0 +1,191 @@ +import React from 'react'; +import { render, waitFor } from '@testing-library/react'; +import '@testing-library/jest-dom'; +import { QueryClient, QueryClientProvider } from '@tanstack/react-query'; +import { RecoilRoot } from 'recoil'; +import { DndProvider } from 'react-dnd'; +import { HTML5Backend } from 'react-dnd-html5-backend'; +import { BrowserRouter } from 'react-router-dom'; +import { dataService } from 'librechat-data-provider'; +import type t from 'librechat-data-provider'; + +// Mock store before importing FavoritesList +jest.mock('~/store', () => { + const { atom } = jest.requireActual('recoil'); + return { + __esModule: true, + default: { + search: atom({ + key: 'mock-search-atom', + default: { query: '' }, + }), + conversationByIndex: (index: number) => + atom({ + key: `mock-conversation-atom-${index}`, + default: null, + }), + }, + }; +}); +import FavoritesList from '../FavoritesList'; + +type FavoriteItem = { + agentId?: string; + model?: string; + endpoint?: string; +}; + +// Mock dataService +jest.mock('librechat-data-provider', () => ({ + ...jest.requireActual('librechat-data-provider'), + dataService: { + getAgentById: jest.fn(), + }, +})); + +// Mock hooks +const mockFavorites: FavoriteItem[] = []; +const mockUseFavorites = jest.fn(() => ({ + favorites: mockFavorites, + reorderFavorites: jest.fn(), + isLoading: false, +})); + +jest.mock('~/hooks', () => ({ + useFavorites: () => mockUseFavorites(), + useLocalize: () => (key: string) => key, + useShowMarketplace: () => false, + useNewConvo: () => ({ newConversation: jest.fn() }), +})); + +jest.mock('~/Providers', () => ({ + useAssistantsMapContext: () => ({}), + useAgentsMapContext: () => ({}), +})); + +jest.mock('~/hooks/Input/useSelectMention', () => () => ({ + onSelectEndpoint: jest.fn(), +})); + +jest.mock('~/data-provider', () => ({ + useGetEndpointsQuery: () => ({ data: {} }), +})); + +jest.mock('../FavoriteItem', () => ({ + __esModule: true, + default: ({ item, type }: { item: any; type: string }) => ( +
+ {type === 'agent' ? item.name : item.model} +
+ ), +})); + +const createTestQueryClient = () => + new QueryClient({ + defaultOptions: { + queries: { + retry: false, + }, + }, + }); + +const renderWithProviders = (ui: React.ReactElement) => { + const queryClient = createTestQueryClient(); + return render( + + + + {ui} + + + , + ); +}; + +describe('FavoritesList', () => { + beforeEach(() => { + jest.clearAllMocks(); + mockFavorites.length = 0; + }); + + describe('rendering', () => { + it('should render nothing when favorites is empty and marketplace is hidden', () => { + const { container } = renderWithProviders(); + expect(container.firstChild).toBeNull(); + }); + + it('should render skeleton while loading', () => { + mockUseFavorites.mockReturnValueOnce({ + favorites: [], + reorderFavorites: jest.fn(), + isLoading: true, + }); + + const { container } = renderWithProviders(); + // Skeletons should be present during loading - container should have children + expect(container.firstChild).not.toBeNull(); + // When loading, the component renders skeleton placeholders (check for content, not specific CSS) + expect(container.innerHTML).toContain('div'); + }); + }); + + describe('missing agent handling', () => { + it('should exclude missing agents (404) from rendered favorites and render valid agents', async () => { + const validAgent: t.Agent = { + id: 'valid-agent', + name: 'Valid Agent', + author: 'test-author', + } as t.Agent; + + // Set up favorites with both valid and missing agent + mockFavorites.push({ agentId: 'valid-agent' }, { agentId: 'deleted-agent' }); + + // Mock getAgentById: valid-agent returns successfully, deleted-agent returns 404 + (dataService.getAgentById as jest.Mock).mockImplementation( + ({ agent_id }: { agent_id: string }) => { + if (agent_id === 'valid-agent') { + return Promise.resolve(validAgent); + } + if (agent_id === 'deleted-agent') { + return Promise.reject({ response: { status: 404 } }); + } + return Promise.reject(new Error('Unknown agent')); + }, + ); + + const { findAllByTestId } = renderWithProviders(); + + // Wait for queries to resolve + const favoriteItems = await findAllByTestId('favorite-item'); + + // Only the valid agent should be rendered + expect(favoriteItems).toHaveLength(1); + expect(favoriteItems[0]).toHaveTextContent('Valid Agent'); + + // The deleted agent should still be requested, but not rendered + expect(dataService.getAgentById as jest.Mock).toHaveBeenCalledWith({ + agent_id: 'deleted-agent', + }); + }); + + it('should not show infinite loading skeleton when agents return 404', async () => { + // Set up favorites with only a deleted agent + mockFavorites.push({ agentId: 'deleted-agent' }); + + // Mock getAgentById to return 404 + (dataService.getAgentById as jest.Mock).mockRejectedValue({ response: { status: 404 } }); + + const { queryAllByTestId } = renderWithProviders(); + + // Wait for the loading state to resolve after 404 handling by ensuring the agent request was made + await waitFor(() => { + expect(dataService.getAgentById as jest.Mock).toHaveBeenCalledWith({ + agent_id: 'deleted-agent', + }); + }); + + // No favorite items should be rendered (deleted agent is filtered out) + expect(queryAllByTestId('favorite-item')).toHaveLength(0); + }); + }); +}); diff --git a/client/src/components/SidePanel/Agents/AgentPanel.tsx b/client/src/components/SidePanel/Agents/AgentPanel.tsx index f74dcfddcc..890488e88d 100644 --- a/client/src/components/SidePanel/Agents/AgentPanel.tsx +++ b/client/src/components/SidePanel/Agents/AgentPanel.tsx @@ -1,7 +1,7 @@ -import { Plus } from 'lucide-react'; import React, { useMemo, useCallback, useRef, useState } from 'react'; +import { Plus } from 'lucide-react'; import { Button, useToastContext } from '@librechat/client'; -import { useWatch, useForm, FormProvider, type FieldNamesMarkedBoolean } from 'react-hook-form'; +import { useWatch, useForm, FormProvider } from 'react-hook-form'; import { useGetModelsQuery } from 'librechat-data-provider/react-query'; import { Tools, @@ -11,8 +11,10 @@ import { PermissionBits, isAssistantsEndpoint, } from 'librechat-data-provider'; -import type { AgentForm, StringOption } from '~/common'; +import type { FieldNamesMarkedBoolean } from 'react-hook-form'; import type { Agent } from 'librechat-data-provider'; +import type { TranslationKeys } from '~/hooks/useLocalize'; +import type { AgentForm, StringOption } from '~/common'; import { useCreateAgentMutation, useUpdateAgentMutation, @@ -23,7 +25,6 @@ import { import { createProviderOption, getDefaultAgentFormValues } from '~/utils'; import { useResourcePermissions } from '~/hooks/useResourcePermissions'; import { useSelectAgent, useLocalize, useAuthContext } from '~/hooks'; -import type { TranslationKeys } from '~/hooks/useLocalize'; import { useAgentPanelContext } from '~/Providers/AgentPanelContext'; import AgentPanelSkeleton from './AgentPanelSkeleton'; import AdvancedPanel from './Advanced/AdvancedPanel'; diff --git a/client/src/components/SidePanel/Agents/MCPTools.tsx b/client/src/components/SidePanel/Agents/MCPTools.tsx index 552f2b313b..3dc9a19d6a 100644 --- a/client/src/components/SidePanel/Agents/MCPTools.tsx +++ b/client/src/components/SidePanel/Agents/MCPTools.tsx @@ -46,7 +46,7 @@ export default function MCPTools({ return null; } - if (serverInfo.isConnected) { + if (serverInfo?.tools?.length && serverInfo.tools.length > 0) { return ( ); diff --git a/client/src/components/SidePanel/MCPBuilder/MCPServerDialog/MCPServerForm.tsx b/client/src/components/SidePanel/MCPBuilder/MCPServerDialog/MCPServerForm.tsx index 188c518597..d4096ea96a 100644 --- a/client/src/components/SidePanel/MCPBuilder/MCPServerDialog/MCPServerForm.tsx +++ b/client/src/components/SidePanel/MCPBuilder/MCPServerDialog/MCPServerForm.tsx @@ -1,10 +1,10 @@ import { FormProvider } from 'react-hook-form'; +import type { useMCPServerForm } from './hooks/useMCPServerForm'; import ConnectionSection from './sections/ConnectionSection'; import BasicInfoSection from './sections/BasicInfoSection'; import TransportSection from './sections/TransportSection'; -import AuthSection from './sections/AuthSection'; import TrustSection from './sections/TrustSection'; -import type { useMCPServerForm } from './hooks/useMCPServerForm'; +import AuthSection from './sections/AuthSection'; interface MCPServerFormProps { formHook: ReturnType; diff --git a/client/src/components/SidePanel/MCPBuilder/MCPServerDialog/index.tsx b/client/src/components/SidePanel/MCPBuilder/MCPServerDialog/index.tsx index f86d3f8056..c9d3473d60 100644 --- a/client/src/components/SidePanel/MCPBuilder/MCPServerDialog/index.tsx +++ b/client/src/components/SidePanel/MCPBuilder/MCPServerDialog/index.tsx @@ -1,13 +1,18 @@ import React, { useState, useEffect } from 'react'; +import { Copy, CopyCheck } from 'lucide-react'; import { - OGDialog, - OGDialogTemplate, - OGDialogContent, - OGDialogHeader, - OGDialogTitle, + Label, + Input, Button, - TrashIcon, Spinner, + TrashIcon, + useToastContext, + OGDialog, + OGDialogTitle, + OGDialogHeader, + OGDialogFooter, + OGDialogContent, + OGDialogTemplate, } from '@librechat/client'; import { SystemRoles, @@ -16,10 +21,10 @@ import { PermissionBits, PermissionTypes, } from 'librechat-data-provider'; -import { GenericGrantAccessDialog } from '~/components/Sharing'; import { useAuthContext, useHasAccess, useResourcePermissions, MCPServerDefinition } from '~/hooks'; -import { useLocalize } from '~/hooks'; +import { GenericGrantAccessDialog } from '~/components/Sharing'; import { useMCPServerForm } from './hooks/useMCPServerForm'; +import { useLocalize, useCopyToClipboard } from '~/hooks'; import MCPServerForm from './MCPServerForm'; interface MCPServerDialogProps { @@ -39,8 +44,10 @@ export default function MCPServerDialog({ }: MCPServerDialogProps) { const localize = useLocalize(); const { user } = useAuthContext(); + const { showToast } = useToastContext(); // State for dialogs + const [isCopying, setIsCopying] = useState(false); const [showDeleteConfirm, setShowDeleteConfirm] = useState(false); const [showRedirectUriDialog, setShowRedirectUriDialog] = useState(false); const [createdServerId, setCreatedServerId] = useState(null); @@ -99,20 +106,26 @@ export default function MCPServerDialog({ ? `${window.location.origin}/api/mcp/${createdServerId}/oauth/callback` : ''; + const copyLink = useCopyToClipboard({ text: redirectUri }); + return ( <> {/* Delete confirmation dialog */} setShowDeleteConfirm(isOpen)}> {localize('com_ui_mcp_server_delete_confirm')}

} - selection={{ - selectHandler: handleDelete, - selectClasses: - 'bg-destructive text-white transition-all duration-200 hover:bg-destructive/80', - selectText: isDeleting ? : localize('com_ui_delete'), - }} + title={localize('com_ui_delete_mcp_server')} + className="w-11/12 max-w-md" + description={localize('com_ui_mcp_server_delete_confirm', { 0: server?.serverName })} + selection={ + + } />
@@ -127,48 +140,53 @@ export default function MCPServerDialog({ } }} > - - + + {localize('com_ui_mcp_server_created')} -
-

- {localize('com_ui_redirect_uri_instructions')} -

-
-
diff --git a/client/src/components/SidePanel/MCPBuilder/MCPServerDialog/sections/ConnectionSection.tsx b/client/src/components/SidePanel/MCPBuilder/MCPServerDialog/sections/ConnectionSection.tsx index 5d7094fd83..ee77a54699 100644 --- a/client/src/components/SidePanel/MCPBuilder/MCPServerDialog/sections/ConnectionSection.tsx +++ b/client/src/components/SidePanel/MCPBuilder/MCPServerDialog/sections/ConnectionSection.tsx @@ -15,13 +15,19 @@ export default function ConnectionSection() { return (
{ @@ -29,9 +35,13 @@ export default function ConnectionSection() { return isValidUrl(normalized) || localize('com_ui_mcp_invalid_url'); }, })} - className={cn(errors.url && 'border-red-500 focus:border-red-500')} + className={cn(errors.url && 'border-border-destructive')} /> - {errors.url &&

{errors.url.message}

} + {errors.url && ( + + )}
); } diff --git a/client/src/components/SidePanel/MCPBuilder/MCPServerDialog/sections/TransportSection.tsx b/client/src/components/SidePanel/MCPBuilder/MCPServerDialog/sections/TransportSection.tsx index 80d4595719..5c7b610b70 100644 --- a/client/src/components/SidePanel/MCPBuilder/MCPServerDialog/sections/TransportSection.tsx +++ b/client/src/components/SidePanel/MCPBuilder/MCPServerDialog/sections/TransportSection.tsx @@ -25,14 +25,19 @@ export default function TransportSection() { ); return ( -
- +
+ + + -
+ ); } diff --git a/client/src/components/SidePanel/MCPBuilder/MCPServerDialog/sections/TrustSection.tsx b/client/src/components/SidePanel/MCPBuilder/MCPServerDialog/sections/TrustSection.tsx index 854ac717b7..36d8d73a49 100644 --- a/client/src/components/SidePanel/MCPBuilder/MCPServerDialog/sections/TrustSection.tsx +++ b/client/src/components/SidePanel/MCPBuilder/MCPServerDialog/sections/TrustSection.tsx @@ -26,17 +26,17 @@ export default function TrustSection() { checked={field.value} onCheckedChange={field.onChange} aria-labelledby="trust-label" - aria-describedby="trust-description" + aria-describedby={ + errors.trust ? 'trust-description trust-error' : 'trust-description' + } + aria-invalid={errors.trust ? 'true' : 'false'} + aria-required="true" className="mt-0.5" /> )} /> -
{errors.trust && ( -

{localize('com_ui_field_required')}

+ )} ); diff --git a/client/src/components/Tools/MCPToolSelectDialog.tsx b/client/src/components/Tools/MCPToolSelectDialog.tsx index 487f767250..a27484d4e8 100644 --- a/client/src/components/Tools/MCPToolSelectDialog.tsx +++ b/client/src/components/Tools/MCPToolSelectDialog.tsx @@ -96,17 +96,17 @@ function MCPToolSelectDialog({ await new Promise((resolve) => setTimeout(resolve, 500)); } - // Then initialize server if needed + // Only initialize if no cached tools exist; skip if tools are already available from DB const serverInfo = mcpServersMap.get(serverName); - if (!serverInfo?.isConnected) { + if (!serverInfo?.tools?.length) { const result = await initializeServer(serverName); - if (result?.success && result.oauthRequired && result.oauthUrl) { + if (result?.oauthRequired && result.oauthUrl) { setIsInitializing(null); - return; + return; // OAuth flow must complete first } } - // Finally, add tools to form + // Add tools to form (refetches from backend's persisted cache) await addToolsToForm(serverName); setIsInitializing(null); } catch (error) { diff --git a/client/src/data-provider/MCP/queries.ts b/client/src/data-provider/MCP/queries.ts index afc17f3a93..8590e43735 100644 --- a/client/src/data-provider/MCP/queries.ts +++ b/client/src/data-provider/MCP/queries.ts @@ -12,10 +12,10 @@ export const useMCPServersQuery = ( [QueryKeys.mcpServers], () => dataService.getMCPServers(), { - staleTime: 1000 * 60 * 5, // 5 minutes - data stays fresh longer - refetchOnWindowFocus: false, + staleTime: 30 * 1000, // 30 seconds — short enough to pick up servers that finish initializing after first load + refetchOnWindowFocus: true, refetchOnReconnect: false, - refetchOnMount: false, + refetchOnMount: true, retry: false, ...config, }, diff --git a/client/src/hooks/Agents/useApplyModelSpecAgents.ts b/client/src/hooks/Agents/useApplyModelSpecAgents.ts index 94d62a058a..2c677f85ca 100644 --- a/client/src/hooks/Agents/useApplyModelSpecAgents.ts +++ b/client/src/hooks/Agents/useApplyModelSpecAgents.ts @@ -1,4 +1,5 @@ import { useCallback } from 'react'; +import { Constants } from 'librechat-data-provider'; import type { TStartupConfig, TSubmission } from 'librechat-data-provider'; import { useUpdateEphemeralAgent, useApplyNewAgentTemplate } from '~/store/agents'; import { getModelSpec, applyModelSpecEphemeralAgent } from '~/utils'; @@ -6,6 +7,10 @@ import { getModelSpec, applyModelSpecEphemeralAgent } from '~/utils'; /** * Hook that applies a model spec from a preset to an ephemeral agent. * This is used when initializing a new conversation with a preset that has a spec. + * + * When a spec is provided, its tool settings are applied to the ephemeral agent. + * When no spec is provided but specs are configured, the ephemeral agent is reset + * to null so BadgeRowContext can apply localStorage defaults (non-spec experience). */ export function useApplyModelSpecEffects() { const updateEphemeralAgent = useUpdateEphemeralAgent(); @@ -20,6 +25,11 @@ export function useApplyModelSpecEffects() { startupConfig?: TStartupConfig; }) => { if (specName == null || !specName) { + if (startupConfig?.modelSpecs?.list?.length) { + /** Specs are configured but none selected — reset ephemeral agent to null + * so BadgeRowContext fills all values (tool toggles + MCP) from localStorage. */ + updateEphemeralAgent((convoId ?? Constants.NEW_CONVO) || Constants.NEW_CONVO, null); + } return; } @@ -80,6 +90,9 @@ export function useApplyAgentTemplate() { web_search: ephemeralAgent?.web_search ?? modelSpec.webSearch ?? false, file_search: ephemeralAgent?.file_search ?? modelSpec.fileSearch ?? false, execute_code: ephemeralAgent?.execute_code ?? modelSpec.executeCode ?? false, + artifacts: + ephemeralAgent?.artifacts ?? + (modelSpec.artifacts === true ? 'default' : modelSpec.artifacts || ''), }; mergedAgent.mcp = [...new Set(mergedAgent.mcp)]; diff --git a/client/src/hooks/Chat/useAddedResponse.ts b/client/src/hooks/Chat/useAddedResponse.ts index c01cef0c69..fe35e4e56e 100644 --- a/client/src/hooks/Chat/useAddedResponse.ts +++ b/client/src/hooks/Chat/useAddedResponse.ts @@ -1,7 +1,12 @@ import { useCallback } from 'react'; import { useRecoilValue } from 'recoil'; import { useGetModelsQuery } from 'librechat-data-provider/react-query'; -import { getEndpointField, LocalStorageKeys, isAssistantsEndpoint } from 'librechat-data-provider'; +import { + getEndpointField, + LocalStorageKeys, + isAssistantsEndpoint, + getDefaultParamsEndpoint, +} from 'librechat-data-provider'; import type { TEndpointsConfig, EModelEndpoint, TConversation } from 'librechat-data-provider'; import type { AssistantListItem, NewConversationParams } from '~/common'; import useAssistantListMap from '~/hooks/Assistants/useAssistantListMap'; @@ -84,11 +89,13 @@ export default function useAddedResponse() { } const models = modelsConfig?.[defaultEndpoint ?? ''] ?? []; + const defaultParamsEndpoint = getDefaultParamsEndpoint(endpointsConfig, defaultEndpoint); newConversation = buildDefaultConvo({ conversation: newConversation, lastConversationSetup: preset as TConversation, endpoint: defaultEndpoint ?? ('' as EModelEndpoint), models, + defaultParamsEndpoint, }); if (preset?.title != null && preset.title !== '') { diff --git a/client/src/hooks/Chat/useChatFunctions.ts b/client/src/hooks/Chat/useChatFunctions.ts index 8479d8eaac..7cf8c6bf25 100644 --- a/client/src/hooks/Chat/useChatFunctions.ts +++ b/client/src/hooks/Chat/useChatFunctions.ts @@ -13,6 +13,7 @@ import { parseCompactConvo, replaceSpecialVars, isAssistantsEndpoint, + getDefaultParamsEndpoint, } from 'librechat-data-provider'; import type { TMessage, @@ -96,6 +97,8 @@ export default function useChatFunctions({ ) => { setShowStopButton(false); resetLatestMultiMessage(); + + text = text.trim(); if (!!isSubmitting || text === '') { return; } @@ -133,7 +136,6 @@ export default function useChatFunctions({ // construct the query message // this is not a real messageId, it is used as placeholder before real messageId returned - text = text.trim(); const intermediateId = overrideUserMessageId ?? v4(); parentMessageId = parentMessageId ?? latestMessage?.messageId ?? Constants.NO_PARENT; @@ -173,12 +175,14 @@ export default function useChatFunctions({ const startupConfig = queryClient.getQueryData([QueryKeys.startupConfig]); const endpointType = getEndpointField(endpointsConfig, endpoint, 'type'); const iconURL = conversation?.iconURL; + const defaultParamsEndpoint = getDefaultParamsEndpoint(endpointsConfig, endpoint); /** This becomes part of the `endpointOption` */ const convo = parseCompactConvo({ endpoint: endpoint as EndpointSchemaKey, endpointType: endpointType as EndpointSchemaKey, conversation: conversation ?? {}, + defaultParamsEndpoint, }); const { modelDisplayLabel } = endpointsConfig?.[endpoint ?? ''] ?? {}; diff --git a/client/src/hooks/Conversations/useDefaultConvo.ts b/client/src/hooks/Conversations/useDefaultConvo.ts index bfca39d3e0..67a40ce64e 100644 --- a/client/src/hooks/Conversations/useDefaultConvo.ts +++ b/client/src/hooks/Conversations/useDefaultConvo.ts @@ -1,5 +1,5 @@ -import { excludedKeys } from 'librechat-data-provider'; import { useGetModelsQuery } from 'librechat-data-provider/react-query'; +import { excludedKeys, getDefaultParamsEndpoint } from 'librechat-data-provider'; import type { TEndpointsConfig, TModelsConfig, @@ -47,11 +47,14 @@ const useDefaultConvo = () => { } } + const defaultParamsEndpoint = getDefaultParamsEndpoint(endpointsConfig, endpoint); + const defaultConvo = buildDefaultConvo({ conversation: conversation as TConversation, endpoint, lastConversationSetup: preset as TConversation, models, + defaultParamsEndpoint, }); if (!cleanOutput) { diff --git a/client/src/hooks/Conversations/useExportConversation.ts b/client/src/hooks/Conversations/useExportConversation.ts index 579b5f1cf6..dc352ccab9 100644 --- a/client/src/hooks/Conversations/useExportConversation.ts +++ b/client/src/hooks/Conversations/useExportConversation.ts @@ -106,6 +106,9 @@ export default function useExportConversation({ // TEXT const textPart = content[ContentTypes.TEXT]; const text = typeof textPart === 'string' ? textPart : (textPart?.value ?? ''); + if (text.trim().length === 0) { + return []; + } return [sender, text]; } diff --git a/client/src/hooks/Conversations/useGenerateConvo.ts b/client/src/hooks/Conversations/useGenerateConvo.ts index d96f60e05d..abe3215753 100644 --- a/client/src/hooks/Conversations/useGenerateConvo.ts +++ b/client/src/hooks/Conversations/useGenerateConvo.ts @@ -1,7 +1,12 @@ import { useRecoilValue } from 'recoil'; import { useCallback, useRef, useEffect } from 'react'; import { useGetModelsQuery } from 'librechat-data-provider/react-query'; -import { getEndpointField, LocalStorageKeys, isAssistantsEndpoint } from 'librechat-data-provider'; +import { + getEndpointField, + LocalStorageKeys, + isAssistantsEndpoint, + getDefaultParamsEndpoint, +} from 'librechat-data-provider'; import type { TEndpointsConfig, EModelEndpoint, @@ -117,11 +122,13 @@ const useGenerateConvo = ({ } const models = modelsConfig?.[defaultEndpoint ?? ''] ?? []; + const defaultParamsEndpoint = getDefaultParamsEndpoint(endpointsConfig, defaultEndpoint); conversation = buildDefaultConvo({ conversation, lastConversationSetup: preset as TConversation, endpoint: defaultEndpoint ?? ('' as EModelEndpoint), models, + defaultParamsEndpoint, }); if (preset?.title != null && preset.title !== '') { diff --git a/client/src/hooks/Conversations/useNavigateToConvo.tsx b/client/src/hooks/Conversations/useNavigateToConvo.tsx index 114b70c6ef..b9d188eaf0 100644 --- a/client/src/hooks/Conversations/useNavigateToConvo.tsx +++ b/client/src/hooks/Conversations/useNavigateToConvo.tsx @@ -2,7 +2,13 @@ import { useCallback } from 'react'; import { useSetRecoilState } from 'recoil'; import { useNavigate } from 'react-router-dom'; import { useQueryClient } from '@tanstack/react-query'; -import { QueryKeys, Constants, dataService, getEndpointField } from 'librechat-data-provider'; +import { + QueryKeys, + Constants, + dataService, + getEndpointField, + getDefaultParamsEndpoint, +} from 'librechat-data-provider'; import type { TEndpointsConfig, TStartupConfig, @@ -106,11 +112,13 @@ const useNavigateToConvo = (index = 0) => { const models = modelsConfig?.[defaultEndpoint ?? ''] ?? []; + const defaultParamsEndpoint = getDefaultParamsEndpoint(endpointsConfig, defaultEndpoint); convo = buildDefaultConvo({ models, conversation, endpoint: defaultEndpoint, lastConversationSetup: conversation, + defaultParamsEndpoint, }); } clearAllConversations(true); diff --git a/client/src/hooks/MCP/__tests__/useMCPSelect.test.tsx b/client/src/hooks/MCP/__tests__/useMCPSelect.test.tsx index 26595b611c..783f525b9c 100644 --- a/client/src/hooks/MCP/__tests__/useMCPSelect.test.tsx +++ b/client/src/hooks/MCP/__tests__/useMCPSelect.test.tsx @@ -415,7 +415,7 @@ describe('useMCPSelect', () => { }); }); - it('should handle empty ephemeralAgent.mcp array correctly', async () => { + it('should clear mcpValues when ephemeralAgent.mcp is set to empty array', async () => { // Create a shared wrapper const { Wrapper, servers } = createWrapper(['initial-value']); @@ -437,19 +437,21 @@ describe('useMCPSelect', () => { expect(result.current.mcpHook.mcpValues).toEqual(['initial-value']); }); - // Try to set empty array externally + // Set empty array externally (e.g., spec with no MCP servers) act(() => { result.current.setEphemeralAgent({ mcp: [], }); }); - // Values should remain unchanged since empty mcp array doesn't trigger update - // (due to the condition: ephemeralAgent?.mcp && ephemeralAgent.mcp.length > 0) - expect(result.current.mcpHook.mcpValues).toEqual(['initial-value']); + // Jotai atom should be cleared — an explicit empty mcp array means + // the spec (or reset) has no MCP servers, so the visual selection must clear + await waitFor(() => { + expect(result.current.mcpHook.mcpValues).toEqual([]); + }); }); - it('should handle ephemeralAgent with clear mcp value', async () => { + it('should handle ephemeralAgent being reset to null', async () => { // Create a shared wrapper const { Wrapper, servers } = createWrapper(['server1', 'server2']); @@ -471,16 +473,15 @@ describe('useMCPSelect', () => { expect(result.current.mcpHook.mcpValues).toEqual(['server1', 'server2']); }); - // Set ephemeralAgent with clear value + // Reset ephemeralAgent to null (simulating non-spec reset) act(() => { - result.current.setEphemeralAgent({ - mcp: [Constants.mcp_clear as string], - }); + result.current.setEphemeralAgent(null); }); - // mcpValues should be cleared + // mcpValues should remain unchanged since null ephemeral agent + // doesn't trigger the sync effect (mcps.length === 0) await waitFor(() => { - expect(result.current.mcpHook.mcpValues).toEqual([]); + expect(result.current.mcpHook.mcpValues).toEqual(['server1', 'server2']); }); }); @@ -590,6 +591,233 @@ describe('useMCPSelect', () => { }); }); + describe('Environment-Keyed Storage (storageContextKey)', () => { + it('should use storageContextKey as atom key for new conversations', async () => { + const { Wrapper, servers } = createWrapper(['server1', 'server2']); + const storageContextKey = '__defaults__'; + + // Hook A: new conversation with storageContextKey + const { result: resultA } = renderHook( + () => useMCPSelect({ conversationId: null, storageContextKey, servers }), + { wrapper: Wrapper }, + ); + + act(() => { + resultA.current.setMCPValues(['server1']); + }); + + await waitFor(() => { + expect(resultA.current.mcpValues).toEqual(['server1']); + }); + + // Hook B: new conversation WITHOUT storageContextKey (different environment) + const { result: resultB } = renderHook( + () => useMCPSelect({ conversationId: null, servers }), + { wrapper: Wrapper }, + ); + + // Should NOT see server1 since it's a different atom (NEW_CONVO vs __defaults__) + expect(resultB.current.mcpValues).toEqual([]); + }); + + it('should use conversationId as atom key for existing conversations even with storageContextKey', async () => { + const conversationId = 'existing-convo-123'; + const { Wrapper, servers } = createWrapper(['server1', 'server2']); + const storageContextKey = '__defaults__'; + + const { result } = renderHook( + () => useMCPSelect({ conversationId, storageContextKey, servers }), + { wrapper: Wrapper }, + ); + + act(() => { + result.current.setMCPValues(['server1', 'server2']); + }); + + await waitFor(() => { + expect(result.current.mcpValues).toEqual(['server1', 'server2']); + }); + + // Verify timestamp was written to the conversation key, not the environment key + const convoKey = `${LocalStorageKeys.LAST_MCP_}${conversationId}`; + expect(setTimestamp).toHaveBeenCalledWith(convoKey); + }); + + it('should dual-write to environment key when storageContextKey is provided', async () => { + const { Wrapper, servers } = createWrapper(['server1', 'server2']); + const storageContextKey = '__defaults__'; + + const { result } = renderHook( + () => useMCPSelect({ conversationId: null, storageContextKey, servers }), + { wrapper: Wrapper }, + ); + + act(() => { + result.current.setMCPValues(['server1', 'server2']); + }); + + await waitFor(() => { + // Verify dual-write to environment key + const envKey = `${LocalStorageKeys.LAST_MCP_}${storageContextKey}`; + expect(localStorage.getItem(envKey)).toEqual(JSON.stringify(['server1', 'server2'])); + expect(setTimestamp).toHaveBeenCalledWith(envKey); + }); + }); + + it('should NOT dual-write when storageContextKey is undefined', async () => { + const conversationId = 'convo-no-specs'; + const { Wrapper, servers } = createWrapper(['server1']); + + const { result } = renderHook(() => useMCPSelect({ conversationId, servers }), { + wrapper: Wrapper, + }); + + act(() => { + result.current.setMCPValues(['server1']); + }); + + await waitFor(() => { + expect(result.current.mcpValues).toEqual(['server1']); + }); + + // Only the conversation-keyed timestamp should be set, no environment key + const envKey = `${LocalStorageKeys.LAST_MCP_}__defaults__`; + expect(localStorage.getItem(envKey)).toBeNull(); + }); + + it('should isolate per-conversation state from environment defaults', async () => { + const { Wrapper, servers } = createWrapper(['server1', 'server2', 'server3']); + const storageContextKey = '__defaults__'; + + // Set environment defaults via new conversation + const { result: newConvoResult } = renderHook( + () => useMCPSelect({ conversationId: null, storageContextKey, servers }), + { wrapper: Wrapper }, + ); + + act(() => { + newConvoResult.current.setMCPValues(['server1', 'server2']); + }); + + await waitFor(() => { + expect(newConvoResult.current.mcpValues).toEqual(['server1', 'server2']); + }); + + // Existing conversation should have its own isolated state + const { result: existingResult } = renderHook( + () => useMCPSelect({ conversationId: 'existing-convo', storageContextKey, servers }), + { wrapper: Wrapper }, + ); + + // Should start empty (its own atom), not inherit from defaults + expect(existingResult.current.mcpValues).toEqual([]); + + // Set different value for existing conversation + act(() => { + existingResult.current.setMCPValues(['server3']); + }); + + await waitFor(() => { + expect(existingResult.current.mcpValues).toEqual(['server3']); + }); + + // New conversation defaults should be unchanged + expect(newConvoResult.current.mcpValues).toEqual(['server1', 'server2']); + }); + }); + + describe('Spec/Non-Spec Context Switching', () => { + it('should clear MCP when ephemeral agent switches to empty mcp (spec with no MCP)', async () => { + const { Wrapper, servers } = createWrapper(['server1', 'server2']); + const storageContextKey = '__defaults__'; + + const TestComponent = ({ ctxKey }: { ctxKey?: string }) => { + const mcpHook = useMCPSelect({ conversationId: null, storageContextKey: ctxKey, servers }); + const setEphemeralAgent = useSetRecoilState(ephemeralAgentByConvoId(Constants.NEW_CONVO)); + return { mcpHook, setEphemeralAgent }; + }; + + // Start in non-spec context with some servers selected + const { result } = renderHook(() => TestComponent({ ctxKey: storageContextKey }), { + wrapper: Wrapper, + }); + + act(() => { + result.current.mcpHook.setMCPValues(['server1', 'server2']); + }); + + await waitFor(() => { + expect(result.current.mcpHook.mcpValues).toEqual(['server1', 'server2']); + }); + + // Simulate switching to a spec with no MCP — ephemeral agent gets mcp: [] + act(() => { + result.current.setEphemeralAgent({ mcp: [] }); + }); + + // MCP values should clear since the spec explicitly has no MCP servers + await waitFor(() => { + expect(result.current.mcpHook.mcpValues).toEqual([]); + }); + }); + + it('should handle ephemeral agent with spec MCP servers syncing to Jotai atom', async () => { + const { Wrapper, servers } = createWrapper(['spec-server1', 'spec-server2']); + + const TestComponent = () => { + const mcpHook = useMCPSelect({ conversationId: null, servers }); + const setEphemeralAgent = useSetRecoilState(ephemeralAgentByConvoId(Constants.NEW_CONVO)); + return { mcpHook, setEphemeralAgent }; + }; + + const { result } = renderHook(() => TestComponent(), { wrapper: Wrapper }); + + // Simulate spec application setting ephemeral agent MCP + act(() => { + result.current.setEphemeralAgent({ + mcp: ['spec-server1', 'spec-server2'], + execute_code: true, + }); + }); + + await waitFor(() => { + expect(result.current.mcpHook.mcpValues).toEqual(['spec-server1', 'spec-server2']); + }); + }); + + it('should handle null ephemeral agent reset (non-spec with specs configured)', async () => { + const { Wrapper, servers } = createWrapper(['server1', 'server2']); + + const TestComponent = () => { + const mcpHook = useMCPSelect({ servers }); + const setEphemeralAgent = useSetRecoilState(ephemeralAgentByConvoId(Constants.NEW_CONVO)); + return { mcpHook, setEphemeralAgent }; + }; + + const { result } = renderHook(() => TestComponent(), { wrapper: Wrapper }); + + // Set values from a spec + act(() => { + result.current.setEphemeralAgent({ mcp: ['server1', 'server2'] }); + }); + + await waitFor(() => { + expect(result.current.mcpHook.mcpValues).toEqual(['server1', 'server2']); + }); + + // Reset ephemeral agent to null (switching to non-spec) + act(() => { + result.current.setEphemeralAgent(null); + }); + + // mcpValues should remain unchanged — null ephemeral agent doesn't trigger sync + // (BadgeRowContext will fill from localStorage defaults separately) + await waitFor(() => { + expect(result.current.mcpHook.mcpValues).toEqual(['server1', 'server2']); + }); + }); + }); + describe('Memory Leak Prevention', () => { it('should not leak memory on repeated updates', async () => { const values = Array.from({ length: 100 }, (_, i) => `value-${i}`); diff --git a/client/src/hooks/MCP/useMCPSelect.ts b/client/src/hooks/MCP/useMCPSelect.ts index ec9dfe0bbb..b15786f678 100644 --- a/client/src/hooks/MCP/useMCPSelect.ts +++ b/client/src/hooks/MCP/useMCPSelect.ts @@ -9,9 +9,11 @@ import { MCPServerDefinition } from './useMCPServerManager'; export function useMCPSelect({ conversationId, + storageContextKey, servers, }: { conversationId?: string | null; + storageContextKey?: string; servers: MCPServerDefinition[]; }) { const key = conversationId ?? Constants.NEW_CONVO; @@ -19,47 +21,61 @@ export function useMCPSelect({ return new Set(servers?.map((s) => s.serverName)); }, [servers]); + /** + * For new conversations, key the MCP atom by environment (spec or defaults) + * so switching between spec ↔ non-spec gives each its own atom. + * For existing conversations, key by conversation ID for per-conversation isolation. + */ + const isNewConvo = key === Constants.NEW_CONVO; + const mcpAtomKey = isNewConvo && storageContextKey ? storageContextKey : key; + const [isPinned, setIsPinned] = useAtom(mcpPinnedAtom); - const [mcpValues, setMCPValuesRaw] = useAtom(mcpValuesAtomFamily(key)); + const [mcpValues, setMCPValuesRaw] = useAtom(mcpValuesAtomFamily(mcpAtomKey)); const [ephemeralAgent, setEphemeralAgent] = useRecoilState(ephemeralAgentByConvoId(key)); - // Sync Jotai state with ephemeral agent state + // Sync ephemeral agent MCP → Jotai atom (strip unconfigured servers) useEffect(() => { - const mcps = ephemeralAgent?.mcp ?? []; - if (mcps.length === 1 && mcps[0] === Constants.mcp_clear) { - setMCPValuesRaw([]); - } else if (mcps.length > 0) { - // Strip out servers that are not available in the startup config + const mcps = ephemeralAgent?.mcp; + if (Array.isArray(mcps) && mcps.length > 0 && configuredServers.size > 0) { const activeMcps = mcps.filter((mcp) => configuredServers.has(mcp)); - setMCPValuesRaw(activeMcps); - } - }, [ephemeralAgent?.mcp, setMCPValuesRaw, configuredServers]); - - useEffect(() => { - setEphemeralAgent((prev) => { - if (!isEqual(prev?.mcp, mcpValues)) { - return { ...(prev ?? {}), mcp: mcpValues }; + if (!isEqual(activeMcps, mcpValues)) { + setMCPValuesRaw(activeMcps); } - return prev; - }); - }, [mcpValues, setEphemeralAgent]); + } else if (Array.isArray(mcps) && mcps.length === 0 && mcpValues.length > 0) { + // Ephemeral agent explicitly has empty MCP (e.g., spec with no MCP servers) — clear atom + setMCPValuesRaw([]); + } + }, [ephemeralAgent?.mcp, setMCPValuesRaw, configuredServers, mcpValues]); + // Write timestamp when MCP values change useEffect(() => { - const mcpStorageKey = `${LocalStorageKeys.LAST_MCP_}${key}`; + const mcpStorageKey = `${LocalStorageKeys.LAST_MCP_}${mcpAtomKey}`; if (mcpValues.length > 0) { setTimestamp(mcpStorageKey); } - }, [mcpValues, key]); + }, [mcpValues, mcpAtomKey]); - /** Stable memoized setter */ + /** Stable memoized setter with dual-write to environment key */ const setMCPValues = useCallback( (value: string[]) => { if (!Array.isArray(value)) { return; } setMCPValuesRaw(value); + setEphemeralAgent((prev) => { + if (!isEqual(prev?.mcp, value)) { + return { ...(prev ?? {}), mcp: value }; + } + return prev; + }); + // Dual-write to environment key for new conversation defaults + if (storageContextKey) { + const envKey = `${LocalStorageKeys.LAST_MCP_}${storageContextKey}`; + localStorage.setItem(envKey, JSON.stringify(value)); + setTimestamp(envKey); + } }, - [setMCPValuesRaw], + [setMCPValuesRaw, setEphemeralAgent, storageContextKey], ); return { diff --git a/client/src/hooks/MCP/useMCPServerManager.ts b/client/src/hooks/MCP/useMCPServerManager.ts index bb5214be7c..af65ba4507 100644 --- a/client/src/hooks/MCP/useMCPServerManager.ts +++ b/client/src/hooks/MCP/useMCPServerManager.ts @@ -28,7 +28,10 @@ export interface MCPServerDefinition { // The init states (isInitializing, isCancellable, etc.) are stored in the global Jotai atom type PollIntervals = Record; -export function useMCPServerManager({ conversationId }: { conversationId?: string | null } = {}) { +export function useMCPServerManager({ + conversationId, + storageContextKey, +}: { conversationId?: string | null; storageContextKey?: string } = {}) { const localize = useLocalize(); const queryClient = useQueryClient(); const { showToast } = useToastContext(); @@ -73,6 +76,7 @@ export function useMCPServerManager({ conversationId }: { conversationId?: strin const { mcpValues, setMCPValues, isPinned, setIsPinned } = useMCPSelect({ conversationId, + storageContextKey, servers: selectableServers, }); const mcpValuesRef = useRef(mcpValues); @@ -429,33 +433,6 @@ export function useMCPServerManager({ conversationId }: { conversationId?: strin [startupConfig?.interface?.mcpServers?.placeholder, localize], ); - const batchToggleServers = useCallback( - (serverNames: string[]) => { - const connectedServers: string[] = []; - const disconnectedServers: string[] = []; - - serverNames.forEach((serverName) => { - if (isInitializing(serverName)) { - return; - } - - const serverStatus = connectionStatus?.[serverName]; - if (serverStatus?.connectionState === 'connected') { - connectedServers.push(serverName); - } else { - disconnectedServers.push(serverName); - } - }); - - setMCPValues(connectedServers); - - disconnectedServers.forEach((serverName) => { - initializeServer(serverName); - }); - }, - [connectionStatus, setMCPValues, initializeServer, isInitializing], - ); - const toggleServerSelection = useCallback( (serverName: string) => { if (isInitializing(serverName)) { @@ -469,15 +446,10 @@ export function useMCPServerManager({ conversationId }: { conversationId?: strin const filteredValues = currentValues.filter((name) => name !== serverName); setMCPValues(filteredValues); } else { - const serverStatus = connectionStatus?.[serverName]; - if (serverStatus?.connectionState === 'connected') { - setMCPValues([...currentValues, serverName]); - } else { - initializeServer(serverName); - } + setMCPValues([...currentValues, serverName]); } }, - [mcpValues, setMCPValues, connectionStatus, initializeServer, isInitializing], + [mcpValues, setMCPValues, isInitializing], ); const handleConfigSave = useCallback( @@ -673,7 +645,6 @@ export function useMCPServerManager({ conversationId }: { conversationId?: strin isPinned, setIsPinned, placeholderText, - batchToggleServers, toggleServerSelection, localize, diff --git a/client/src/hooks/Plugins/__tests__/useToolToggle.test.tsx b/client/src/hooks/Plugins/__tests__/useToolToggle.test.tsx new file mode 100644 index 0000000000..f617db2249 --- /dev/null +++ b/client/src/hooks/Plugins/__tests__/useToolToggle.test.tsx @@ -0,0 +1,328 @@ +import React from 'react'; +import { renderHook, act, waitFor } from '@testing-library/react'; +import { LocalStorageKeys, Tools } from 'librechat-data-provider'; +import { RecoilRoot, useRecoilValue, useSetRecoilState } from 'recoil'; +import { ephemeralAgentByConvoId } from '~/store'; +import { useToolToggle } from '../useToolToggle'; + +/** + * Tests for useToolToggle — the hook responsible for toggling tool badges + * (code execution, web search, file search, artifacts) and persisting state. + * + * Desired behaviors: + * - User toggles persist to per-conversation localStorage + * - In non-spec mode with specs configured (storageContextKey = '__defaults__'), + * toggles ALSO persist to the defaults key so future new conversations inherit them + * - In spec mode (storageContextKey = undefined), toggles only persist per-conversation + * - The hook reflects the current ephemeral agent state + */ + +// Mock data-provider auth query +jest.mock('~/data-provider', () => ({ + useVerifyAgentToolAuth: jest.fn().mockReturnValue({ + data: { authenticated: true }, + }), +})); + +// Mock timestamps (track calls without actual localStorage timestamp logic) +jest.mock('~/utils/timestamps', () => ({ + setTimestamp: jest.fn(), +})); + +// Mock useLocalStorageAlt (isPinned state — not relevant to our behavior tests) +jest.mock('~/hooks/useLocalStorageAlt', () => jest.fn(() => [false, jest.fn()])); + +const Wrapper: React.FC<{ children: React.ReactNode }> = ({ children }) => ( + {children} +); + +describe('useToolToggle', () => { + beforeEach(() => { + jest.clearAllMocks(); + localStorage.clear(); + }); + + // ─── Dual-Write Behavior ─────────────────────────────────────────── + + describe('non-spec mode: dual-write to defaults key', () => { + const storageContextKey = '__defaults__'; + + it('should write to both conversation key and defaults key when user toggles a tool', () => { + const conversationId = 'convo-123'; + const { result } = renderHook( + () => + useToolToggle({ + conversationId, + storageContextKey, + toolKey: Tools.execute_code, + localStorageKey: LocalStorageKeys.LAST_CODE_TOGGLE_, + isAuthenticated: true, + }), + { wrapper: Wrapper }, + ); + + act(() => { + result.current.handleChange({ value: true }); + }); + + // Conversation key: per-conversation persistence + const convoKey = `${LocalStorageKeys.LAST_CODE_TOGGLE_}${conversationId}`; + // Defaults key: persists for future new conversations + const defaultsKey = `${LocalStorageKeys.LAST_CODE_TOGGLE_}${storageContextKey}`; + + // Sync effect writes to conversation key + expect(localStorage.getItem(convoKey)).toBe(JSON.stringify(true)); + // handleChange dual-writes to defaults key + expect(localStorage.getItem(defaultsKey)).toBe(JSON.stringify(true)); + }); + + it('should persist false values to defaults key when user disables a tool', () => { + const { result } = renderHook( + () => + useToolToggle({ + conversationId: 'convo-456', + storageContextKey, + toolKey: Tools.web_search, + localStorageKey: LocalStorageKeys.LAST_WEB_SEARCH_TOGGLE_, + isAuthenticated: true, + }), + { wrapper: Wrapper }, + ); + + // Enable then disable + act(() => { + result.current.handleChange({ value: true }); + }); + act(() => { + result.current.handleChange({ value: false }); + }); + + const defaultsKey = `${LocalStorageKeys.LAST_WEB_SEARCH_TOGGLE_}${storageContextKey}`; + expect(localStorage.getItem(defaultsKey)).toBe(JSON.stringify(false)); + }); + }); + + describe('spec mode: no dual-write', () => { + it('should only write to conversation key, not to any defaults key', () => { + const conversationId = 'spec-convo-789'; + const { result } = renderHook( + () => + useToolToggle({ + conversationId, + storageContextKey: undefined, // spec mode + toolKey: Tools.execute_code, + localStorageKey: LocalStorageKeys.LAST_CODE_TOGGLE_, + isAuthenticated: true, + }), + { wrapper: Wrapper }, + ); + + act(() => { + result.current.handleChange({ value: true }); + }); + + // Conversation key should have the value + const convoKey = `${LocalStorageKeys.LAST_CODE_TOGGLE_}${conversationId}`; + expect(localStorage.getItem(convoKey)).toBe(JSON.stringify(true)); + + // Defaults key should NOT have a value + const defaultsKey = `${LocalStorageKeys.LAST_CODE_TOGGLE_}__defaults__`; + expect(localStorage.getItem(defaultsKey)).toBeNull(); + }); + }); + + // ─── Per-Conversation Isolation ──────────────────────────────────── + + describe('per-conversation isolation', () => { + it('should maintain separate toggle state per conversation', () => { + const TestComponent = ({ conversationId }: { conversationId: string }) => { + const toggle = useToolToggle({ + conversationId, + toolKey: Tools.execute_code, + localStorageKey: LocalStorageKeys.LAST_CODE_TOGGLE_, + isAuthenticated: true, + }); + const ephemeralAgent = useRecoilValue(ephemeralAgentByConvoId(conversationId)); + return { toggle, ephemeralAgent }; + }; + + // Conversation A: enable code + const { result: resultA } = renderHook(() => TestComponent({ conversationId: 'convo-A' }), { + wrapper: Wrapper, + }); + + act(() => { + resultA.current.toggle.handleChange({ value: true }); + }); + + // Conversation B: disable code + const { result: resultB } = renderHook(() => TestComponent({ conversationId: 'convo-B' }), { + wrapper: Wrapper, + }); + + act(() => { + resultB.current.toggle.handleChange({ value: false }); + }); + + // Each conversation has its own value in localStorage + expect(localStorage.getItem(`${LocalStorageKeys.LAST_CODE_TOGGLE_}convo-A`)).toBe('true'); + expect(localStorage.getItem(`${LocalStorageKeys.LAST_CODE_TOGGLE_}convo-B`)).toBe('false'); + }); + }); + + // ─── Ephemeral Agent Sync ────────────────────────────────────────── + + describe('ephemeral agent reflects toggle state', () => { + it('should update ephemeral agent when user toggles a tool', async () => { + const conversationId = 'convo-sync-test'; + const TestComponent = () => { + const toggle = useToolToggle({ + conversationId, + toolKey: Tools.execute_code, + localStorageKey: LocalStorageKeys.LAST_CODE_TOGGLE_, + isAuthenticated: true, + }); + const ephemeralAgent = useRecoilValue(ephemeralAgentByConvoId(conversationId)); + return { toggle, ephemeralAgent }; + }; + + const { result } = renderHook(() => TestComponent(), { wrapper: Wrapper }); + + act(() => { + result.current.toggle.handleChange({ value: true }); + }); + + await waitFor(() => { + expect(result.current.ephemeralAgent?.execute_code).toBe(true); + }); + + act(() => { + result.current.toggle.handleChange({ value: false }); + }); + + await waitFor(() => { + expect(result.current.ephemeralAgent?.execute_code).toBe(false); + }); + }); + + it('should reflect external ephemeral agent changes in toolValue', async () => { + const conversationId = 'convo-external'; + const TestComponent = () => { + const toggle = useToolToggle({ + conversationId, + toolKey: Tools.web_search, + localStorageKey: LocalStorageKeys.LAST_WEB_SEARCH_TOGGLE_, + isAuthenticated: true, + }); + const setEphemeralAgent = useSetRecoilState(ephemeralAgentByConvoId(conversationId)); + return { toggle, setEphemeralAgent }; + }; + + const { result } = renderHook(() => TestComponent(), { wrapper: Wrapper }); + + // External update (e.g., from applyModelSpecEphemeralAgent) + act(() => { + result.current.setEphemeralAgent({ web_search: true, execute_code: false }); + }); + + await waitFor(() => { + expect(result.current.toggle.toolValue).toBe(true); + expect(result.current.toggle.isToolEnabled).toBe(true); + }); + }); + + it('should sync externally-set ephemeral agent values to localStorage', async () => { + const conversationId = 'convo-sync-ls'; + const TestComponent = () => { + const toggle = useToolToggle({ + conversationId, + toolKey: Tools.file_search, + localStorageKey: LocalStorageKeys.LAST_FILE_SEARCH_TOGGLE_, + isAuthenticated: true, + }); + const setEphemeralAgent = useSetRecoilState(ephemeralAgentByConvoId(conversationId)); + return { toggle, setEphemeralAgent }; + }; + + const { result } = renderHook(() => TestComponent(), { wrapper: Wrapper }); + + // Simulate applyModelSpecEphemeralAgent setting a value + act(() => { + result.current.setEphemeralAgent({ file_search: true }); + }); + + // The sync effect should write to conversation-keyed localStorage + await waitFor(() => { + const storageKey = `${LocalStorageKeys.LAST_FILE_SEARCH_TOGGLE_}${conversationId}`; + expect(localStorage.getItem(storageKey)).toBe(JSON.stringify(true)); + }); + }); + }); + + // ─── isToolEnabled computation ───────────────────────────────────── + + describe('isToolEnabled computation', () => { + it('should return false when tool is not set', () => { + const { result } = renderHook( + () => + useToolToggle({ + conversationId: 'convo-1', + toolKey: Tools.execute_code, + localStorageKey: LocalStorageKeys.LAST_CODE_TOGGLE_, + isAuthenticated: true, + }), + { wrapper: Wrapper }, + ); + + expect(result.current.isToolEnabled).toBe(false); + }); + + it('should treat non-empty string as enabled (artifacts)', async () => { + const conversationId = 'convo-artifacts'; + const TestComponent = () => { + const toggle = useToolToggle({ + conversationId, + toolKey: 'artifacts', + localStorageKey: LocalStorageKeys.LAST_ARTIFACTS_TOGGLE_, + isAuthenticated: true, + }); + const setEphemeralAgent = useSetRecoilState(ephemeralAgentByConvoId(conversationId)); + return { toggle, setEphemeralAgent }; + }; + + const { result } = renderHook(() => TestComponent(), { wrapper: Wrapper }); + + act(() => { + result.current.setEphemeralAgent({ artifacts: 'default' }); + }); + + await waitFor(() => { + expect(result.current.toggle.isToolEnabled).toBe(true); + }); + }); + + it('should treat empty string as disabled (artifacts off)', async () => { + const conversationId = 'convo-no-artifacts'; + const TestComponent = () => { + const toggle = useToolToggle({ + conversationId, + toolKey: 'artifacts', + localStorageKey: LocalStorageKeys.LAST_ARTIFACTS_TOGGLE_, + isAuthenticated: true, + }); + const setEphemeralAgent = useSetRecoilState(ephemeralAgentByConvoId(conversationId)); + return { toggle, setEphemeralAgent }; + }; + + const { result } = renderHook(() => TestComponent(), { wrapper: Wrapper }); + + act(() => { + result.current.setEphemeralAgent({ artifacts: '' }); + }); + + await waitFor(() => { + expect(result.current.toggle.isToolEnabled).toBe(false); + }); + }); + }); +}); diff --git a/client/src/hooks/Plugins/useToolToggle.ts b/client/src/hooks/Plugins/useToolToggle.ts index 3b12e87d51..d8026cad1c 100644 --- a/client/src/hooks/Plugins/useToolToggle.ts +++ b/client/src/hooks/Plugins/useToolToggle.ts @@ -13,6 +13,7 @@ type ToolValue = boolean | string; interface UseToolToggleOptions { conversationId?: string | null; + storageContextKey?: string; toolKey: string; localStorageKey: LocalStorageKeys; isAuthenticated?: boolean; @@ -26,6 +27,7 @@ interface UseToolToggleOptions { export function useToolToggle({ conversationId, + storageContextKey, toolKey: _toolKey, localStorageKey, isAuthenticated: externalIsAuthenticated, @@ -93,8 +95,22 @@ export function useToolToggle({ ...(prev || {}), [toolKey]: value, })); + + // Dual-write to environment key for new conversation defaults + if (storageContextKey) { + const envKey = `${localStorageKey}${storageContextKey}`; + localStorage.setItem(envKey, JSON.stringify(value)); + setTimestamp(envKey); + } }, - [setIsDialogOpen, isAuthenticated, setEphemeralAgent, toolKey], + [ + setIsDialogOpen, + isAuthenticated, + setEphemeralAgent, + toolKey, + storageContextKey, + localStorageKey, + ], ); const debouncedChange = useMemo( diff --git a/client/src/hooks/useNewConvo.ts b/client/src/hooks/useNewConvo.ts index c468ab30a2..7fa499f40d 100644 --- a/client/src/hooks/useNewConvo.ts +++ b/client/src/hooks/useNewConvo.ts @@ -14,6 +14,7 @@ import { LocalStorageKeys, isEphemeralAgentId, isAssistantsEndpoint, + getDefaultParamsEndpoint, } from 'librechat-data-provider'; import type { TPreset, @@ -191,11 +192,13 @@ const useNewConvo = (index = 0) => { } const models = modelsConfig?.[defaultEndpoint] ?? []; + const defaultParamsEndpoint = getDefaultParamsEndpoint(endpointsConfig, defaultEndpoint); conversation = buildDefaultConvo({ conversation, lastConversationSetup: activePreset as TConversation, endpoint: defaultEndpoint, models, + defaultParamsEndpoint, }); } diff --git a/client/src/locales/en/translation.json b/client/src/locales/en/translation.json index e961e6cd3c..a9f8805d9b 100644 --- a/client/src/locales/en/translation.json +++ b/client/src/locales/en/translation.json @@ -859,11 +859,13 @@ "com_ui_create_api_key": "Create API Key", "com_ui_create_assistant": "Create Assistant", "com_ui_create_link": "Create link", + "com_ui_create_mcp_server": "Create MCP server", "com_ui_create_memory": "Create Memory", "com_ui_create_new_agent": "Create New Agent", "com_ui_create_prompt": "Create Prompt", "com_ui_create_prompt_page": "New Prompt Configuration Page", "com_ui_created": "Created", + "com_ui_creating": "Creating...", "com_ui_creating_image": "Creating image. May take a moment", "com_ui_current": "Current", "com_ui_currently_production": "Currently in production", @@ -904,6 +906,8 @@ "com_ui_delete_confirm_strong": "This will delete {{title}}", "com_ui_delete_conversation": "Delete chat?", "com_ui_delete_conversation_tooltip": "Delete conversation", + "com_ui_delete_mcp_server": "Delete MCP Server?", + "com_ui_delete_mcp_server_name": "Delete MCP server {{0}}", "com_ui_delete_memory": "Delete Memory", "com_ui_delete_not_allowed": "Delete operation is not allowed", "com_ui_delete_preset": "Delete Preset?", @@ -916,6 +920,7 @@ "com_ui_delete_tool_confirm": "Are you sure you want to delete this tool?", "com_ui_delete_tool_save_reminder": "Tool removed. Save the agent to apply changes.", "com_ui_deleted": "Deleted", + "com_ui_deleting": "Deleting...", "com_ui_deleting_file": "Deleting file...", "com_ui_descending": "Desc", "com_ui_description": "Description", @@ -1438,6 +1443,8 @@ "com_ui_unset": "Unset", "com_ui_untitled": "Untitled", "com_ui_update": "Update", + "com_ui_update_mcp_server": "Update MCP server", + "com_ui_updating": "Updating...", "com_ui_upload": "Upload", "com_ui_upload_agent_avatar": "Successfully updated agent avatar", "com_ui_upload_agent_avatar_label": "Upload agent avatar image", diff --git a/client/src/locales/lv/translation.json b/client/src/locales/lv/translation.json index d13b2c83b9..5048c33dcc 100644 --- a/client/src/locales/lv/translation.json +++ b/client/src/locales/lv/translation.json @@ -224,6 +224,7 @@ "com_endpoint_agent": "Aģents", "com_endpoint_agent_placeholder": "Lūdzu, izvēlieties aģentu", "com_endpoint_ai": "Mākslīgais intelekts", + "com_endpoint_anthropic_effort": "Kontrolē, cik lielu skaitļošanas piepūli piemēro Claude. Mazāka piepūle ietaupa tokenus un samazina ātrumu; lielāka piepūle nodrošina rūpīgākas atbildes. 'Max' ļauj veikt visdziļāko argumentāciju (tikai Opus 4.6).", "com_endpoint_anthropic_maxoutputtokens": "Maksimālais atbildē ģenerējamo tokenu skaits. Norādiet zemāku vērtību īsākām atbildēm un augstāku vērtību garākām atbildēm. Piezīme: modeļi var apstāties pirms šī maksimālā skaita sasniegšanas.", "com_endpoint_anthropic_prompt_cache": "Uzvednes kešatmiņa ļauj atkārtoti izmantot lielu kontekstu vai instrukcijas API izsaukumos, samazinot izmaksas un ābildes ātrumu.", "com_endpoint_anthropic_temp": "Diapazons no 0 līdz 1. Analītiskiem/atbilžu variantiem izmantot temp vērtību tuvāk 0, bet radošiem un ģeneratīviem uzdevumiem — tuvāk 1. Iesakām mainīt šo vai Top P, bet ne abus.", @@ -265,6 +266,7 @@ "com_endpoint_default_with_num": "noklusējums: {{0}}", "com_endpoint_disable_streaming": "Izslēgt atbilžu straumēšanu un saņemt visu atbildi uzreiz. Noderīgi tādiem modeļiem kā o3, kas pieprasa organizācijas pārbaudi straumēšanai.", "com_endpoint_disable_streaming_label": "Atspējot straumēšanu", + "com_endpoint_effort": "Piepūle", "com_endpoint_examples": "Iestatījumi", "com_endpoint_export": "Eksportēt", "com_endpoint_export_share": "Eksportēt/kopīgot", @@ -857,11 +859,13 @@ "com_ui_create_api_key": "Izveidot API atslēgu", "com_ui_create_assistant": "Izveidot palīgu", "com_ui_create_link": "Izveidot saiti", + "com_ui_create_mcp_server": "Izveidot MCP serveri", "com_ui_create_memory": "Izveidot atmiņu", "com_ui_create_new_agent": "Izveidot jaunu aģentu", "com_ui_create_prompt": "Izveidot uzvedni", "com_ui_create_prompt_page": "Jauna uzvedņu konfigurācijas lapa", "com_ui_created": "Izveidots", + "com_ui_creating": "Notiek izveide...", "com_ui_creating_image": "Attēla izveide. Var aizņemt brīdi.", "com_ui_current": "Pašreizējais", "com_ui_currently_production": "Pašlaik produkcijā", @@ -902,6 +906,8 @@ "com_ui_delete_confirm_strong": "Šis izdzēsīs {{title}}", "com_ui_delete_conversation": "Dzēst sarunu?", "com_ui_delete_conversation_tooltip": "Dzēst sarunu", + "com_ui_delete_mcp_server": "Vai dzēst MCP serveri?", + "com_ui_delete_mcp_server_name": "Dzēst MCP serveri {{0}}", "com_ui_delete_memory": "Dzēst atmiņu", "com_ui_delete_not_allowed": "Dzēšanas darbība nav atļauta", "com_ui_delete_preset": "Vai dzēst iestatījumu?", @@ -914,6 +920,7 @@ "com_ui_delete_tool_confirm": "Vai tiešām vēlaties dzēst šo rīku?", "com_ui_delete_tool_save_reminder": "Rīks noņemts. Saglabājiet aģentu, lai piemērotu izmaiņas.", "com_ui_deleted": "Dzēsts", + "com_ui_deleting": "Dzēš...", "com_ui_deleting_file": "Dzēšu failu...", "com_ui_descending": "Dilstošs", "com_ui_description": "Apraksts", @@ -1084,6 +1091,7 @@ "com_ui_manage": "Pārvaldīt", "com_ui_marketplace": "Katalogs", "com_ui_marketplace_allow_use": "Atļaut izmantot katalogu", + "com_ui_max": "Maksimums", "com_ui_max_favorites_reached": "Sasniegts maksimālais piesprausto elementu skaits ({{0}}). Atvienojiet elementu, lai pievienotu citu.", "com_ui_max_file_size": "PNG, JPG vai JPEG (maks. {{0}})", "com_ui_max_tags": "Maksimālais atļautais skaits ir {{0}}, izmantojot jaunākās vērtības.", @@ -1437,6 +1445,8 @@ "com_ui_unset": "Neuzlikts", "com_ui_untitled": "Bez nosaukuma", "com_ui_update": "Atjauninājums", + "com_ui_update_mcp_server": "Atjaunināt MCP serveri", + "com_ui_updating": "Atjaunina...", "com_ui_upload": "Augšupielādēt", "com_ui_upload_agent_avatar": "Aģenta avatars veiksmīgi atjaunināts", "com_ui_upload_agent_avatar_label": "Augšupielādēt aģenta avatāra attēlu", diff --git a/client/src/locales/nb/translation.json b/client/src/locales/nb/translation.json index 15d77af35d..f5b1943b39 100644 --- a/client/src/locales/nb/translation.json +++ b/client/src/locales/nb/translation.json @@ -3,11 +3,13 @@ "chat_direction_right_to_left": "Høyre til venstre", "com_a11y_ai_composing": "KI-en skriver fortsatt.", "com_a11y_end": "KI-en har fullført svaret sitt.", + "com_a11y_selected": "valgt", "com_a11y_start": "KI-en har begynt å svare.", "com_agents_agent_card_label": "{{name}}-agent. {{description}}", "com_agents_all": "Alle agenter", "com_agents_all_category": "Alle", "com_agents_all_description": "Utforsk delte agenter på tvers av alle kategorier", + "com_agents_avatar_upload_error": "Kunne ikke laste opp agentavatar", "com_agents_by_librechat": "av LibreChat", "com_agents_category_aftersales": "Salgsoppfølging", "com_agents_category_aftersales_description": "Agenter for kundeservice, support og oppfølging etter et gjennomført salg.", @@ -26,6 +28,7 @@ "com_agents_category_sales_description": "Agenter som bistår i salgsprosesser og med kundekontakt.", "com_agents_category_tab_label": "Kategorien {{category}}, {{position}} av {{total}}", "com_agents_category_tabs_label": "Agentkategorier", + "com_agents_chat_with": "Chat med {{navn}}", "com_agents_clear_search": "Tøm søket", "com_agents_code_interpreter": "Når aktivert, kan agenten din bruke LibreChat Code Interpreter API for å kjøre generert kode sikkert, inkludert filbehandling. Krever en gyldig API-nøkkel.", "com_agents_code_interpreter_title": "Code Interpreter API", @@ -33,6 +36,7 @@ "com_agents_copy_link": "Kopier lenke", "com_agents_create_error": "Det oppstod en feil under oppretting av agenten.", "com_agents_created_by": "av", + "com_agents_description_card": "Beskrivelse: {{description}}", "com_agents_description_placeholder": "Valgfritt: Beskriv agenten din her.", "com_agents_empty_state_heading": "Ingen agenter funnet", "com_agents_enable_file_search": "Aktiver filsøk", @@ -59,7 +63,9 @@ "com_agents_error_timeout_suggestion": "Sjekk internettforbindelsen din og prøv igjen.", "com_agents_error_timeout_title": "Tidsavbrudd for tilkobling", "com_agents_error_title": "Noe gikk galt", + "com_agents_file_context_description": "Filer lastet opp som \"Kontekst\" er analysert son tekst for å supplementere agenten sine instruksjoner. Dersom OCR er tilgjengelig, eller er konfigurert for den opplastede filtypen, vil prosessen bli brukt til å hente ut tekst. Dette er ideelt for dokumenter, bilder med tekst, eller PDFer som krever det fulle tekstinnholdet i en fil.", "com_agents_file_context_disabled": "Agenten må være opprettet før du kan laste opp filer for filkontekst.", + "com_agents_file_context_label": "Filkontekst", "com_agents_file_search_disabled": "Agenten må være opprettet før du kan laste opp filer for filsøk.", "com_agents_file_search_info": "Når dette er aktivert, vil agenten bruke de eksakte filnavnene listet nedenfor for å hente relevant kontekst fra disse filene.", "com_agents_grid_announcement": "Viser {{count}} agenter i kategorien {{category}}.", @@ -87,7 +93,7 @@ "com_agents_search_empty_heading": "Ingen søkeresultater", "com_agents_search_info": "Når aktivert, kan agenten din søke på nettet for oppdatert informasjon. Krever en gyldig API-nøkkel.", "com_agents_search_instructions": "Skriv for å søke etter agenter etter navn eller beskrivelse.", - "com_agents_search_name": "Søk agenter etter navn", + "com_agents_search_name": "Søk etter agenter ved navn", "com_agents_search_no_results": "Ingen agenter funnet for «{{query}}».", "com_agents_search_placeholder": "Søk agenter ...", "com_agents_see_more": "Se mer", @@ -139,6 +145,7 @@ "com_assistants_update_actions_success": "Handlingen ble opprettet eller oppdatert.", "com_assistants_update_error": "Det oppstod en feil under oppdatering av assistenten.", "com_assistants_update_success": "Oppdatering fullført", + "com_assistants_update_success_name": "Oppdatering av {{name}} vellykket", "com_auth_already_have_account": "Har du allerede en konto?", "com_auth_apple_login": "Logg inn med Apple", "com_auth_back_to_login": "Tilbake til innlogging", @@ -217,10 +224,11 @@ "com_endpoint_agent": "Agent", "com_endpoint_agent_placeholder": "Velg en agent", "com_endpoint_ai": "KI", + "com_endpoint_anthropic_effort": "Kontrollerer hvor mye innsats Claude legger i beregning. Lavere innsats sparer tokens og reduserer treghet, høyere innsats produserer mer gjennom responser. \"Maks\" gir den høyeste graden av resonnering (kun Opus 4.6)", "com_endpoint_anthropic_maxoutputtokens": "Maksimalt antall tokens som kan genereres i svaret. Angi en lavere verdi for kortere svar og en høyere verdi for lengre svar. Merk: Modeller kan stoppe før de når dette maksimumet.", "com_endpoint_anthropic_prompt_cache": "Prompt-mellomlagring gjør det mulig å gjenbruke stor kontekst eller instruksjoner på tvers av API-kall, noe som reduserer kostnader og ventetid.", "com_endpoint_anthropic_temp": "Varierer fra 0 til 1. Bruk en temperatur nærmere 0 for analytiske oppgaver, og nærmere 1 for kreative og generative oppgaver. Vi anbefaler å endre enten denne eller Topp P, men ikke begge.", - "com_endpoint_anthropic_thinking": "Aktiverer intern resonnering for støttede Claude-modeller (f.eks. 3.7 Sonnet). Merk: Krever at \"Tenkebudsjett\" er satt og er lavere enn \"Maks utdata-tokens\".", + "com_endpoint_anthropic_thinking": "Aktiverer intern resonnering for støttede Claude-modeller. For nyere modeller (Opus 4.6+) brukes adaptiv tenkning kontrollert av Effort-parameteren. For eldre modeller kreves det at \"Thinking Budget\" er satt og lavere enn \"Max Output Tokens\".", "com_endpoint_anthropic_thinking_budget": "Bestemmer det maksimale antallet tokens Claude kan bruke for sin interne resonneringsprosess. Et større budsjett kan forbedre svarkvaliteten for komplekse problemer. Denne verdien må være lavere enn \"Maks utdata-tokens\".", "com_endpoint_anthropic_topk": "Top-k endrer hvordan modellen velger tokens for utdata. En top-k på 1 betyr at det valgte tokenet er det mest sannsynlige (grådig dekoding). En top-k på 3 betyr at det neste tokenet velges blant de 3 mest sannsynlige (ved hjelp av temperatur).", "com_endpoint_anthropic_topp": "Top-p endrer hvordan modellen velger tokens for utdata. Tokens velges fra de mest sannsynlige til summen av sannsynlighetene deres er lik top-p-verdien.", @@ -258,6 +266,7 @@ "com_endpoint_default_with_num": "standard: {{0}}", "com_endpoint_disable_streaming": "Deaktiver strømming av svar og motta hele svaret på en gang. Nyttig for modeller som krever organisasjonsverifisering for strømming.", "com_endpoint_disable_streaming_label": "Deaktiver strømming", + "com_endpoint_effort": "Innsats", "com_endpoint_examples": "Forhåndsinnstillinger", "com_endpoint_export": "Eksporter", "com_endpoint_export_share": "Eksporter/Del", @@ -274,7 +283,7 @@ "com_endpoint_instructions_assistants_placeholder": "Overstyrer assistentens instruksjoner. Nyttig for å endre atferden for en enkelt kjøring.", "com_endpoint_max_output_tokens": "Maks utdata-tokens", "com_endpoint_message": "Melding", - "com_endpoint_message_new": "Melding {{0}}", + "com_endpoint_message_new": "Send melding til {{0}}", "com_endpoint_message_not_appendable": "Rediger meldingen din eller regenerer.", "com_endpoint_my_preset": "Min forhåndsinnstilling", "com_endpoint_no_presets": "Ingen forhåndsinnstillinger ennå. Bruk innstillingsknappen for å lage en.", @@ -308,6 +317,7 @@ "com_endpoint_preset_default_removed": "er ikke lenger standard forhåndsinnstilling.", "com_endpoint_preset_delete_confirm": "Er du sikker på at du vil slette denne forhåndsinnstillingen?", "com_endpoint_preset_delete_error": "Det oppstod en feil under sletting av forhåndsinnstillingen. Vennligst prøv igjen.", + "com_endpoint_preset_delete_success": "Sletting av forhåndsinnstilling vellykket", "com_endpoint_preset_import": "Forhåndsinnstilling importert!", "com_endpoint_preset_import_error": "Det oppstod en feil under importering av forhåndsinnstillingen. Vennligst prøv igjen.", "com_endpoint_preset_name": "Navn på forhåndsinnstilling", @@ -348,6 +358,7 @@ "com_error_files_process": "Det oppstod en feil under behandling av filen.", "com_error_files_upload": "Det oppstod en feil under opplasting av filen.", "com_error_files_upload_canceled": "Forespørselen om filopplasting ble avbrutt. Merk: Filopplastingen kan fortsatt behandles og må slettes manuelt.", + "com_error_files_upload_too_large": "Filen er for stor. Vennligst last opp en fil som er mindre enn {{}} MB", "com_error_files_validation": "Det oppstod en feil under validering av filen.", "com_error_google_tool_conflict": "Bruk av innebygde Google-verktøy støttes ikke sammen med eksterne verktøy. Deaktiver enten de innebygde eller de eksterne verktøyene.", "com_error_heic_conversion": "Konvertering av HEIC-bilde til JPEG mislyktes. Prøv å konvertere bildet manuelt eller bruk et annet format.", @@ -360,6 +371,7 @@ "com_error_moderation": "Innholdet du sendte inn ble flagget av vårt moderasjonssystem. Vi kan ikke fortsette med dette emnet. Rediger meldingen din eller start en ny samtale.", "com_error_no_base_url": "Ingen base-URL funnet. Oppgi en og prøv igjen.", "com_error_no_user_key": "Ingen nøkkel funnet. Oppgi en nøkkel og prøv igjen.", + "com_error_refusal": "Responsen ble avslått av sikkerhetsfiltere. Skriv om på meldingen din og prøv igjen. Dersom denne feilmeldingen forekommer ofte imens du bruker Claude Sonnet 4.5 eller Opus 4.1, kan du prøve Sonnet 4, som har andre bruksrestriksjoner.", "com_file_pages": "Sider: {{pages}}", "com_file_source": "Fil", "com_file_unknown": "Ukjent fil", @@ -368,9 +380,12 @@ "com_files_download_progress": "{{0}} av {{1}} filer", "com_files_downloading": "Laster ned filer", "com_files_filter": "Filtrer filer ...", + "com_files_filter_by": "Filtrer filer etter...", "com_files_no_results": "Ingen resultater.", "com_files_number_selected": "{{0}} av {{1}} valgt", "com_files_preparing_download": "Forbereder nedlasting ...", + "com_files_result_found": "{{count}} resultater funnet", + "com_files_results_found": "{{count}} resultater funnet", "com_files_sharepoint_picker_title": "Velg filer", "com_files_table": "Fil-tabell", "com_files_upload_local_machine": "Fra lokal datamaskin", @@ -421,6 +436,7 @@ "com_nav_chat_commands": "Samtalekommandoer", "com_nav_chat_commands_info": "Disse kommandoene aktiveres ved å skrive bestemte tegn i begynnelsen av meldingen din. Hver kommando utløses av sitt angitte prefiks. Du kan deaktivere dem hvis du ofte bruker disse tegnene til å starte meldinger.", "com_nav_chat_direction": "Samtaleretning", + "com_nav_chat_direction_selected": "Chat retning: {{direction}}", "com_nav_clear_all_chats": "Fjern alle samtaler", "com_nav_clear_cache_confirm_message": "Er du sikker på at du vil tømme mellomlageret?", "com_nav_clear_conversation": "Fjern samtaler", @@ -428,9 +444,11 @@ "com_nav_close_sidebar": "Lukk sidefelt", "com_nav_commands": "Kommandoer", "com_nav_confirm_clear": "Bekreft fjerning", + "com_nav_control_panel": "Kontrollpanel", "com_nav_conversation_mode": "Samtalemodus", "com_nav_convo_menu_options": "Samtalemenyvalg", "com_nav_db_sensitivity": "Desibelfølsomhet", + "com_nav_default_temporary_chat": "Midlertidig Chat som standard", "com_nav_delete_account": "Slett konto", "com_nav_delete_account_button": "Slett kontoen min permanent", "com_nav_delete_account_confirm": "Slett konto – er du sikker?", @@ -464,6 +482,7 @@ "com_nav_info_code_artifacts": "Aktiverer visning av eksperimentelle kodeartefakter ved siden av samtalen.", "com_nav_info_code_artifacts_agent": "Aktiverer bruk av kodeartefakter for denne agenten. Som standard legges det til tilleggsinstruksjoner for bruk av artefakter, med mindre \"Egendefinert prompt-modus\" er aktivert.", "com_nav_info_custom_prompt_mode": "Når aktivert, vil standard systemprompt for artefakter ikke bli inkludert. Alle instruksjoner for å generere artefakter må gis manuelt i denne modusen.", + "com_nav_info_default_temporary_chat": "Når dette er påskrudd vil nye chatter starte med \"midlertidig chat\" som standard. Midlertidige chatter blir ikke lagret til historikken din.", "com_nav_info_enter_to_send": "Når aktivert, vil et trykk på `ENTER` sende meldingen din. Når deaktivert, vil et trykk på Enter legge til en ny linje. Du må da trykke `CTRL + ENTER` / `⌘ + ENTER` for å sende.", "com_nav_info_fork_change_default": "`Kun synlige meldinger` inkluderer bare den direkte stien til den valgte meldingen. `Inkluder relaterte grener` legger til grener langs stien. `Inkluder alt til/fra her` inkluderer alle tilknyttede meldinger og grener.", "com_nav_info_fork_split_target_setting": "Når aktivert, vil forgreningen starte fra målmeldingen til den siste meldingen i samtalen, i henhold til den valgte atferden.", @@ -473,6 +492,7 @@ "com_nav_info_save_draft": "Når aktivert, vil teksten og vedleggene du skriver inn bli lagret lokalt som et utkast. Utkastet er tilgjengelig selv om du laster siden på nytt eller bytter samtale. Utkastet slettes når meldingen er sendt.", "com_nav_info_show_thinking": "Når aktivert, vil tenke-nedtrekksmenyene vises som standard, slik at du kan se KI-ens resonnement i sanntid. Når deaktivert, vil de være lukket for et renere grensesnitt.", "com_nav_info_user_name_display": "Når aktivert, vil brukernavnet ditt vises over hver melding du sender. Når deaktivert, vil du bare se \"Du\" over meldingene dine.", + "com_nav_keep_screen_awake": "Hold skjermen på gjennom generering av respons", "com_nav_lang_arabic": "Arabisk (العربية)", "com_nav_lang_armenian": "Armensk (Հայերեն)", "com_nav_lang_auto": "Automatisk gjenkjenning", @@ -491,16 +511,20 @@ "com_nav_lang_german": "Tysk (Deutsch)", "com_nav_lang_hebrew": "Hebraisk (עברית)", "com_nav_lang_hungarian": "Ungarsk (Magyar)", + "com_nav_lang_icelandic": "Islandsk", "com_nav_lang_indonesia": "Indonesisk (Indonesia)", "com_nav_lang_italian": "Italiensk (Italiano)", "com_nav_lang_japanese": "Japansk (日本語)", "com_nav_lang_korean": "Koreansk (한국어)", "com_nav_lang_latvian": "Latvisk (Latviski)", + "com_nav_lang_lithuanian": "Litauisk", "com_nav_lang_norwegian_bokmal": "Norsk bokmål", + "com_nav_lang_norwegian_nynorsk": "Norsk nynorsk", "com_nav_lang_persian": "Persisk (فارسی)", "com_nav_lang_polish": "Polsk (Polski)", "com_nav_lang_portuguese": "Portugisisk (Português)", "com_nav_lang_russian": "Russisk (Русский)", + "com_nav_lang_slovak": "Slovensk", "com_nav_lang_slovenian": "Slovensk", "com_nav_lang_spanish": "Spansk (Español)", "com_nav_lang_swedish": "Svensk (Svenska)", @@ -516,8 +540,18 @@ "com_nav_log_out": "Logg ut", "com_nav_long_audio_warning": "Lengre tekster vil ta lengre tid å behandle.", "com_nav_maximize_chat_space": "Maksimer samtaleplass", + "com_nav_mcp_access_revoked": "Tilbakekalling av MCP servertilgang vellykket.", "com_nav_mcp_configure_server": "Konfigurer {{0}}", + "com_nav_mcp_connect": "Koble til", + "com_nav_mcp_connect_server": "Koble til {{0}}", + "com_nav_mcp_reconnect": "Koble til på nytt", + "com_nav_mcp_status_connected": "Tilkoblet", "com_nav_mcp_status_connecting": "{{0}} - Kobler til", + "com_nav_mcp_status_disconnected": "Frakoblet", + "com_nav_mcp_status_error": "Feil", + "com_nav_mcp_status_initializing": "Starter", + "com_nav_mcp_status_needs_auth": "Trenger Auth", + "com_nav_mcp_status_unknown": "Ukjent", "com_nav_mcp_vars_update_error": "Feil ved oppdatering av egendefinerte MCP-brukervariabler.", "com_nav_mcp_vars_updated": "Egendefinerte MCP-brukervariabler ble oppdatert.", "com_nav_modular_chat": "Aktiver bytte av endepunkter midt i en samtale", @@ -538,6 +572,7 @@ "com_nav_setting_balance": "Saldo", "com_nav_setting_chat": "Samtale", "com_nav_setting_data": "Datakontroll", + "com_nav_setting_delay": "Forsinkelse (s)", "com_nav_setting_general": "Generelt", "com_nav_setting_mcp": "MCP-innstillinger", "com_nav_setting_personalization": "Personalisering", @@ -555,6 +590,7 @@ "com_nav_theme_dark": "Mørkt", "com_nav_theme_light": "Lyst", "com_nav_theme_system": "System", + "com_nav_toggle_sidebar": "Skru sidebar av/på", "com_nav_tool_dialog": "Assistentverktøy", "com_nav_tool_dialog_agents": "Agentverktøy", "com_nav_tool_dialog_description": "Assistenten må lagres for at verktøyvalg skal vedvare.", @@ -605,17 +641,27 @@ "com_ui_action_button": "Handlingsknapp", "com_ui_active": "Aktiv", "com_ui_add": "Legg til", + "com_ui_add_code_interpreter_api_key": "Legg til kodetolk API nøkkel", + "com_ui_add_first_bookmark": "Klikk på en chat for å legge til", + "com_ui_add_first_mcp_server": "Lag din første MCP server for å komme i gang", + "com_ui_add_first_prompt": "Lag din første prompt for å komme i gang", "com_ui_add_mcp": "Legg til MCP", "com_ui_add_mcp_server": "Legg til MCP-server", "com_ui_add_model_preset": "Legg til en modell eller forhåndsinnstilling for et ekstra svar.", "com_ui_add_multi_conversation": "Legg til flersamtale", + "com_ui_add_special_variables": "Legg til spesialvariable", + "com_ui_add_web_search_api_keys": "Legg til nettsøk API-nøkler", "com_ui_adding_details": "Legger til detaljer", + "com_ui_additional_details": "Flere detaljer", "com_ui_admin": "Admin", "com_ui_admin_access_warning": "Deaktivering av admin-tilgang til denne funksjonen kan forårsake uventede UI-problemer. Hvis lagret, kan dette kun tilbakestilles via konfigurasjonsfilen (librechat.yaml).", "com_ui_admin_settings": "Admin-innstillinger", + "com_ui_admin_settings_section": "Admininnstillinger - {{section}}", "com_ui_advanced": "Avansert", "com_ui_advanced_settings": "Avanserte innstillinger", "com_ui_agent": "Agent", + "com_ui_agent_api_keys": "Agent API-nøkler", + "com_ui_agent_api_keys_description": "Lag API-nøkler for å få tilgang til agenter via API", "com_ui_agent_category_aftersales": "Ettersalg", "com_ui_agent_category_finance": "Finans", "com_ui_agent_category_general": "Generelt", @@ -631,6 +677,17 @@ "com_ui_agent_deleted": "Agenten ble slettet.", "com_ui_agent_duplicate_error": "Det oppstod en feil under duplisering av agenten.", "com_ui_agent_duplicated": "Agenten ble duplisert.", + "com_ui_agent_handoff_add": "Legg til overleveringsagent", + "com_ui_agent_handoff_description": "Beskrivelse av overlevering", + "com_ui_agent_handoff_description_placeholder": "f.eks., Overfør til dataanalytiker for statistisk analyse", + "com_ui_agent_handoff_info": "Konfigurer agenter som denne agenten kan overføre samtaler til når spesifikk ekspertise er nødvendig", + "com_ui_agent_handoff_info_2": "Hver overlevering lager et overføringsverktøy som tillater sømløs ruting til spesialistagenter med kontekst.", + "com_ui_agent_handoff_max": "Maksgrensen på {{0}} overleveringsagenter er nådd", + "com_ui_agent_handoff_prompt": "Gjennomføringsinnhold", + "com_ui_agent_handoff_prompt_key": "Innholdsparameter navn (standard: \"instruksjoner\")", + "com_ui_agent_handoff_prompt_key_placeholder": "Merk innholdet som er sendt (standard: \"instruksjoner\")", + "com_ui_agent_handoff_prompt_placeholder": "Fortell denne agenten hvilket innhold den skal generere og videreføre til overleveringsagenten. Du må legge til noe her for å skru på denne funksjonen.", + "com_ui_agent_handoffs": "Agentoverleveringer", "com_ui_agent_name_is_required": "Agentnavn er påkrevd.", "com_ui_agent_recursion_limit": "Maks agentsteg", "com_ui_agent_recursion_limit_info": "Begrenser hvor mange steg agenten kan ta i en kjøring før den gir et endelig svar. Standard er 25 steg. Et steg er enten en API-forespørsel eller bruk av et verktøy.", @@ -652,12 +709,23 @@ "com_ui_agents": "Agenter", "com_ui_agents_allow_create": "Tillat oppretting av agenter", "com_ui_agents_allow_share": "Tillat deling av agenter", + "com_ui_agents_allow_share_public": "Tillat offentlig deling av agenter", "com_ui_agents_allow_use": "Tillat bruk av agenter", "com_ui_all": "alle", "com_ui_all_proper": "Alle", "com_ui_analyzing": "Analyserer", "com_ui_analyzing_finished": "Ferdig med å analysere", "com_ui_api_key": "API-nøkkel", + "com_ui_api_key_copied": "API-nøkler kopiert til utklippstavlen", + "com_ui_api_key_create_error": "Kunne ikke lage API-nøkkel", + "com_ui_api_key_created": "Oppretting av API-nøkkel vellykket", + "com_ui_api_key_delete_error": "Kunne ikke slette API-nøkkel", + "com_ui_api_key_deleted": "Sletting av API-nøkkel vellykket", + "com_ui_api_key_name": "Navn på nøkkel", + "com_ui_api_key_name_placeholder": "Min API-nøkkel", + "com_ui_api_key_name_required": "Navn på API-nøkkel påkrevd", + "com_ui_api_key_warning": "Husk å kopiere API-nøkkelen din nå, du vil ikke kunne se den igjen!", + "com_ui_api_keys_load_error": "Kunne ikke laste inn API-nøkler", "com_ui_archive": "Arkiver", "com_ui_archive_delete_error": "Sletting av arkivert samtale mislyktes.", "com_ui_archive_error": "Arkivering av samtale mislyktes.", @@ -674,6 +742,7 @@ "com_ui_assistants_output": "Assistent-utdata", "com_ui_at_least_one_owner_required": "Minst én eier er påkrevd.", "com_ui_attach_error": "Kan ikke legge ved fil. Opprett eller velg en samtale, eller prøv å laste siden på nytt.", + "com_ui_attach_error_disabled": "FIlopplasting er deaktivert for dette endepunktet", "com_ui_attach_error_openai": "Kan ikke legge ved assistentfiler til andre endepunkter.", "com_ui_attach_error_size": "Filstørrelsesgrensen er overskredet for endepunktet:", "com_ui_attach_error_type": "Filtypen støttes ikke for endepunktet:", @@ -690,6 +759,7 @@ "com_ui_azure": "Azure", "com_ui_azure_ad": "Entra ID", "com_ui_back": "Tilbake", + "com_ui_back_to_builder": "Tilbake til bygger", "com_ui_back_to_chat": "Tilbake til samtale", "com_ui_back_to_prompts": "Tilbake til prompter", "com_ui_backup_code_number": "Kode #{{number}}", @@ -701,10 +771,12 @@ "com_ui_basic": "Grunnleggende", "com_ui_basic_auth_header": "Grunnleggende autorisasjonshode", "com_ui_bearer": "Bearer", + "com_ui_beta": "Beta", "com_ui_bookmark_delete_confirm": "Er du sikker på at du vil slette dette bokmerket?", "com_ui_bookmarks": "Bokmerker", "com_ui_bookmarks_add": "Legg til bokmerker", "com_ui_bookmarks_add_to_conversation": "Legg til i gjeldende samtale", + "com_ui_bookmarks_count_selected": "Bokmerker, {{count}} valgt", "com_ui_bookmarks_create_error": "Det oppstod en feil under oppretting av bokmerket.", "com_ui_bookmarks_create_exists": "Dette bokmerket finnes allerede.", "com_ui_bookmarks_create_success": "Bokmerket ble opprettet.", @@ -719,52 +791,88 @@ "com_ui_bookmarks_title": "Tittel", "com_ui_bookmarks_update_error": "Det oppstod en feil under oppdatering av bokmerket.", "com_ui_bookmarks_update_success": "Bokmerket ble oppdatert.", + "com_ui_branch_created": "Oppretting av gren vellykket", + "com_ui_branch_error": "Kunne ikke opprette gren", + "com_ui_branch_message": "Lag en gren fra denne responsen", + "com_ui_by_author": "av {{0}}", "com_ui_callback_url": "Tilbakekallings-URL", "com_ui_cancel": "Avbryt", "com_ui_cancelled": "Avbrutt", "com_ui_category": "Kategori", + "com_ui_change_version": "Endre versjon", "com_ui_chat": "Samtale", "com_ui_chat_history": "Samtalehistorikk", + "com_ui_chats": "Samtaler", + "com_ui_check_internet": "Sjekk din internettforbindelse", "com_ui_clear": "Fjern", "com_ui_clear_all": "Fjern alle", + "com_ui_clear_browser_cache": "Tøm nettleserbufferen", + "com_ui_clear_presets": "Tøm forhåndsinnstillinger", + "com_ui_clear_search": "Tøm søk", + "com_ui_click_to_close": "Klikk her for å lukke", + "com_ui_click_to_view_var": "Klikk her for å se {{0}}", "com_ui_client_id": "Klient-ID", "com_ui_client_secret": "Klienthemmelighet", "com_ui_close": "Lukk", "com_ui_close_menu": "Lukk meny", + "com_ui_close_settings": "Lukk innstillinger", + "com_ui_close_var": "Lukk {{0}}", "com_ui_close_window": "Lukk vindu", "com_ui_code": "Kode", + "com_ui_collapse": "Skjul", "com_ui_collapse_chat": "Skjul samtale", + "com_ui_collapse_thoughts": "Skjul tanker", "com_ui_command_placeholder": "Valgfritt: Skriv inn en kommando for prompten, ellers vil navnet bli brukt.", "com_ui_command_usage_placeholder": "Velg en prompt med kommando eller navn.", "com_ui_complete_setup": "Fullfør oppsett", "com_ui_concise": "Kortfattet", + "com_ui_configure": "Konfigurer", "com_ui_configure_mcp_variables_for": "Konfigurer variabler for {{0}}", "com_ui_confirm": "Bekreft", "com_ui_confirm_action": "Bekreft handling", "com_ui_confirm_admin_use_change": "Endring av denne innstillingen vil blokkere tilgang for administratorer, inkludert deg selv. Er du sikker på at du vil fortsette?", "com_ui_confirm_change": "Bekreft endring", "com_ui_connecting": "Kobler til", + "com_ui_contact_admin_if_issue_persists": "Kontakt en adiministrator dersom problemet vedvarer", "com_ui_context": "Kontekst", + "com_ui_context_filter_sort": "Filtrer og sortér etter kontekst", "com_ui_continue": "Fortsett", "com_ui_continue_oauth": "Fortsett med OAuth", + "com_ui_control_bar": "Kontroller bar", "com_ui_controls": "Kontroller", + "com_ui_conversation": "samtale", + "com_ui_conversation_label": "{{tittel}} samtale", + "com_ui_conversations": "samtaler", + "com_ui_convo_archived": "Samtale arkivert", "com_ui_convo_delete_error": "Sletting av samtale mislyktes.", + "com_ui_convo_delete_success": "Sletting av samtale vellykket", "com_ui_copied": "Kopiert!", "com_ui_copied_to_clipboard": "Kopiert til utklippstavlen", + "com_ui_copy": "Kopier", "com_ui_copy_code": "Kopier kode", "com_ui_copy_link": "Kopier lenke", + "com_ui_copy_stack_trace": "Kopier stack trace", + "com_ui_copy_thoughts_to_clipboard": "Kopier tanker til utklippstavle", "com_ui_copy_to_clipboard": "Kopier til utklippstavlen", "com_ui_copy_url_to_clipboard": "Kopier URL til utklippstavlen", "com_ui_create": "Opprett", + "com_ui_create_api_key": "Opprett API-nøkkel", + "com_ui_create_assistant": "Lag assistent", "com_ui_create_link": "Opprett lenke", + "com_ui_create_mcp_server": "Lag MCP-server", "com_ui_create_memory": "Opprett minne", + "com_ui_create_new_agent": "L", "com_ui_create_prompt": "Opprett prompt", + "com_ui_create_prompt_page": "ag ", + "com_ui_created": "Opprettet", + "com_ui_creating": "Oppretter...", "com_ui_creating_image": "Oppretter bilde. Dette kan ta et øyeblikk.", "com_ui_current": "Gjeldende", "com_ui_currently_production": "For øyeblikket i produksjon", "com_ui_custom": "Egendefinert", "com_ui_custom_header_name": "Egendefinert overskriftsnavn", "com_ui_custom_prompt_mode": "Egendefinert prompt-modus", + "com_ui_dark_theme_enabled": "Mørkt tema aktivert", "com_ui_dashboard": "Oversikt", "com_ui_date": "Dato", "com_ui_date_april": "April", @@ -781,6 +889,7 @@ "com_ui_date_previous_30_days": "Siste 30 dager", "com_ui_date_previous_7_days": "Siste 7 dager", "com_ui_date_september": "September", + "com_ui_date_sort": "Sorter etter dato", "com_ui_date_today": "I dag", "com_ui_date_yesterday": "I går", "com_ui_decline": "Jeg godtar ikke", @@ -788,19 +897,30 @@ "com_ui_delete": "Slett", "com_ui_delete_action": "Slett handling", "com_ui_delete_action_confirm": "Er du sikker på at du vil slette denne handlingen?", + "com_ui_delete_agent": "Slett agent", "com_ui_delete_agent_confirm": "Er du sikker på at du vil slette denne agenten?", + "com_ui_delete_assistant": "Slett assistent", "com_ui_delete_assistant_confirm": "Er du sikker på at du vil slette denne assistenten? Dette kan ikke angres.", "com_ui_delete_confirm": "Dette vil slette", "com_ui_delete_confirm_prompt_version_var": "Dette vil slette den valgte versjonen for \"{{0}}\". Hvis ingen andre versjoner eksisterer, vil prompten bli slettet.", + "com_ui_delete_confirm_strong": "Dette vil slette {{title}}", "com_ui_delete_conversation": "Slette samtalen?", + "com_ui_delete_conversation_tooltip": "Slett samtale", + "com_ui_delete_mcp_server": "Ønsker du å slette MCP-serveren?", + "com_ui_delete_mcp_server_name": "Slett MCP-server {{0}}", "com_ui_delete_memory": "Slett minne", "com_ui_delete_not_allowed": "Sletteoperasjon er ikke tillatt.", + "com_ui_delete_preset": "Ønsker du å slette forhåndsinnstillingen", "com_ui_delete_prompt": "Slette prompten?", + "com_ui_delete_prompt_name": "Slett prompt - {{name}}", "com_ui_delete_shared_link": "Slette delt lenke?", + "com_ui_delete_shared_link_heading": "Slett delt lenke", "com_ui_delete_success": "Vellykket slettet", "com_ui_delete_tool": "Slett verktøy", "com_ui_delete_tool_confirm": "Er du sikker på at du vil slette dette verktøyet?", + "com_ui_delete_tool_save_reminder": "Verktøy fjernet. Lagre agenten for å ta i bruk endreinger.", "com_ui_deleted": "Slettet", + "com_ui_deleting": "Sletter...", "com_ui_deleting_file": "Sletter fil ...", "com_ui_descending": "Synkende", "com_ui_description": "Beskrivelse", @@ -808,37 +928,52 @@ "com_ui_deselect_all": "Fravelg alle", "com_ui_detailed": "Detaljert", "com_ui_disabling": "Deaktiverer ...", + "com_ui_done": "Ferdig", "com_ui_download": "Last ned", "com_ui_download_artifact": "Last ned artefakt", "com_ui_download_backup": "Last ned reservekoder", "com_ui_download_backup_tooltip": "Før du fortsetter, last ned reservekodene dine. Du vil trenge dem for å få tilgang igjen hvis du mister autentiseringsenheten din.", "com_ui_download_error": "Feil ved nedlasting av fil. Filen kan ha blitt slettet.", + "com_ui_download_error_logs": "Last ned feillogger", "com_ui_drag_drop": "Dra og slipp fil(er) her, eller klikk for å velge.", "com_ui_dropdown_variables": "Nedtrekksvariabler:", "com_ui_dropdown_variables_info": "Opprett egendefinerte nedtrekksmenyer for promptene dine: `{{variabelnavn:valg1|valg2|valg3}}`", "com_ui_duplicate": "Dupliser", + "com_ui_duplicate_agent": "Dupliser Agent", "com_ui_duplication_error": "Det oppstod en feil under duplisering av samtalen.", "com_ui_duplication_processing": "Dupliserer samtale ...", "com_ui_duplication_success": "Samtalen ble duplisert.", "com_ui_edit": "Rediger", "com_ui_edit_editing_image": "Redigerer bilde", "com_ui_edit_mcp_server": "Rediger MCP-server", + "com_ui_edit_mcp_server_dialog_description": "Unik Serveridentifikator: {{serverName}}", "com_ui_edit_memory": "Rediger minne", + "com_ui_edit_preset_title": "Rediger forhåndsinnstilling - {{title}}", + "com_ui_edit_prompt_page": "Rediger promptside", + "com_ui_editable_message": "Redigerbar melding", + "com_ui_editor_instructions": "Dra bildet for å flytte • Bruk zoom slider eller knapper for å justere størrelse", "com_ui_empty_category": "-", "com_ui_endpoint": "Endepunkt", "com_ui_endpoint_menu": "LLM-endepunktmeny", "com_ui_enter": "Enter", "com_ui_enter_api_key": "Skriv inn API-nøkkel", + "com_ui_enter_description": "Angi beskrivelse (valgfritt)", "com_ui_enter_key": "Skriv inn nøkkel", + "com_ui_enter_name": "Angi navn", "com_ui_enter_openapi_schema": "Skriv inn ditt OpenAPI-skjema her.", "com_ui_enter_value": "Skriv inn verdi", "com_ui_error": "Feil", "com_ui_error_connection": "Feil ved tilkobling til serveren, prøv å laste siden på nytt.", + "com_ui_error_message_prefix": "Feilmelding:", "com_ui_error_save_admin_settings": "Det oppstod en feil under lagring av admin-innstillingene.", + "com_ui_error_try_following_prefix": "Vennligst prøv en av de følgende", + "com_ui_error_unexpected": "Oops! Noe uforventet skjedde", "com_ui_error_updating_preferences": "Feil ved oppdatering av preferanser.", "com_ui_everyone_permission_level": "Alles tillatelsesnivå", "com_ui_examples": "Eksempler", + "com_ui_expand": "Utvid", "com_ui_expand_chat": "Utvid samtale", + "com_ui_expand_thoughts": "Utvidede tanker", "com_ui_export_convo_modal": "Eksporter samtale-modal", "com_ui_feedback_more": "Mer ...", "com_ui_feedback_more_information": "Gi ytterligere tilbakemelding", @@ -858,10 +993,12 @@ "com_ui_feedback_tag_unjustified_refusal": "Nektet uten grunn", "com_ui_field_max_length": "{{field}} må inneholde mindre enn {{length}} tegn", "com_ui_field_required": "Dette feltet er påkrevd.", + "com_ui_file_input_avatar_label": "Filinput for avatar", "com_ui_file_size": "Filstørrelse", "com_ui_file_token_limit": "Tokengrense for filer", "com_ui_file_token_limit_desc": "Angir maksimalt antall tokens som kan benyttes for filhåndtering. En høyere grense kan øke behandlingstid og kostnader.", "com_ui_files": "Filer", + "com_ui_filter_mcp_servers": "Filtrer MCP-servere etter navn", "com_ui_filter_prompts": "Filtrer prompter", "com_ui_filter_prompts_name": "Filtrer prompter etter navn", "com_ui_final_touch": "Siste finpuss", @@ -885,6 +1022,7 @@ "com_ui_fork_info_visible": "Dette alternativet forgrener kun de synlige meldingene, altså den direkte stien til målmeldingen, uten noen grener.", "com_ui_fork_more_details_about": "Se tilleggsinformasjon om forgrening-alternativet «{{0}}»", "com_ui_fork_more_info_options": "Se detaljert forklaring av alle forgrening-alternativer.", + "com_ui_fork_open_menu": "Åpne forgreningsmeny", "com_ui_fork_processing": "Forgrener samtale ...", "com_ui_fork_remember": "Husk", "com_ui_fork_remember_checked": "Ditt valg vil bli husket. Endre dette når som helst i innstillingene.", @@ -903,7 +1041,11 @@ "com_ui_good_evening": "God kveld", "com_ui_good_morning": "God morgen", "com_ui_group": "Gruppe", + "com_ui_handoff_instructions": "Overleveringsinstruksjoner", "com_ui_happy_birthday": "Det er min første bursdag!", + "com_ui_header_format": "Overskriftsformat", + "com_ui_hide": "Skjul", + "com_ui_hide_code": "Skjul kode", "com_ui_hide_image_details": "Skjul bildedetaljer", "com_ui_hide_password": "Skjul passord", "com_ui_hide_qr": "Skjul QR-kode", @@ -920,18 +1062,26 @@ "com_ui_import_conversation_file_type_error": "Importtypen støttes ikke.", "com_ui_import_conversation_info": "Importer samtaler fra en JSON-fil.", "com_ui_import_conversation_success": "Samtalene ble importert.", + "com_ui_import_conversation_upload_error": "Feil under opplasting av fil. Vennligst prøv igjen.", + "com_ui_importing": "Importerer", "com_ui_include_shadcnui": "Inkluder instruksjoner for shadcn/ui-komponenter", "com_ui_initializing": "Initialiserer...", "com_ui_input": "Inndata", "com_ui_instructions": "Instruksjoner", "com_ui_key": "Nøkkel", + "com_ui_key_required": "API-nøkkel påkrevd", + "com_ui_last_used": "Sist brukt", "com_ui_late_night": "God senkveld", "com_ui_latest_footer": "Én KI for alle.", "com_ui_latest_production_version": "Siste produksjonsversjon", "com_ui_latest_version": "Siste versjon", + "com_ui_leave_blank_to_keep": "La stå tomt for å beholde eksisterende", "com_ui_librechat_code_api_key": "Få din LibreChat Kodetolk API-nøkkel", "com_ui_librechat_code_api_subtitle": "Sikker. Flerspråklig. Fil-input/output.", "com_ui_librechat_code_api_title": "Kjør KI-kode", + "com_ui_light_theme_enabled": "Lyst tema aktivert", + "com_ui_link_copied": "Lenke kopiert", + "com_ui_link_refreshed": "Lenken er oppdatert", "com_ui_loading": "Laster ...", "com_ui_locked": "Låst", "com_ui_logo": "{{0}}-logo", @@ -939,8 +1089,12 @@ "com_ui_manage": "Administrer", "com_ui_marketplace": "Markedsplass", "com_ui_marketplace_allow_use": "Tillat bruk av markedsplass", + "com_ui_max": "Maks", + "com_ui_max_favorites_reached": "Maksimalt antall festede gjenstander nådd ({{0}}). Fjern noen gjenstander for å legge til flere.", + "com_ui_max_file_size": "PNG, JPG eller JPEG (maks {{0}})", "com_ui_max_tags": "Maksimalt antall er {{0}}. Bruker siste verdier.", "com_ui_mcp_authenticated_success": "MCP-serveren '{{0}}' ble autentisert.", + "com_ui_mcp_click_to_defer": "Klikk for å utsette – verktøyet vil være synlig via søk, men ikke lastet inn før det trengs", "com_ui_mcp_configure_server": "Konfigurer {{0}}", "com_ui_mcp_configure_server_description": "Konfigurer egendefinerte variabler for {{0}}", "com_ui_mcp_enter_var": "Skriv inn verdi for {{0}}", diff --git a/client/src/store/favorites.ts b/client/src/store/favorites.ts index b3744f52b0..9065f1ca4e 100644 --- a/client/src/store/favorites.ts +++ b/client/src/store/favorites.ts @@ -1,4 +1,4 @@ -import { createStorageAtom } from './jotai-utils'; +import { createTabIsolatedAtom } from './jotai-utils'; export type Favorite = { agentId?: string; @@ -16,4 +16,4 @@ export type FavoritesState = Favorite[]; /** * This atom stores the user's favorite models/agents */ -export const favoritesAtom = createStorageAtom('favorites', []); +export const favoritesAtom = createTabIsolatedAtom('favorites', []); diff --git a/client/src/store/jotai-utils.ts b/client/src/store/jotai-utils.ts index d3ca9d817c..5d2769d7e9 100644 --- a/client/src/store/jotai-utils.ts +++ b/client/src/store/jotai-utils.ts @@ -1,5 +1,6 @@ import { atom } from 'jotai'; import { atomWithStorage } from 'jotai/utils'; +import type { SyncStorage } from 'jotai/vanilla/utils/atomWithStorage'; /** * Create a simple atom with localStorage persistence @@ -42,6 +43,68 @@ export function createStorageAtomWithEffect( ); } +/** + * Create a SyncStorage adapter that reads/writes to localStorage but does NOT + * subscribe to browser `storage` events. This prevents cross-tab synchronization + * for atoms where each tab should maintain independent state. + * + * Use this for atoms that represent per-tab working state (e.g., favorites toggle, + * MCP server selections) rather than user preferences. + */ +export function createTabIsolatedStorage(): SyncStorage { + return { + getItem(key: string, initialValue: Value): Value { + if (typeof window === 'undefined') { + return initialValue; + } + try { + const stored = localStorage.getItem(key); + if (stored === null) { + return initialValue; + } + return JSON.parse(stored) as Value; + } catch { + return initialValue; + } + }, + setItem(key: string, newValue: Value): void { + if (typeof window === 'undefined') { + return; + } + try { + localStorage.setItem(key, JSON.stringify(newValue)); + } catch { + // quota exceeded or other write error — silently ignore + } + }, + removeItem(key: string): void { + if (typeof window === 'undefined') { + return; + } + try { + localStorage.removeItem(key); + } catch { + // silently ignore + } + }, + // subscribe intentionally omitted — prevents cross-tab sync via storage events + }; +} + +/** + * Create an atom with localStorage persistence that does NOT sync across tabs. + * Parallels `createStorageAtom` but uses tab-isolated storage. + * + * @param key - localStorage key + * @param defaultValue - default value if no saved value exists + * @returns Jotai atom with localStorage persistence, isolated per tab + */ +export function createTabIsolatedAtom(key: string, defaultValue: T) { + return atomWithStorage(key, defaultValue, createTabIsolatedStorage(), { + getOnInit: true, + }); +} + /** * Initialize a value from localStorage and optionally apply it * Useful for applying saved values on app startup (e.g., theme, fontSize) diff --git a/client/src/store/mcp.ts b/client/src/store/mcp.ts index e540b167e4..793e1cebd0 100644 --- a/client/src/store/mcp.ts +++ b/client/src/store/mcp.ts @@ -1,6 +1,14 @@ import { atom } from 'jotai'; import { atomFamily, atomWithStorage } from 'jotai/utils'; import { Constants, LocalStorageKeys } from 'librechat-data-provider'; +import { createTabIsolatedStorage } from './jotai-utils'; + +/** + * Tab-isolated storage for MCP values — prevents cross-tab sync so that + * each tab's MCP server selections are independent (especially for new chats + * which all share the same `LAST_MCP_new` localStorage key). + */ +const mcpTabIsolatedStorage = createTabIsolatedStorage(); /** * Creates a storage atom for MCP values per conversation @@ -10,7 +18,7 @@ export const mcpValuesAtomFamily = atomFamily((conversationId: string | null) => const key = conversationId ?? Constants.NEW_CONVO; const storageKey = `${LocalStorageKeys.LAST_MCP_}${key}`; - return atomWithStorage(storageKey, [], undefined, { getOnInit: true }); + return atomWithStorage(storageKey, [], mcpTabIsolatedStorage, { getOnInit: true }); }); /** diff --git a/client/src/style.css b/client/src/style.css index 689c05423d..cf3ea50294 100644 --- a/client/src/style.css +++ b/client/src/style.css @@ -70,6 +70,7 @@ html { --text-secondary-alt: var(--gray-500); --text-tertiary: var(--gray-500); --text-warning: var(--amber-500); + --text-destructive: var(--red-600); --ring-primary: var(--gray-500); --header-primary: var(--white); --header-hover: var(--gray-50); @@ -96,6 +97,7 @@ html { --border-medium: var(--gray-300); --border-heavy: var(--gray-400); --border-xheavy: var(--gray-500); + --border-destructive: var(--red-600); /* These are test styles */ --background: 0 0% 100%; @@ -131,6 +133,7 @@ html { --text-secondary-alt: var(--gray-400); --text-tertiary: var(--gray-500); --text-warning: var(--amber-500); + --text-destructive: var(--red-600); --header-primary: var(--gray-700); --header-hover: var(--gray-600); --header-button-hover: var(--gray-700); @@ -156,6 +159,7 @@ html { --border-medium: var(--gray-600); --border-heavy: var(--gray-500); --border-xheavy: var(--gray-400); + --border-destructive: var(--red-500); /* These are test styles */ --background: 0 0% 7%; diff --git a/client/src/utils/__tests__/applyModelSpecEphemeralAgent.test.ts b/client/src/utils/__tests__/applyModelSpecEphemeralAgent.test.ts new file mode 100644 index 0000000000..44bfbb82f7 --- /dev/null +++ b/client/src/utils/__tests__/applyModelSpecEphemeralAgent.test.ts @@ -0,0 +1,274 @@ +import { Constants, LocalStorageKeys } from 'librechat-data-provider'; +import type { TModelSpec, TEphemeralAgent } from 'librechat-data-provider'; +import { applyModelSpecEphemeralAgent } from '../endpoints'; +import { setTimestamp } from '../timestamps'; + +/** + * Tests for applyModelSpecEphemeralAgent — the function responsible for + * constructing the ephemeral agent state when navigating to a spec conversation. + * + * Desired behaviors: + * - New conversations always get the admin's exact spec configuration + * - Existing conversations merge per-conversation localStorage overrides on top of spec + * - Cleared localStorage for existing conversations falls back to fresh spec config + */ + +const createModelSpec = (overrides: Partial = {}): TModelSpec => + ({ + name: 'test-spec', + label: 'Test Spec', + preset: { endpoint: 'agents' }, + mcpServers: ['spec-server1'], + webSearch: true, + executeCode: true, + fileSearch: false, + artifacts: true, + ...overrides, + }) as TModelSpec; + +/** Write a value + fresh timestamp to localStorage (simulates a user toggle) */ +function writeToolToggle(storagePrefix: string, convoId: string, value: unknown): void { + const key = `${storagePrefix}${convoId}`; + localStorage.setItem(key, JSON.stringify(value)); + setTimestamp(key); +} + +describe('applyModelSpecEphemeralAgent', () => { + let updateEphemeralAgent: jest.Mock; + + beforeEach(() => { + localStorage.clear(); + updateEphemeralAgent = jest.fn(); + }); + + // ─── New Conversations ───────────────────────────────────────────── + + describe('new conversations always get fresh admin spec config', () => { + it('should apply exactly the admin-configured tools and MCP servers', () => { + const modelSpec = createModelSpec({ + mcpServers: ['clickhouse', 'github'], + executeCode: true, + webSearch: false, + fileSearch: true, + artifacts: true, + }); + + applyModelSpecEphemeralAgent({ + convoId: null, + modelSpec, + updateEphemeralAgent, + }); + + expect(updateEphemeralAgent).toHaveBeenCalledWith(Constants.NEW_CONVO, { + mcp: ['clickhouse', 'github'], + execute_code: true, + web_search: false, + file_search: true, + artifacts: 'default', + }); + }); + + it('should not read from localStorage even if stale values exist', () => { + // Simulate stale localStorage from a previous session + writeToolToggle(LocalStorageKeys.LAST_CODE_TOGGLE_, Constants.NEW_CONVO, false); + writeToolToggle(LocalStorageKeys.LAST_WEB_SEARCH_TOGGLE_, Constants.NEW_CONVO, true); + localStorage.setItem( + `${LocalStorageKeys.LAST_MCP_}${Constants.NEW_CONVO}`, + JSON.stringify(['stale-server']), + ); + + const modelSpec = createModelSpec({ executeCode: true, webSearch: false, mcpServers: [] }); + + applyModelSpecEphemeralAgent({ + convoId: null, + modelSpec, + updateEphemeralAgent, + }); + + const agent = updateEphemeralAgent.mock.calls[0][1] as TEphemeralAgent; + // Should be spec values, NOT localStorage values + expect(agent.execute_code).toBe(true); + expect(agent.web_search).toBe(false); + expect(agent.mcp).toEqual([]); + }); + + it('should handle spec with no MCP servers', () => { + const modelSpec = createModelSpec({ mcpServers: undefined }); + + applyModelSpecEphemeralAgent({ convoId: null, modelSpec, updateEphemeralAgent }); + + const agent = updateEphemeralAgent.mock.calls[0][1] as TEphemeralAgent; + expect(agent.mcp).toEqual([]); + }); + + it('should map artifacts: true to "default" string', () => { + const modelSpec = createModelSpec({ artifacts: true }); + + applyModelSpecEphemeralAgent({ convoId: null, modelSpec, updateEphemeralAgent }); + + const agent = updateEphemeralAgent.mock.calls[0][1] as TEphemeralAgent; + expect(agent.artifacts).toBe('default'); + }); + + it('should pass through artifacts string value directly', () => { + const modelSpec = createModelSpec({ artifacts: 'custom-renderer' as any }); + + applyModelSpecEphemeralAgent({ convoId: null, modelSpec, updateEphemeralAgent }); + + const agent = updateEphemeralAgent.mock.calls[0][1] as TEphemeralAgent; + expect(agent.artifacts).toBe('custom-renderer'); + }); + }); + + // ─── Existing Conversations: Per-Conversation Persistence ────────── + + describe('existing conversations merge user overrides from localStorage', () => { + const convoId = 'convo-abc-123'; + + it('should preserve user tool modifications across navigation', () => { + // User previously toggled off code execution and enabled file search + writeToolToggle(LocalStorageKeys.LAST_CODE_TOGGLE_, convoId, false); + writeToolToggle(LocalStorageKeys.LAST_FILE_SEARCH_TOGGLE_, convoId, true); + + const modelSpec = createModelSpec({ + executeCode: true, + fileSearch: false, + webSearch: true, + }); + + applyModelSpecEphemeralAgent({ convoId, modelSpec, updateEphemeralAgent }); + + const agent = updateEphemeralAgent.mock.calls[0][1] as TEphemeralAgent; + expect(agent.execute_code).toBe(false); // user override + expect(agent.file_search).toBe(true); // user override + expect(agent.web_search).toBe(true); // not overridden, spec value + }); + + it('should preserve user-added MCP servers across navigation', () => { + // Spec has clickhouse, user also added github during the conversation + localStorage.setItem( + `${LocalStorageKeys.LAST_MCP_}${convoId}`, + JSON.stringify(['clickhouse', 'github']), + ); + + const modelSpec = createModelSpec({ mcpServers: ['clickhouse'] }); + + applyModelSpecEphemeralAgent({ convoId, modelSpec, updateEphemeralAgent }); + + const agent = updateEphemeralAgent.mock.calls[0][1] as TEphemeralAgent; + expect(agent.mcp).toEqual(['clickhouse', 'github']); + }); + + it('should preserve user-removed MCP servers (empty array) across navigation', () => { + // User removed all MCP servers during the conversation + localStorage.setItem(`${LocalStorageKeys.LAST_MCP_}${convoId}`, JSON.stringify([])); + + const modelSpec = createModelSpec({ mcpServers: ['clickhouse', 'github'] }); + + applyModelSpecEphemeralAgent({ convoId, modelSpec, updateEphemeralAgent }); + + const agent = updateEphemeralAgent.mock.calls[0][1] as TEphemeralAgent; + expect(agent.mcp).toEqual([]); + }); + + it('should only override keys that exist in localStorage, leaving the rest as spec defaults', () => { + // User only changed artifacts, nothing else + writeToolToggle(LocalStorageKeys.LAST_ARTIFACTS_TOGGLE_, convoId, ''); + + const modelSpec = createModelSpec({ + executeCode: true, + webSearch: true, + fileSearch: false, + artifacts: true, + mcpServers: ['server1'], + }); + + applyModelSpecEphemeralAgent({ convoId, modelSpec, updateEphemeralAgent }); + + const agent = updateEphemeralAgent.mock.calls[0][1] as TEphemeralAgent; + expect(agent.execute_code).toBe(true); // spec default (not in localStorage) + expect(agent.web_search).toBe(true); // spec default + expect(agent.file_search).toBe(false); // spec default + expect(agent.artifacts).toBe(''); // user override + expect(agent.mcp).toEqual(['server1']); // spec default (not in localStorage) + }); + }); + + // ─── Existing Conversations: Cleared localStorage ────────────────── + + describe('existing conversations with cleared localStorage get fresh spec config', () => { + const convoId = 'convo-cleared-456'; + + it('should fall back to pure spec values when localStorage is empty', () => { + // localStorage.clear() was already called in beforeEach + + const modelSpec = createModelSpec({ + executeCode: true, + webSearch: false, + fileSearch: true, + artifacts: true, + mcpServers: ['server1', 'server2'], + }); + + applyModelSpecEphemeralAgent({ convoId, modelSpec, updateEphemeralAgent }); + + expect(updateEphemeralAgent).toHaveBeenCalledWith(convoId, { + mcp: ['server1', 'server2'], + execute_code: true, + web_search: false, + file_search: true, + artifacts: 'default', + }); + }); + + it('should fall back to spec values when timestamps have expired (>2 days)', () => { + // Write values with expired timestamps (3 days old) + const expiredTimestamp = (Date.now() - 3 * 24 * 60 * 60 * 1000).toString(); + const codeKey = `${LocalStorageKeys.LAST_CODE_TOGGLE_}${convoId}`; + localStorage.setItem(codeKey, JSON.stringify(false)); + localStorage.setItem(`${codeKey}_TIMESTAMP`, expiredTimestamp); + + const modelSpec = createModelSpec({ executeCode: true }); + + applyModelSpecEphemeralAgent({ convoId, modelSpec, updateEphemeralAgent }); + + const agent = updateEphemeralAgent.mock.calls[0][1] as TEphemeralAgent; + // Expired override should be ignored — spec value wins + expect(agent.execute_code).toBe(true); + }); + }); + + // ─── Guard Clauses ───────────────────────────────────────────────── + + describe('guard clauses', () => { + it('should not call updateEphemeralAgent when modelSpec is undefined', () => { + applyModelSpecEphemeralAgent({ + convoId: 'convo-1', + modelSpec: undefined, + updateEphemeralAgent, + }); + + expect(updateEphemeralAgent).not.toHaveBeenCalled(); + }); + + it('should not throw when updateEphemeralAgent is undefined', () => { + expect(() => + applyModelSpecEphemeralAgent({ + convoId: 'convo-1', + modelSpec: createModelSpec(), + updateEphemeralAgent: undefined, + }), + ).not.toThrow(); + }); + + it('should use NEW_CONVO key when convoId is empty string', () => { + applyModelSpecEphemeralAgent({ + convoId: '', + modelSpec: createModelSpec(), + updateEphemeralAgent, + }); + + expect(updateEphemeralAgent).toHaveBeenCalledWith(Constants.NEW_CONVO, expect.any(Object)); + }); + }); +}); diff --git a/client/src/utils/__tests__/buildDefaultConvo.test.ts b/client/src/utils/__tests__/buildDefaultConvo.test.ts new file mode 100644 index 0000000000..00a4d6313b --- /dev/null +++ b/client/src/utils/__tests__/buildDefaultConvo.test.ts @@ -0,0 +1,202 @@ +import { EModelEndpoint } from 'librechat-data-provider'; +import type { TConversation } from 'librechat-data-provider'; +import buildDefaultConvo from '../buildDefaultConvo'; + +jest.mock('../localStorage', () => ({ + getLocalStorageItems: jest.fn(() => ({ + lastSelectedModel: {}, + lastSelectedTools: [], + lastConversationSetup: {}, + })), +})); + +const baseConversation: TConversation = { + conversationId: 'test-convo-id', + title: 'Test Conversation', + createdAt: '2024-01-01T00:00:00Z', + updatedAt: '2024-01-01T00:00:00Z', + endpoint: null, +}; + +describe('buildDefaultConvo - defaultParamsEndpoint', () => { + describe('custom endpoint with defaultParamsEndpoint: anthropic', () => { + const models = ['anthropic/claude-opus-4.5', 'anthropic/claude-sonnet-4']; + + it('should preserve maxOutputTokens from model spec preset', () => { + const preset: TConversation = { + ...baseConversation, + endpoint: 'AnthropicClaude' as EModelEndpoint, + endpointType: EModelEndpoint.custom, + model: 'anthropic/claude-opus-4.5', + temperature: 0.7, + maxOutputTokens: 8192, + topP: 0.9, + maxContextTokens: 50000, + }; + + const result = buildDefaultConvo({ + models, + conversation: baseConversation, + endpoint: 'AnthropicClaude' as EModelEndpoint, + lastConversationSetup: preset, + defaultParamsEndpoint: EModelEndpoint.anthropic, + }); + + expect(result.maxOutputTokens).toBe(8192); + expect(result.topP).toBe(0.9); + expect(result.temperature).toBe(0.7); + expect(result.maxContextTokens).toBe(50000); + expect(result.model).toBe('anthropic/claude-opus-4.5'); + }); + + it('should strip maxOutputTokens without defaultParamsEndpoint', () => { + const preset: TConversation = { + ...baseConversation, + endpoint: 'AnthropicClaude' as EModelEndpoint, + endpointType: EModelEndpoint.custom, + model: 'anthropic/claude-opus-4.5', + temperature: 0.7, + maxOutputTokens: 8192, + }; + + const result = buildDefaultConvo({ + models, + conversation: baseConversation, + endpoint: 'AnthropicClaude' as EModelEndpoint, + lastConversationSetup: preset, + }); + + expect(result.maxOutputTokens).toBeUndefined(); + expect(result.temperature).toBe(0.7); + }); + + it('should strip OpenAI-specific fields when using anthropic params', () => { + const preset: TConversation = { + ...baseConversation, + endpoint: 'AnthropicClaude' as EModelEndpoint, + endpointType: EModelEndpoint.custom, + model: 'anthropic/claude-opus-4.5', + max_tokens: 4096, + top_p: 0.9, + presence_penalty: 0.5, + frequency_penalty: 0.3, + }; + + const result = buildDefaultConvo({ + models, + conversation: baseConversation, + endpoint: 'AnthropicClaude' as EModelEndpoint, + lastConversationSetup: preset, + defaultParamsEndpoint: EModelEndpoint.anthropic, + }); + + expect(result.max_tokens).toBeUndefined(); + expect(result.top_p).toBeUndefined(); + expect(result.presence_penalty).toBeUndefined(); + expect(result.frequency_penalty).toBeUndefined(); + }); + }); + + describe('custom endpoint without defaultParamsEndpoint (OpenAI default)', () => { + const models = ['gpt-4o', 'gpt-4.1']; + + it('should preserve OpenAI fields and strip anthropic fields', () => { + const preset: TConversation = { + ...baseConversation, + endpoint: 'MyOpenRouterEndpoint' as EModelEndpoint, + endpointType: EModelEndpoint.custom, + model: 'gpt-4o', + temperature: 0.7, + max_tokens: 4096, + top_p: 0.9, + maxOutputTokens: 8192, + }; + + const result = buildDefaultConvo({ + models, + conversation: baseConversation, + endpoint: 'MyOpenRouterEndpoint' as EModelEndpoint, + lastConversationSetup: preset, + }); + + expect(result.max_tokens).toBe(4096); + expect(result.top_p).toBe(0.9); + expect(result.temperature).toBe(0.7); + expect(result.maxOutputTokens).toBeUndefined(); + }); + }); + + describe('custom endpoint with defaultParamsEndpoint: google', () => { + const models = ['gemini-pro', 'gemini-1.5-pro']; + + it('should preserve Google-specific fields', () => { + const preset: TConversation = { + ...baseConversation, + endpoint: 'MyGoogleEndpoint' as EModelEndpoint, + endpointType: EModelEndpoint.custom, + model: 'gemini-pro', + temperature: 0.7, + maxOutputTokens: 8192, + topP: 0.9, + topK: 40, + }; + + const result = buildDefaultConvo({ + models, + conversation: baseConversation, + endpoint: 'MyGoogleEndpoint' as EModelEndpoint, + lastConversationSetup: preset, + defaultParamsEndpoint: EModelEndpoint.google, + }); + + expect(result.maxOutputTokens).toBe(8192); + expect(result.topP).toBe(0.9); + expect(result.topK).toBe(40); + }); + }); + + describe('cross-endpoint field isolation', () => { + it('should not carry bedrock region to a custom endpoint', () => { + const preset: TConversation = { + ...baseConversation, + endpoint: 'MyChatEndpoint' as EModelEndpoint, + endpointType: EModelEndpoint.custom, + model: 'gpt-4o', + temperature: 0.7, + region: 'us-east-1', + }; + + const result = buildDefaultConvo({ + models: ['gpt-4o'], + conversation: baseConversation, + endpoint: 'MyChatEndpoint' as EModelEndpoint, + lastConversationSetup: preset, + }); + + expect(result.region).toBeUndefined(); + expect(result.temperature).toBe(0.7); + }); + + it('should not carry bedrock region even with anthropic defaultParamsEndpoint', () => { + const preset: TConversation = { + ...baseConversation, + endpoint: 'MyChatEndpoint' as EModelEndpoint, + endpointType: EModelEndpoint.custom, + model: 'claude-3-opus', + region: 'us-east-1', + maxOutputTokens: 8192, + }; + + const result = buildDefaultConvo({ + models: ['claude-3-opus'], + conversation: baseConversation, + endpoint: 'MyChatEndpoint' as EModelEndpoint, + lastConversationSetup: preset, + defaultParamsEndpoint: EModelEndpoint.anthropic, + }); + + expect(result.region).toBeUndefined(); + expect(result.maxOutputTokens).toBe(8192); + }); + }); +}); diff --git a/client/src/utils/__tests__/cleanupPreset.integration.test.ts b/client/src/utils/__tests__/cleanupPreset.integration.test.ts new file mode 100644 index 0000000000..1e1219bc7a --- /dev/null +++ b/client/src/utils/__tests__/cleanupPreset.integration.test.ts @@ -0,0 +1,119 @@ +import { EModelEndpoint } from 'librechat-data-provider'; +import cleanupPreset from '../cleanupPreset'; + +/** + * Integration tests for cleanupPreset — NO mocks. + * Uses the real parseConvo to verify actual schema behavior + * with defaultParamsEndpoint for custom endpoints. + */ +describe('cleanupPreset - real parsing with defaultParamsEndpoint', () => { + it('should preserve maxOutputTokens when defaultParamsEndpoint is anthropic', () => { + const preset = { + presetId: 'test-id', + title: 'Claude Opus', + endpoint: 'AnthropicClaude', + endpointType: EModelEndpoint.custom, + model: 'anthropic/claude-opus-4.5', + temperature: 0.7, + maxOutputTokens: 8192, + topP: 0.9, + maxContextTokens: 50000, + }; + + const result = cleanupPreset({ + preset, + defaultParamsEndpoint: EModelEndpoint.anthropic, + }); + + expect(result.maxOutputTokens).toBe(8192); + expect(result.topP).toBe(0.9); + expect(result.temperature).toBe(0.7); + expect(result.maxContextTokens).toBe(50000); + expect(result.model).toBe('anthropic/claude-opus-4.5'); + }); + + it('should strip maxOutputTokens without defaultParamsEndpoint (OpenAI schema)', () => { + const preset = { + presetId: 'test-id', + title: 'GPT Custom', + endpoint: 'MyOpenRouter', + endpointType: EModelEndpoint.custom, + model: 'gpt-4o', + temperature: 0.7, + maxOutputTokens: 8192, + max_tokens: 4096, + }; + + const result = cleanupPreset({ preset }); + + expect(result.maxOutputTokens).toBeUndefined(); + expect(result.max_tokens).toBe(4096); + expect(result.temperature).toBe(0.7); + }); + + it('should strip OpenAI-specific fields when using anthropic params', () => { + const preset = { + presetId: 'test-id', + title: 'Claude Custom', + endpoint: 'AnthropicClaude', + endpointType: EModelEndpoint.custom, + model: 'anthropic/claude-3-opus', + max_tokens: 4096, + top_p: 0.9, + presence_penalty: 0.5, + frequency_penalty: 0.3, + temperature: 0.7, + }; + + const result = cleanupPreset({ + preset, + defaultParamsEndpoint: EModelEndpoint.anthropic, + }); + + expect(result.max_tokens).toBeUndefined(); + expect(result.top_p).toBeUndefined(); + expect(result.presence_penalty).toBeUndefined(); + expect(result.frequency_penalty).toBeUndefined(); + expect(result.temperature).toBe(0.7); + }); + + it('should not carry bedrock region to custom endpoint', () => { + const preset = { + presetId: 'test-id', + title: 'Custom', + endpoint: 'MyEndpoint', + endpointType: EModelEndpoint.custom, + model: 'gpt-4o', + temperature: 0.7, + region: 'us-east-1', + }; + + const result = cleanupPreset({ preset }); + + expect(result.region).toBeUndefined(); + expect(result.temperature).toBe(0.7); + }); + + it('should preserve Google-specific fields when defaultParamsEndpoint is google', () => { + const preset = { + presetId: 'test-id', + title: 'Gemini Custom', + endpoint: 'MyGoogleEndpoint', + endpointType: EModelEndpoint.custom, + model: 'gemini-pro', + temperature: 0.7, + maxOutputTokens: 8192, + topP: 0.9, + topK: 40, + }; + + const result = cleanupPreset({ + preset, + defaultParamsEndpoint: EModelEndpoint.google, + }); + + expect(result.maxOutputTokens).toBe(8192); + expect(result.topP).toBe(0.9); + expect(result.topK).toBe(40); + }); +}); diff --git a/client/src/utils/__tests__/cleanupPreset.test.ts b/client/src/utils/__tests__/cleanupPreset.test.ts index a03477de15..766bb872ac 100644 --- a/client/src/utils/__tests__/cleanupPreset.test.ts +++ b/client/src/utils/__tests__/cleanupPreset.test.ts @@ -1,12 +1,9 @@ -import { EModelEndpoint } from 'librechat-data-provider'; +import { EModelEndpoint, parseConvo } from 'librechat-data-provider'; import cleanupPreset from '../cleanupPreset'; -import type { TPreset } from 'librechat-data-provider'; - // Mock parseConvo since we're focusing on testing the chatGptLabel migration logic jest.mock('librechat-data-provider', () => ({ ...jest.requireActual('librechat-data-provider'), parseConvo: jest.fn((input) => { - // Return a simplified mock that passes through most properties const { conversation } = input; return { ...conversation, @@ -221,4 +218,41 @@ describe('cleanupPreset', () => { expect(result.presetId).toBeNull(); }); }); + + describe('defaultParamsEndpoint threading', () => { + it('should pass defaultParamsEndpoint to parseConvo', () => { + const preset = { + ...basePreset, + endpoint: 'MyCustomEndpoint', + endpointType: EModelEndpoint.custom, + }; + + cleanupPreset({ + preset, + defaultParamsEndpoint: EModelEndpoint.anthropic, + }); + + expect(parseConvo).toHaveBeenCalledWith( + expect.objectContaining({ + defaultParamsEndpoint: EModelEndpoint.anthropic, + }), + ); + }); + + it('should pass undefined defaultParamsEndpoint when not provided', () => { + const preset = { + ...basePreset, + endpoint: 'MyCustomEndpoint', + endpointType: EModelEndpoint.custom, + }; + + cleanupPreset({ preset }); + + expect(parseConvo).toHaveBeenCalledWith( + expect.objectContaining({ + defaultParamsEndpoint: undefined, + }), + ); + }); + }); }); diff --git a/client/src/utils/buildDefaultConvo.ts b/client/src/utils/buildDefaultConvo.ts index 025bec24eb..c2d2871912 100644 --- a/client/src/utils/buildDefaultConvo.ts +++ b/client/src/utils/buildDefaultConvo.ts @@ -14,11 +14,13 @@ const buildDefaultConvo = ({ conversation, endpoint = null, lastConversationSetup, + defaultParamsEndpoint, }: { models: string[]; conversation: TConversation; endpoint?: EModelEndpoint | null; lastConversationSetup: TConversation | null; + defaultParamsEndpoint?: string | null; }): TConversation => { const { lastSelectedModel, lastSelectedTools } = getLocalStorageItems(); const endpointType = lastConversationSetup?.endpointType ?? conversation.endpointType; @@ -49,6 +51,7 @@ const buildDefaultConvo = ({ possibleValues: { models: possibleModels, }, + defaultParamsEndpoint, }); const defaultConvo = { diff --git a/client/src/utils/cleanupPreset.ts b/client/src/utils/cleanupPreset.ts index c158d935fa..ad44726064 100644 --- a/client/src/utils/cleanupPreset.ts +++ b/client/src/utils/cleanupPreset.ts @@ -4,9 +4,10 @@ import type { TPreset } from 'librechat-data-provider'; type UIPreset = Partial & { presetOverride?: Partial }; type TCleanupPreset = { preset?: UIPreset; + defaultParamsEndpoint?: string | null; }; -const cleanupPreset = ({ preset: _preset }: TCleanupPreset): TPreset => { +const cleanupPreset = ({ preset: _preset, defaultParamsEndpoint }: TCleanupPreset): TPreset => { const { endpoint, endpointType } = _preset ?? ({} as UIPreset); if (endpoint == null || endpoint === '') { console.error(`Unknown endpoint ${endpoint}`, _preset); @@ -35,8 +36,13 @@ const cleanupPreset = ({ preset: _preset }: TCleanupPreset): TPreset => { delete preset.chatGptLabel; } - /* @ts-ignore: endpoint can be a custom defined name */ - const parsedPreset = parseConvo({ endpoint, endpointType, conversation: preset }); + const parsedPreset = parseConvo({ + /* @ts-ignore: endpoint can be a custom defined name */ + endpoint, + endpointType, + conversation: preset, + defaultParamsEndpoint, + }); return { presetId: _preset?.presetId ?? null, diff --git a/client/src/utils/endpoints.ts b/client/src/utils/endpoints.ts index eb9e60386f..33aa7a8525 100644 --- a/client/src/utils/endpoints.ts +++ b/client/src/utils/endpoints.ts @@ -11,6 +11,7 @@ import { } from 'librechat-data-provider'; import type * as t from 'librechat-data-provider'; import type { LocalizeFunction, IconsRecord } from '~/common'; +import { getTimestampedValue } from './timestamps'; /** * Clears model for non-ephemeral agent conversations. @@ -219,12 +220,51 @@ export function applyModelSpecEphemeralAgent({ if (!modelSpec || !updateEphemeralAgent) { return; } - updateEphemeralAgent((convoId ?? Constants.NEW_CONVO) || Constants.NEW_CONVO, { - mcp: modelSpec.mcpServers ?? [Constants.mcp_clear as string], + const key = (convoId ?? Constants.NEW_CONVO) || Constants.NEW_CONVO; + const agent: t.TEphemeralAgent = { + mcp: modelSpec.mcpServers ?? [], web_search: modelSpec.webSearch ?? false, file_search: modelSpec.fileSearch ?? false, execute_code: modelSpec.executeCode ?? false, - }); + artifacts: modelSpec.artifacts === true ? 'default' : modelSpec.artifacts || '', + }; + + // For existing conversations, layer per-conversation localStorage overrides + // on top of spec defaults so user modifications persist across navigation. + // If localStorage is empty (e.g., cleared), spec values stand alone. + if (key !== Constants.NEW_CONVO) { + const toolStorageMap: Array<[keyof t.TEphemeralAgent, string]> = [ + ['execute_code', LocalStorageKeys.LAST_CODE_TOGGLE_], + ['web_search', LocalStorageKeys.LAST_WEB_SEARCH_TOGGLE_], + ['file_search', LocalStorageKeys.LAST_FILE_SEARCH_TOGGLE_], + ['artifacts', LocalStorageKeys.LAST_ARTIFACTS_TOGGLE_], + ]; + + for (const [toolKey, storagePrefix] of toolStorageMap) { + const raw = getTimestampedValue(`${storagePrefix}${key}`); + if (raw !== null) { + try { + agent[toolKey] = JSON.parse(raw) as never; + } catch { + // ignore parse errors + } + } + } + + const mcpRaw = localStorage.getItem(`${LocalStorageKeys.LAST_MCP_}${key}`); + if (mcpRaw !== null) { + try { + const parsed = JSON.parse(mcpRaw); + if (Array.isArray(parsed)) { + agent.mcp = parsed; + } + } catch { + // ignore parse errors + } + } + } + + updateEphemeralAgent(key, agent); } /** diff --git a/client/src/utils/resources.ts b/client/src/utils/resources.ts index 9b68cef3f6..7a1e2b86c1 100644 --- a/client/src/utils/resources.ts +++ b/client/src/utils/resources.ts @@ -19,10 +19,10 @@ export const RESOURCE_CONFIGS: Record = { defaultEditorRoleId: AccessRoleIds.AGENT_EDITOR, defaultOwnerRoleId: AccessRoleIds.AGENT_OWNER, getResourceUrl: (agentId: string) => `${window.location.origin}/c/new?agent_id=${agentId}`, - getResourceName: (name?: string) => (name && name !== '' ? `"${name}"` : 'agent'), - getShareMessage: (name?: string) => (name && name !== '' ? `"${name}"` : 'agent'), + getResourceName: (name?: string) => (name && name !== '' ? name : 'agent'), + getShareMessage: (name?: string) => (name && name !== '' ? name : 'agent'), getManageMessage: (name?: string) => - `Manage permissions for ${name && name !== '' ? `"${name}"` : 'agent'}`, + `Manage permissions for ${name && name !== '' ? name : 'agent'}`, getCopyUrlMessage: () => 'Agent URL copied', }, [ResourceType.PROMPTGROUP]: { @@ -30,10 +30,10 @@ export const RESOURCE_CONFIGS: Record = { defaultViewerRoleId: AccessRoleIds.PROMPTGROUP_VIEWER, defaultEditorRoleId: AccessRoleIds.PROMPTGROUP_EDITOR, defaultOwnerRoleId: AccessRoleIds.PROMPTGROUP_OWNER, - getResourceName: (name?: string) => (name && name !== '' ? `"${name}"` : 'prompt'), - getShareMessage: (name?: string) => (name && name !== '' ? `"${name}"` : 'prompt'), + getResourceName: (name?: string) => (name && name !== '' ? name : 'prompt'), + getShareMessage: (name?: string) => (name && name !== '' ? name : 'prompt'), getManageMessage: (name?: string) => - `Manage permissions for ${name && name !== '' ? `"${name}"` : 'prompt'}`, + `Manage permissions for ${name && name !== '' ? name : 'prompt'}`, getCopyUrlMessage: () => 'Prompt URL copied', }, [ResourceType.MCPSERVER]: { @@ -41,10 +41,10 @@ export const RESOURCE_CONFIGS: Record = { defaultViewerRoleId: AccessRoleIds.MCPSERVER_VIEWER, defaultEditorRoleId: AccessRoleIds.MCPSERVER_EDITOR, defaultOwnerRoleId: AccessRoleIds.MCPSERVER_OWNER, - getResourceName: (name?: string) => (name && name !== '' ? `"${name}"` : 'MCP server'), - getShareMessage: (name?: string) => (name && name !== '' ? `"${name}"` : 'MCP server'), + getResourceName: (name?: string) => (name && name !== '' ? name : 'MCP server'), + getShareMessage: (name?: string) => (name && name !== '' ? name : 'MCP server'), getManageMessage: (name?: string) => - `Manage permissions for ${name && name !== '' ? `"${name}"` : 'MCP server'}`, + `Manage permissions for ${name && name !== '' ? name : 'MCP server'}`, getCopyUrlMessage: () => 'MCP Server URL copied', }, [ResourceType.REMOTE_AGENT]: { diff --git a/client/tailwind.config.cjs b/client/tailwind.config.cjs index c30d2ca703..624998e9d8 100644 --- a/client/tailwind.config.cjs +++ b/client/tailwind.config.cjs @@ -92,6 +92,7 @@ module.exports = { 'text-secondary-alt': 'var(--text-secondary-alt)', 'text-tertiary': 'var(--text-tertiary)', 'text-warning': 'var(--text-warning)', + 'text-destructive': 'var(--text-destructive)', 'ring-primary': 'var(--ring-primary)', 'header-primary': 'var(--header-primary)', 'header-hover': 'var(--header-hover)', @@ -118,6 +119,7 @@ module.exports = { 'border-medium-alt': 'var(--border-medium-alt)', 'border-heavy': 'var(--border-heavy)', 'border-xheavy': 'var(--border-xheavy)', + 'border-destructive': 'var(--border-destructive)', /* These are test styles */ border: 'hsl(var(--border))', input: 'hsl(var(--input))', diff --git a/config/smart-reinstall.js b/config/smart-reinstall.js new file mode 100644 index 0000000000..18fe689127 --- /dev/null +++ b/config/smart-reinstall.js @@ -0,0 +1,235 @@ +#!/usr/bin/env node +/** + * Smart Reinstall for LibreChat + * + * Combines cached dependency installation with Turborepo-powered builds. + * + * Dependencies (npm ci): + * Hashes package-lock.json and stores a marker in node_modules. + * Skips npm ci entirely when the lockfile hasn't changed. + * + * Package builds (Turborepo): + * Turbo hashes each package's source/config inputs, caches build + * outputs (dist/), and restores from cache when inputs match. + * Turbo v2 uses a global cache (~/.cache/turbo) that survives + * npm ci and is shared across worktrees. + * + * Usage: + * npm run smart-reinstall # Smart cached mode + * npm run smart-reinstall -- --force # Full clean reinstall, bust all caches + * npm run smart-reinstall -- --skip-client # Skip frontend (Vite) build + * npm run smart-reinstall -- --clean-cache # Wipe turbo build cache + * npm run smart-reinstall -- --verbose # Turbo verbose output + */ + +const crypto = require('crypto'); +const fs = require('fs'); +const path = require('path'); +const { execSync } = require('child_process'); + +// Adds console.green, console.purple, etc. +require('./helpers'); + +// ─── Configuration ─────────────────────────────────────────────────────────── + +const ROOT_DIR = path.resolve(__dirname, '..'); +const DEPS_HASH_MARKER = path.join(ROOT_DIR, 'node_modules', '.librechat-deps-hash'); + +const flags = { + force: process.argv.includes('--force'), + cleanCache: process.argv.includes('--clean-cache'), + skipClient: process.argv.includes('--skip-client'), + verbose: process.argv.includes('--verbose'), +}; + +// Workspace directories whose node_modules should be cleaned during reinstall +const NODE_MODULES_DIRS = [ + ROOT_DIR, + path.join(ROOT_DIR, 'packages', 'data-provider'), + path.join(ROOT_DIR, 'packages', 'data-schemas'), + path.join(ROOT_DIR, 'packages', 'client'), + path.join(ROOT_DIR, 'packages', 'api'), + path.join(ROOT_DIR, 'client'), + path.join(ROOT_DIR, 'api'), +]; + +// ─── Helpers ───────────────────────────────────────────────────────────────── + +function hashFile(filePath) { + return crypto.createHash('sha256').update(fs.readFileSync(filePath)).digest('hex').slice(0, 16); +} + +function exec(cmd, opts = {}) { + execSync(cmd, { cwd: ROOT_DIR, stdio: 'inherit', ...opts }); +} + +// ─── Dependency Installation ───────────────────────────────────────────────── + +function checkDeps() { + const lockfile = path.join(ROOT_DIR, 'package-lock.json'); + if (!fs.existsSync(lockfile)) { + return { needsInstall: true, hash: 'missing' }; + } + + const hash = hashFile(lockfile); + + if (!fs.existsSync(path.join(ROOT_DIR, 'node_modules'))) { + return { needsInstall: true, hash }; + } + if (!fs.existsSync(DEPS_HASH_MARKER)) { + return { needsInstall: true, hash }; + } + + const stored = fs.readFileSync(DEPS_HASH_MARKER, 'utf-8').trim(); + return { needsInstall: stored !== hash, hash }; +} + +function installDeps(hash) { + const { deleteNodeModules } = require('./helpers'); + NODE_MODULES_DIRS.forEach(deleteNodeModules); + + console.purple('Cleaning npm cache...'); + exec('npm cache clean --force'); + + console.purple('Installing dependencies (npm ci)...'); + exec('npm ci'); + + fs.writeFileSync(DEPS_HASH_MARKER, hash, 'utf-8'); +} + +// ─── Turbo Build ───────────────────────────────────────────────────────────── + +function runTurboBuild() { + const args = ['npx', 'turbo', 'run', 'build']; + + if (flags.skipClient) { + args.push('--filter=!@librechat/frontend'); + } + + if (flags.force) { + args.push('--force'); + } + + if (flags.verbose) { + args.push('--verbosity=2'); + } + + const cmd = args.join(' '); + console.gray(` ${cmd}\n`); + exec(cmd); +} + +/** + * Fallback for when turbo is not installed (e.g., first run before npm ci). + * Runs the same sequential build as the original `npm run frontend`. + */ +function runFallbackBuild() { + console.orange(' turbo not found — using sequential fallback build\n'); + + const scripts = [ + 'build:data-provider', + 'build:data-schemas', + 'build:api', + 'build:client-package', + ]; + + if (!flags.skipClient) { + scripts.push('build:client'); + } + + for (const script of scripts) { + console.purple(` Running ${script}...`); + exec(`npm run ${script}`); + } +} + +function hasTurbo() { + const binDir = path.join(ROOT_DIR, 'node_modules', '.bin'); + return ['turbo', 'turbo.cmd', 'turbo.ps1'].some((name) => fs.existsSync(path.join(binDir, name))); +} + +// ─── Main ──────────────────────────────────────────────────────────────────── + +(async () => { + const startTime = Date.now(); + + console.green('\n Smart Reinstall — LibreChat'); + console.green('─'.repeat(45)); + + // ── Handle --clean-cache ─────────────────────────────────────────────── + if (flags.cleanCache) { + console.purple('Clearing Turborepo cache...'); + if (hasTurbo()) { + try { + exec('npx turbo daemon stop', { stdio: 'pipe' }); + } catch { + // ignore — daemon may not be running + } + } + // Clear local .turbo cache dir + const localTurboCache = path.join(ROOT_DIR, '.turbo'); + if (fs.existsSync(localTurboCache)) { + fs.rmSync(localTurboCache, { recursive: true }); + } + // Clear global turbo cache + if (hasTurbo()) { + try { + exec('npx turbo clean', { stdio: 'pipe' }); + console.green('Turbo cache cleared.'); + } catch { + console.gray('Could not clear global turbo cache (may not exist yet).'); + } + } else { + console.gray('turbo not installed — nothing to clear.'); + } + + if (!flags.force) { + return; + } + } + + // ── Step 1: Dependencies ─────────────────────────────────────────────── + console.purple('\n[1/2] Checking dependencies...'); + + if (flags.force) { + console.orange(' Force mode — reinstalling all dependencies'); + const lockfile = path.join(ROOT_DIR, 'package-lock.json'); + const hash = fs.existsSync(lockfile) ? hashFile(lockfile) : 'none'; + installDeps(hash); + console.green(' Dependencies installed.'); + } else { + const { needsInstall, hash } = checkDeps(); + if (needsInstall) { + console.orange(' package-lock.json changed or node_modules missing'); + installDeps(hash); + console.green(' Dependencies installed.'); + } else { + console.green(' Dependencies up to date — skipping npm ci'); + } + } + + // ── Step 2: Build packages ───────────────────────────────────────────── + console.purple('\n[2/2] Building packages...'); + + if (hasTurbo()) { + runTurboBuild(); + } else { + runFallbackBuild(); + } + + // ── Done ─────────────────────────────────────────────────────────────── + const elapsed = ((Date.now() - startTime) / 1000).toFixed(1); + console.log(''); + console.green('─'.repeat(45)); + console.green(` Done (${elapsed}s)`); + console.green(' Start the app with: npm run backend'); + console.green('─'.repeat(45)); +})().catch((err) => { + console.red(`\nError: ${err.message}`); + if (flags.verbose) { + console.red(err.stack); + } + console.gray(' Tip: run with --force to clean all caches and reinstall from scratch'); + console.gray(' Tip: run with --verbose for detailed output'); + process.exit(1); +}); diff --git a/eslint.config.mjs b/eslint.config.mjs index 9990e0fc35..f53c4e83dd 100644 --- a/eslint.config.mjs +++ b/eslint.config.mjs @@ -291,6 +291,15 @@ export default [ files: ['./packages/api/**/*.ts'], rules: { 'lines-between-class-members': ['error', 'always', { exceptAfterSingleLine: true }], + '@typescript-eslint/no-unused-vars': [ + 'warn', + { + argsIgnorePattern: '^_', + varsIgnorePattern: '^_', + caughtErrorsIgnorePattern: '^_', + destructuredArrayIgnorePattern: '^_', + }, + ], }, }, { diff --git a/helm/librechat-rag-api/Chart.yaml b/helm/librechat-rag-api/Chart.yaml index 38d1470e49..cc382f0501 100755 --- a/helm/librechat-rag-api/Chart.yaml +++ b/helm/librechat-rag-api/Chart.yaml @@ -14,7 +14,7 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. # Versions are expected to follow Semantic Versioning (https://semver.org/) -version: 0.5.2 +version: 0.5.3 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. Versions are not expected to diff --git a/helm/librechat-rag-api/templates/rag-deployment.yaml b/helm/librechat-rag-api/templates/rag-deployment.yaml index 5324ee3f7e..1978260723 100755 --- a/helm/librechat-rag-api/templates/rag-deployment.yaml +++ b/helm/librechat-rag-api/templates/rag-deployment.yaml @@ -26,6 +26,9 @@ spec: imagePullSecrets: {{- toYaml . | nindent 8 }} {{- end }} + {{- if kindIs "bool" .Values.enableServiceLinks }} + enableServiceLinks: {{ .Values.enableServiceLinks }} + {{- end }} securityContext: {{- toYaml .Values.podSecurityContext | nindent 8 }} containers: diff --git a/helm/librechat-rag-api/values.yaml b/helm/librechat-rag-api/values.yaml index cd722bc096..3e1b61208a 100755 --- a/helm/librechat-rag-api/values.yaml +++ b/helm/librechat-rag-api/values.yaml @@ -40,6 +40,11 @@ fullnameOverride: '' podAnnotations: {} podLabels: {} +# Enable or disable injection of service environment variables into pods. +# When running in namespaces with many services, the injected variables can cause +# "argument list too long" errors. Set to false to disable. +enableServiceLinks: true + podSecurityContext: {} # fsGroup: 2000 securityContext: {} diff --git a/helm/librechat/Chart.yaml b/helm/librechat/Chart.yaml index 1e24daa280..52203aa8f4 100755 --- a/helm/librechat/Chart.yaml +++ b/helm/librechat/Chart.yaml @@ -15,7 +15,7 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. # Versions are expected to follow Semantic Versioning (https://semver.org/) -version: 1.9.7 +version: 1.9.8 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. Versions are not expected to @@ -36,7 +36,11 @@ dependencies: version: "0.11.0" condition: meilisearch.enabled repository: "https://meilisearch.github.io/meilisearch-kubernetes" + - name: redis + version: "24.1.3" + condition: redis.enabled + repository: "https://charts.bitnami.com/bitnami" - name: librechat-rag-api - version: "0.5.2" + version: "0.5.3" condition: librechat-rag-api.enabled repository: file://../librechat-rag-api diff --git a/helm/librechat/templates/configmap-env.yaml b/helm/librechat/templates/configmap-env.yaml index 0817ceeaff..ed5ac822da 100755 --- a/helm/librechat/templates/configmap-env.yaml +++ b/helm/librechat/templates/configmap-env.yaml @@ -12,6 +12,12 @@ data: {{- if and (not (dig "configEnv" "MONGO_URI" "" .Values.librechat)) .Values.mongodb.enabled }} MONGO_URI: mongodb://{{ include "mongodb.service.nameOverride" .Subcharts.mongodb }}.{{ .Release.Namespace | lower }}.svc.cluster.local:27017/LibreChat {{- end }} + {{- if and (not (dig "configEnv" "USE_REDIS" "" .Values.librechat)) .Values.redis.enabled }} + USE_REDIS: "true" + {{- end }} + {{- if and (not (dig "configEnv" "REDIS_URI" "" .Values.librechat)) .Values.redis.enabled }} + REDIS_URI: redis://{{ include "common.names.fullname" .Subcharts.redis }}-master.{{ .Release.Namespace | lower }}.svc.cluster.local:6379 + {{- end }} {{- if .Values.librechat.configEnv }} {{- toYaml .Values.librechat.configEnv | nindent 2 }} {{- end }} \ No newline at end of file diff --git a/helm/librechat/templates/deployment.yaml b/helm/librechat/templates/deployment.yaml index f8d0e58298..279749185b 100755 --- a/helm/librechat/templates/deployment.yaml +++ b/helm/librechat/templates/deployment.yaml @@ -49,6 +49,9 @@ spec: {{- toYaml . | nindent 8 }} {{- end }} serviceAccountName: {{ include "librechat.serviceAccountName" . }} + {{- if kindIs "bool" .Values.enableServiceLinks }} + enableServiceLinks: {{ .Values.enableServiceLinks }} + {{- end }} securityContext: {{- toYaml .Values.podSecurityContext | nindent 8 }} {{- if .Values.initContainers }} diff --git a/helm/librechat/values.yaml b/helm/librechat/values.yaml index c6461ade61..a4c877d64d 100755 --- a/helm/librechat/values.yaml +++ b/helm/librechat/values.yaml @@ -153,6 +153,11 @@ podLabels: {} deploymentAnnotations: {} deploymentLabels: {} +# Enable or disable injection of service environment variables into pods. +# When running in namespaces with many services, the injected variables can cause +# "argument list too long" errors. Set to false to disable. +enableServiceLinks: true + podSecurityContext: fsGroup: 2000 @@ -299,8 +304,15 @@ meilisearch: persistence: enabled: true storageClass: "" - image: + image: tag: "v1.7.3" auth: # Use an existing Kubernetes secret for the MEILI_MASTER_KEY existingMasterKeySecret: "librechat-credentials-env" + +# Redis Parameters +redis: + enabled: false + architecture: standalone + auth: + enabled: false diff --git a/package-lock.json b/package-lock.json index c89cf1a9dd..00ea747ddb 100644 --- a/package-lock.json +++ b/package-lock.json @@ -40,6 +40,7 @@ "lint-staged": "^15.4.3", "prettier": "^3.5.0", "prettier-plugin-tailwindcss": "^0.6.11", + "turbo": "^2.8.7", "typescript-eslint": "^8.24.0" } }, @@ -58,14 +59,14 @@ "@google/genai": "^1.19.0", "@keyv/redis": "^4.3.3", "@langchain/core": "^0.3.80", - "@librechat/agents": "^3.1.38", + "@librechat/agents": "^3.1.50", "@librechat/api": "*", "@librechat/data-schemas": "*", "@microsoft/microsoft-graph-client": "^3.0.7", "@modelcontextprotocol/sdk": "^1.26.0", "@node-saml/passport-saml": "^5.1.0", "@smithy/node-http-handler": "^4.4.5", - "axios": "^1.12.1", + "axios": "^1.13.5", "bcryptjs": "^2.4.3", "compression": "^1.8.1", "connect-redis": "^8.1.0", @@ -457,7 +458,7 @@ "lodash": "^4.17.23", "lucide-react": "^0.394.0", "match-sorter": "^8.1.0", - "mermaid": "^11.12.2", + "mermaid": "^11.12.3", "micromark-extension-llm-math": "^3.1.0", "qrcode.react": "^4.2.0", "rc-input-number": "^7.4.2", @@ -6575,54 +6576,42 @@ "license": "MIT" }, "node_modules/@chevrotain/cst-dts-gen": { - "version": "11.0.3", - "resolved": "https://registry.npmjs.org/@chevrotain/cst-dts-gen/-/cst-dts-gen-11.0.3.tgz", - "integrity": "sha512-BvIKpRLeS/8UbfxXxgC33xOumsacaeCKAjAeLyOn7Pcp95HiRbrpl14S+9vaZLolnbssPIUuiUd8IvgkRyt6NQ==", + "version": "11.1.1", + "resolved": "https://registry.npmjs.org/@chevrotain/cst-dts-gen/-/cst-dts-gen-11.1.1.tgz", + "integrity": "sha512-fRHyv6/f542qQqiRGalrfJl/evD39mAvbJLCekPazhiextEatq1Jx1K/i9gSd5NNO0ds03ek0Cbo/4uVKmOBcw==", "license": "Apache-2.0", "dependencies": { - "@chevrotain/gast": "11.0.3", - "@chevrotain/types": "11.0.3", - "lodash-es": "4.17.21" + "@chevrotain/gast": "11.1.1", + "@chevrotain/types": "11.1.1", + "lodash-es": "4.17.23" } }, - "node_modules/@chevrotain/cst-dts-gen/node_modules/lodash-es": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.21.tgz", - "integrity": "sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==", - "license": "MIT" - }, "node_modules/@chevrotain/gast": { - "version": "11.0.3", - "resolved": "https://registry.npmjs.org/@chevrotain/gast/-/gast-11.0.3.tgz", - "integrity": "sha512-+qNfcoNk70PyS/uxmj3li5NiECO+2YKZZQMbmjTqRI3Qchu8Hig/Q9vgkHpI3alNjr7M+a2St5pw5w5F6NL5/Q==", + "version": "11.1.1", + "resolved": "https://registry.npmjs.org/@chevrotain/gast/-/gast-11.1.1.tgz", + "integrity": "sha512-Ko/5vPEYy1vn5CbCjjvnSO4U7GgxyGm+dfUZZJIWTlQFkXkyym0jFYrWEU10hyCjrA7rQtiHtBr0EaZqvHFZvg==", "license": "Apache-2.0", "dependencies": { - "@chevrotain/types": "11.0.3", - "lodash-es": "4.17.21" + "@chevrotain/types": "11.1.1", + "lodash-es": "4.17.23" } }, - "node_modules/@chevrotain/gast/node_modules/lodash-es": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.21.tgz", - "integrity": "sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==", - "license": "MIT" - }, "node_modules/@chevrotain/regexp-to-ast": { - "version": "11.0.3", - "resolved": "https://registry.npmjs.org/@chevrotain/regexp-to-ast/-/regexp-to-ast-11.0.3.tgz", - "integrity": "sha512-1fMHaBZxLFvWI067AVbGJav1eRY7N8DDvYCTwGBiE/ytKBgP8azTdgyrKyWZ9Mfh09eHWb5PgTSO8wi7U824RA==", + "version": "11.1.1", + "resolved": "https://registry.npmjs.org/@chevrotain/regexp-to-ast/-/regexp-to-ast-11.1.1.tgz", + "integrity": "sha512-ctRw1OKSXkOrR8VTvOxrQ5USEc4sNrfwXHa1NuTcR7wre4YbjPcKw+82C2uylg/TEwFRgwLmbhlln4qkmDyteg==", "license": "Apache-2.0" }, "node_modules/@chevrotain/types": { - "version": "11.0.3", - "resolved": "https://registry.npmjs.org/@chevrotain/types/-/types-11.0.3.tgz", - "integrity": "sha512-gsiM3G8b58kZC2HaWR50gu6Y1440cHiJ+i3JUvcp/35JchYejb2+5MVeJK0iKThYpAa/P2PYFV4hoi44HD+aHQ==", + "version": "11.1.1", + "resolved": "https://registry.npmjs.org/@chevrotain/types/-/types-11.1.1.tgz", + "integrity": "sha512-wb2ToxG8LkgPYnKe9FH8oGn3TMCBdnwiuNC5l5y+CtlaVRbCytU0kbVsk6CGrqTL4ZN4ksJa0TXOYbxpbthtqw==", "license": "Apache-2.0" }, "node_modules/@chevrotain/utils": { - "version": "11.0.3", - "resolved": "https://registry.npmjs.org/@chevrotain/utils/-/utils-11.0.3.tgz", - "integrity": "sha512-YslZMgtJUyuMbZ+aKvfF3x1f5liK4mWNxghFRv7jqRR9C3R3fAOGTTKvxXDa2Y1s9zSbcpuO0cAxDYsc9SrXoQ==", + "version": "11.1.1", + "resolved": "https://registry.npmjs.org/@chevrotain/utils/-/utils-11.1.1.tgz", + "integrity": "sha512-71eTYMzYXYSFPrbg/ZwftSaSDld7UYlS8OQa3lNnn9jzNtpFbaReRRyghzqS7rI3CDaorqpPJJcXGHK+FE1TVQ==", "license": "Apache-2.0" }, "node_modules/@codemirror/autocomplete": { @@ -11207,9 +11196,9 @@ } }, "node_modules/@librechat/agents": { - "version": "3.1.38", - "resolved": "https://registry.npmjs.org/@librechat/agents/-/agents-3.1.38.tgz", - "integrity": "sha512-s8WkS2bXkTWsPGKsQKlUFWUVijMAIQvpv4LZLbNj/rZui0R+82vY/CVnkK3jeUueNMo6GS7GG9Fj01FZmhXslw==", + "version": "3.1.50", + "resolved": "https://registry.npmjs.org/@librechat/agents/-/agents-3.1.50.tgz", + "integrity": "sha512-+gdfUJ7X3PJ20/c+8lETY68D6QpxFlCIlGUQBF4A8VKv+Po9J/TO5rWE+OmzmPByYpye7GrcxVCBLfRTvZKraw==", "license": "MIT", "dependencies": { "@anthropic-ai/sdk": "^0.73.0", @@ -11229,7 +11218,7 @@ "@langfuse/otel": "^4.3.0", "@langfuse/tracing": "^4.3.0", "@opentelemetry/sdk-node": "^0.207.0", - "axios": "^1.12.1", + "axios": "^1.13.5", "cheerio": "^1.0.0", "dotenv": "^16.4.7", "https-proxy-agent": "^7.0.6", @@ -11288,12 +11277,12 @@ } }, "node_modules/@mermaid-js/parser": { - "version": "0.6.3", - "resolved": "https://registry.npmjs.org/@mermaid-js/parser/-/parser-0.6.3.tgz", - "integrity": "sha512-lnjOhe7zyHjc+If7yT4zoedx2vo4sHaTmtkl1+or8BRTnCtDmcTpAjpzDSfCZrshM5bCoz0GyidzadJAH1xobA==", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@mermaid-js/parser/-/parser-1.0.0.tgz", + "integrity": "sha512-vvK0Hi/VWndxoh03Mmz6wa1KDriSPjS2XMZL/1l19HFwygiObEEoEwSDxOqyLzzAI6J2PU3261JjTMTO7x+BPw==", "license": "MIT", "dependencies": { - "langium": "3.3.1" + "langium": "^4.0.0" } }, "node_modules/@microsoft/microsoft-graph-client": { @@ -21308,13 +21297,13 @@ } }, "node_modules/axios": { - "version": "1.12.1", - "resolved": "https://registry.npmjs.org/axios/-/axios-1.12.1.tgz", - "integrity": "sha512-Kn4kbSXpkFHCGE6rBFNwIv0GQs4AvDT80jlveJDKFxjbTYMUeB4QtsdPCv6H8Cm19Je7IU6VFtRl2zWZI0rudQ==", + "version": "1.13.5", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.13.5.tgz", + "integrity": "sha512-cz4ur7Vb0xS4/KUN0tPWe44eqxrIu31me+fbang3ijiNscE129POzipJJA6zniq2C/Z6sJCjMimjS8Lc/GAs8Q==", "license": "MIT", "dependencies": { - "follow-redirects": "^1.15.6", - "form-data": "^4.0.4", + "follow-redirects": "^1.15.11", + "form-data": "^4.0.5", "proxy-from-env": "^1.1.0" } }, @@ -22253,17 +22242,17 @@ } }, "node_modules/chevrotain": { - "version": "11.0.3", - "resolved": "https://registry.npmjs.org/chevrotain/-/chevrotain-11.0.3.tgz", - "integrity": "sha512-ci2iJH6LeIkvP9eJW6gpueU8cnZhv85ELY8w8WiFtNjMHA5ad6pQLaJo9mEly/9qUyCpvqX8/POVUTf18/HFdw==", + "version": "11.1.1", + "resolved": "https://registry.npmjs.org/chevrotain/-/chevrotain-11.1.1.tgz", + "integrity": "sha512-f0yv5CPKaFxfsPTBzX7vGuim4oIC1/gcS7LUGdBSwl2dU6+FON6LVUksdOo1qJjoUvXNn45urgh8C+0a24pACQ==", "license": "Apache-2.0", "dependencies": { - "@chevrotain/cst-dts-gen": "11.0.3", - "@chevrotain/gast": "11.0.3", - "@chevrotain/regexp-to-ast": "11.0.3", - "@chevrotain/types": "11.0.3", - "@chevrotain/utils": "11.0.3", - "lodash-es": "4.17.21" + "@chevrotain/cst-dts-gen": "11.1.1", + "@chevrotain/gast": "11.1.1", + "@chevrotain/regexp-to-ast": "11.1.1", + "@chevrotain/types": "11.1.1", + "@chevrotain/utils": "11.1.1", + "lodash-es": "4.17.23" } }, "node_modules/chevrotain-allstar": { @@ -22278,12 +22267,6 @@ "chevrotain": "^11.0.0" } }, - "node_modules/chevrotain/node_modules/lodash-es": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.21.tgz", - "integrity": "sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==", - "license": "MIT" - }, "node_modules/chokidar": { "version": "3.5.3", "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz", @@ -25951,9 +25934,9 @@ "license": "BSD-3-Clause" }, "node_modules/fast-xml-parser": { - "version": "5.3.4", - "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-5.3.4.tgz", - "integrity": "sha512-EFd6afGmXlCx8H8WTZHhAoDaWaGyuIBoZJ2mknrNxug+aZKjkp0a0dlars9Izl+jF+7Gu1/5f/2h68cQpe0IiA==", + "version": "5.3.6", + "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-5.3.6.tgz", + "integrity": "sha512-QNI3sAvSvaOiaMl8FYU4trnEzCwiRr8XMWgAHzlrWpTSj+QaCSvOf1h82OEP1s4hiAXhnbXSyFWCf4ldZzZRVA==", "funding": [ { "type": "github", @@ -26297,9 +26280,9 @@ "integrity": "sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw==" }, "node_modules/follow-redirects": { - "version": "1.15.9", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.9.tgz", - "integrity": "sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==", + "version": "1.15.11", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.11.tgz", + "integrity": "sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==", "funding": [ { "type": "individual", @@ -26352,9 +26335,9 @@ } }, "node_modules/form-data": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz", - "integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==", + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.5.tgz", + "integrity": "sha512-8RipRLol37bNs2bhoV67fiTEvdTrbMUYcFTiy3+wuuOnUog2QBHCZWXDRijWQfAkhBj2Uf5UnVaiWwA5vdd82w==", "license": "MIT", "dependencies": { "asynckit": "^0.4.0", @@ -30140,13 +30123,14 @@ } }, "node_modules/katex": { - "version": "0.16.21", - "resolved": "https://registry.npmjs.org/katex/-/katex-0.16.21.tgz", - "integrity": "sha512-XvqR7FgOHtWupfMiigNzmh+MgUVmDGU2kXZm899ZkPfcuoPuFxyHmXsgATDpFZDAXCI8tvinaVcDo8PIIJSo4A==", + "version": "0.16.28", + "resolved": "https://registry.npmjs.org/katex/-/katex-0.16.28.tgz", + "integrity": "sha512-YHzO7721WbmAL6Ov1uzN/l5mY5WWWhJBSW+jq4tkfZfsxmo1hu6frS0EOswvjBUnWE6NtjEs48SFn5CQESRLZg==", "funding": [ "https://opencollective.com/katex", "https://github.com/sponsors/katex" ], + "license": "MIT", "dependencies": { "commander": "^8.3.0" }, @@ -30206,32 +30190,32 @@ "integrity": "sha512-Xq9nH7KlWZmXAtodXDDRE7vs6DU1gTU8zYDHDiWLSip45Egwq3plLHzPn27NgvzL2r1LMPC1vdqh98sQxtqj4A==" }, "node_modules/langium": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/langium/-/langium-3.3.1.tgz", - "integrity": "sha512-QJv/h939gDpvT+9SiLVlY7tZC3xB2qK57v0J04Sh9wpMb6MP1q8gB21L3WIo8T5P1MSMg3Ep14L7KkDCFG3y4w==", + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/langium/-/langium-4.2.1.tgz", + "integrity": "sha512-zu9QWmjpzJcomzdJQAHgDVhLGq5bLosVak1KVa40NzQHXfqr4eAHupvnPOVXEoLkg6Ocefvf/93d//SB7du4YQ==", "license": "MIT", "dependencies": { - "chevrotain": "~11.0.3", - "chevrotain-allstar": "~0.3.0", + "chevrotain": "~11.1.1", + "chevrotain-allstar": "~0.3.1", "vscode-languageserver": "~9.0.1", "vscode-languageserver-textdocument": "~1.0.11", - "vscode-uri": "~3.0.8" + "vscode-uri": "~3.1.0" }, "engines": { - "node": ">=16.0.0" + "node": ">=20.10.0", + "npm": ">=10.2.3" } }, "node_modules/langsmith": { - "version": "0.3.67", - "resolved": "https://registry.npmjs.org/langsmith/-/langsmith-0.3.67.tgz", - "integrity": "sha512-l4y3RmJ9yWF5a29fLg3eWZQxn6Q6dxTOgLGgQHzPGZHF3NUynn+A+airYIe/Yt4rwjGbuVrABAPsXBkVu/Hi7g==", + "version": "0.4.12", + "resolved": "https://registry.npmjs.org/langsmith/-/langsmith-0.4.12.tgz", + "integrity": "sha512-YWt0jcGvKqjUgIvd78rd4QcdMss0lUkeUaqp0UpVRq7H2yNDx8H5jOUO/laWUmaPtWGgcip0qturykXe1g9Gqw==", "license": "MIT", "dependencies": { "@types/uuid": "^10.0.0", "chalk": "^4.1.2", "console-table-printer": "^2.12.1", "p-queue": "^6.6.2", - "p-retry": "4", "semver": "^7.6.3", "uuid": "^10.0.0" }, @@ -30721,9 +30705,9 @@ "license": "MIT" }, "node_modules/lodash-es": { - "version": "4.17.22", - "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.22.tgz", - "integrity": "sha512-XEawp1t0gxSi9x01glktRZ5HDy0HXqrM0x5pXQM98EaI0NxO6jVM7omDOxsuEo5UIASAnm2bRp1Jt/e0a2XU8Q==", + "version": "4.17.23", + "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.23.tgz", + "integrity": "sha512-kVI48u3PZr38HdYz98UmfPnXl2DXrpdctLrFLCd3kOx1xUkOmpFPx7gCWWM5MPkL/fD8zb+Ph0QzjGFs4+hHWg==", "license": "MIT" }, "node_modules/lodash.camelcase": { @@ -31906,14 +31890,14 @@ } }, "node_modules/mermaid": { - "version": "11.12.2", - "resolved": "https://registry.npmjs.org/mermaid/-/mermaid-11.12.2.tgz", - "integrity": "sha512-n34QPDPEKmaeCG4WDMGy0OT6PSyxKCfy2pJgShP+Qow2KLrvWjclwbc3yXfSIf4BanqWEhQEpngWwNp/XhZt6w==", + "version": "11.12.3", + "resolved": "https://registry.npmjs.org/mermaid/-/mermaid-11.12.3.tgz", + "integrity": "sha512-wN5ZSgJQIC+CHJut9xaKWsknLxaFBwCPwPkGTSUYrTiHORWvpT8RxGk849HPnpUAQ+/9BPRqYb80jTpearrHzQ==", "license": "MIT", "dependencies": { "@braintree/sanitize-url": "^7.1.1", "@iconify/utils": "^3.0.1", - "@mermaid-js/parser": "^0.6.3", + "@mermaid-js/parser": "^1.0.0", "@types/d3": "^7.4.3", "cytoscape": "^3.29.3", "cytoscape-cose-bilkent": "^4.1.0", @@ -31925,7 +31909,7 @@ "dompurify": "^3.2.5", "katex": "^0.16.22", "khroma": "^2.1.0", - "lodash-es": "^4.17.21", + "lodash-es": "^4.17.23", "marked": "^16.2.1", "roughjs": "^4.6.6", "stylis": "^4.3.6", @@ -39915,6 +39899,108 @@ "dev": true, "license": "MIT" }, + "node_modules/turbo": { + "version": "2.8.7", + "resolved": "https://registry.npmjs.org/turbo/-/turbo-2.8.7.tgz", + "integrity": "sha512-RBLh5caMAu1kFdTK1jgH2gH/z+jFsvX5rGbhgJ9nlIAWXSvxlzwId05uDlBA1+pBd3wO/UaKYzaQZQBXDd7kcA==", + "dev": true, + "license": "MIT", + "bin": { + "turbo": "bin/turbo" + }, + "optionalDependencies": { + "turbo-darwin-64": "2.8.7", + "turbo-darwin-arm64": "2.8.7", + "turbo-linux-64": "2.8.7", + "turbo-linux-arm64": "2.8.7", + "turbo-windows-64": "2.8.7", + "turbo-windows-arm64": "2.8.7" + } + }, + "node_modules/turbo-darwin-64": { + "version": "2.8.7", + "resolved": "https://registry.npmjs.org/turbo-darwin-64/-/turbo-darwin-64-2.8.7.tgz", + "integrity": "sha512-Xr4TO/oDDwoozbDtBvunb66g//WK8uHRygl72vUthuwzmiw48pil4IuoG/QbMHd9RE8aBnVmzC0WZEWk/WWt3A==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/turbo-darwin-arm64": { + "version": "2.8.7", + "resolved": "https://registry.npmjs.org/turbo-darwin-arm64/-/turbo-darwin-arm64-2.8.7.tgz", + "integrity": "sha512-p8Xbmb9kZEY/NoshQUcFmQdO80s2PCGoLYj5DbpxjZr3diknipXxzOK7pcmT7l2gNHaMCpFVWLkiFY9nO3EU5w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/turbo-linux-64": { + "version": "2.8.7", + "resolved": "https://registry.npmjs.org/turbo-linux-64/-/turbo-linux-64-2.8.7.tgz", + "integrity": "sha512-nwfEPAH3m5y/nJeYly3j1YJNYU2EG5+2ysZUxvBNM+VBV2LjQaLxB9CsEIpIOKuWKCjnFHKIADTSDPZ3D12J5Q==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/turbo-linux-arm64": { + "version": "2.8.7", + "resolved": "https://registry.npmjs.org/turbo-linux-arm64/-/turbo-linux-arm64-2.8.7.tgz", + "integrity": "sha512-mgA/M6xiJzyxtXV70TtWGDPh+I6acOKmeQGtOzbFQZYEf794pu5jax26bCk5skAp1gqZu3vacPr6jhYHoHU9IQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/turbo-windows-64": { + "version": "2.8.7", + "resolved": "https://registry.npmjs.org/turbo-windows-64/-/turbo-windows-64-2.8.7.tgz", + "integrity": "sha512-sHTYMaXuCcyHnGUQgfUUt7S8407TWoP14zc/4N2tsM0wZNK6V9h4H2t5jQPtqKEb6Fg8313kygdDgEwuM4vsHg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/turbo-windows-arm64": { + "version": "2.8.7", + "resolved": "https://registry.npmjs.org/turbo-windows-arm64/-/turbo-windows-arm64-2.8.7.tgz", + "integrity": "sha512-WyGiOI2Zp3AhuzVagzQN+T+iq0fWx0oGxDfAWT3ZiLEd4U0cDUkwUZDKVGb3rKqPjDL6lWnuxKKu73ge5xtovQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, "node_modules/type": { "version": "2.7.3", "resolved": "https://registry.npmjs.org/type/-/type-2.7.3.tgz", @@ -41018,9 +41104,9 @@ "license": "MIT" }, "node_modules/vscode-uri": { - "version": "3.0.8", - "resolved": "https://registry.npmjs.org/vscode-uri/-/vscode-uri-3.0.8.tgz", - "integrity": "sha512-AyFQ0EVmsOZOlAnxoFOGOq1SQDWAB7C6aqMGS23svWAllfOaxbuFvcT8D1i8z3Gyn8fraVeZNNmN6e9bxxXkKw==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/vscode-uri/-/vscode-uri-3.1.0.tgz", + "integrity": "sha512-/BpdSx+yCQGnCvecbyXdxHDkuk55/G3xwnC0GqY4gmQ3j+A+g8kzzgB4Nk/SINjqn6+waqw3EgbVF2QKExkRxQ==", "license": "MIT" }, "node_modules/w3c-keyname": { @@ -42102,11 +42188,11 @@ "@google/genai": "^1.19.0", "@keyv/redis": "^4.3.3", "@langchain/core": "^0.3.80", - "@librechat/agents": "^3.1.38", + "@librechat/agents": "^3.1.50", "@librechat/data-schemas": "*", "@modelcontextprotocol/sdk": "^1.26.0", "@smithy/node-http-handler": "^4.4.5", - "axios": "^1.12.1", + "axios": "^1.13.5", "connect-redis": "^8.1.0", "eventsource": "^3.0.2", "express": "^5.1.0", @@ -44464,7 +44550,7 @@ "version": "0.8.231", "license": "ISC", "dependencies": { - "axios": "^1.12.1", + "axios": "^1.13.5", "dayjs": "^1.11.13", "js-yaml": "^4.1.1", "zod": "^3.22.4" diff --git a/package.json b/package.json index 80dea27369..ecfb1d601f 100644 --- a/package.json +++ b/package.json @@ -2,6 +2,7 @@ "name": "LibreChat", "version": "v0.8.2", "description": "", + "packageManager": "npm@11.10.0", "workspaces": [ "api", "client", @@ -15,6 +16,7 @@ "user-stats": "node config/user-stats.js", "rebuild:package-lock": "node config/packages", "reinstall": "node config/update.js -l -g", + "smart-reinstall": "node config/smart-reinstall.js", "b:reinstall": "bun config/update.js -b -l -g", "reinstall:docker": "node config/update.js -d -g", "update:local": "node config/update.js -l", @@ -128,6 +130,7 @@ "lint-staged": "^15.4.3", "prettier": "^3.5.0", "prettier-plugin-tailwindcss": "^0.6.11", + "turbo": "^2.8.7", "typescript-eslint": "^8.24.0" }, "overrides": { @@ -135,14 +138,14 @@ "@librechat/agents": { "@langchain/anthropic": { "@anthropic-ai/sdk": "0.73.0", - "fast-xml-parser": "5.3.4" + "fast-xml-parser": "5.3.6" }, "@anthropic-ai/sdk": "0.73.0", - "fast-xml-parser": "5.3.4" + "fast-xml-parser": "5.3.6" }, "axios": "1.12.1", "elliptic": "^6.6.1", - "fast-xml-parser": "5.3.4", + "fast-xml-parser": "5.3.6", "form-data": "^4.0.4", "tslib": "^2.8.1", "mdast-util-gfm-autolink-literal": "2.0.0", @@ -160,7 +163,8 @@ "micromark-extension-math": { "katex": "^0.16.21" } - } + }, + "langsmith": "0.4.12" }, "nodemonConfig": { "ignore": [ diff --git a/packages/api/jest.config.mjs b/packages/api/jest.config.mjs index 10fa4554e4..5506d6e483 100644 --- a/packages/api/jest.config.mjs +++ b/packages/api/jest.config.mjs @@ -10,6 +10,17 @@ export default { ], coverageReporters: ['text', 'cobertura'], testResultsProcessor: 'jest-junit', + transform: { + '\\.[jt]sx?$': [ + 'babel-jest', + { + presets: [ + ['@babel/preset-env', { targets: { node: 'current' } }], + '@babel/preset-typescript', + ], + }, + ], + }, moduleNameMapper: { '^@src/(.*)$': '/src/$1', '~/(.*)': '/src/$1', diff --git a/packages/api/package.json b/packages/api/package.json index 0dd1bfc005..107a660315 100644 --- a/packages/api/package.json +++ b/packages/api/package.json @@ -87,11 +87,11 @@ "@google/genai": "^1.19.0", "@keyv/redis": "^4.3.3", "@langchain/core": "^0.3.80", - "@librechat/agents": "^3.1.38", + "@librechat/agents": "^3.1.50", "@librechat/data-schemas": "*", "@modelcontextprotocol/sdk": "^1.26.0", "@smithy/node-http-handler": "^4.4.5", - "axios": "^1.12.1", + "axios": "^1.13.5", "connect-redis": "^8.1.0", "eventsource": "^3.0.2", "express": "^5.1.0", diff --git a/packages/api/src/agents/__tests__/initialize.test.ts b/packages/api/src/agents/__tests__/initialize.test.ts new file mode 100644 index 0000000000..01310a09c4 --- /dev/null +++ b/packages/api/src/agents/__tests__/initialize.test.ts @@ -0,0 +1,284 @@ +import { Providers } from '@librechat/agents'; +import { EModelEndpoint } from 'librechat-data-provider'; +import type { Agent } from 'librechat-data-provider'; +import type { ServerRequest, InitializeResultBase } from '~/types'; +import type { InitializeAgentDbMethods } from '../initialize'; + +// Mock logger +jest.mock('winston', () => ({ + createLogger: jest.fn(() => ({ + debug: jest.fn(), + warn: jest.fn(), + error: jest.fn(), + })), + format: { + combine: jest.fn(), + colorize: jest.fn(), + simple: jest.fn(), + }, + transports: { + Console: jest.fn(), + }, +})); + +const mockExtractLibreChatParams = jest.fn(); +const mockGetModelMaxTokens = jest.fn(); +const mockOptionalChainWithEmptyCheck = jest.fn(); +const mockGetThreadData = jest.fn(); + +jest.mock('~/utils', () => ({ + extractLibreChatParams: (...args: unknown[]) => mockExtractLibreChatParams(...args), + getModelMaxTokens: (...args: unknown[]) => mockGetModelMaxTokens(...args), + optionalChainWithEmptyCheck: (...args: unknown[]) => mockOptionalChainWithEmptyCheck(...args), + getThreadData: (...args: unknown[]) => mockGetThreadData(...args), +})); + +const mockGetProviderConfig = jest.fn(); +jest.mock('~/endpoints', () => ({ + getProviderConfig: (...args: unknown[]) => mockGetProviderConfig(...args), +})); + +jest.mock('~/files', () => ({ + filterFilesByEndpointConfig: jest.fn(() => []), +})); + +jest.mock('~/prompts', () => ({ + generateArtifactsPrompt: jest.fn(() => null), +})); + +jest.mock('../resources', () => ({ + primeResources: jest.fn().mockResolvedValue({ + attachments: [], + tool_resources: undefined, + }), +})); + +import { initializeAgent } from '../initialize'; + +/** + * Creates minimal mock objects for initializeAgent tests. + */ +function createMocks(overrides?: { + maxContextTokens?: number; + modelDefault?: number; + maxOutputTokens?: number; +}) { + const { maxContextTokens, modelDefault = 200000, maxOutputTokens = 4096 } = overrides ?? {}; + + const agent = { + id: 'agent-1', + model: 'test-model', + provider: Providers.OPENAI, + tools: [], + model_parameters: { model: 'test-model' }, + } as unknown as Agent; + + const req = { + user: { id: 'user-1' }, + config: {}, + } as unknown as ServerRequest; + + const res = {} as unknown as import('express').Response; + + const mockGetOptions = jest.fn().mockResolvedValue({ + llmConfig: { + model: 'test-model', + maxTokens: maxOutputTokens, + }, + endpointTokenConfig: undefined, + } satisfies InitializeResultBase); + + mockGetProviderConfig.mockReturnValue({ + getOptions: mockGetOptions, + overrideProvider: Providers.OPENAI, + }); + + // extractLibreChatParams returns maxContextTokens when provided in model_parameters + mockExtractLibreChatParams.mockReturnValue({ + resendFiles: false, + maxContextTokens, + modelOptions: { model: 'test-model' }, + }); + + // getModelMaxTokens returns the model's default context window + mockGetModelMaxTokens.mockReturnValue(modelDefault); + + // Implement real optionalChainWithEmptyCheck behavior + mockOptionalChainWithEmptyCheck.mockImplementation( + (...values: (string | number | undefined)[]) => { + for (const v of values) { + if (v !== undefined && v !== null && v !== '') { + return v; + } + } + return values[values.length - 1]; + }, + ); + + const loadTools = jest.fn().mockResolvedValue({ + tools: [], + toolContextMap: {}, + userMCPAuthMap: undefined, + toolRegistry: undefined, + toolDefinitions: [], + hasDeferredTools: false, + }); + + const db: InitializeAgentDbMethods = { + getFiles: jest.fn().mockResolvedValue([]), + getConvoFiles: jest.fn().mockResolvedValue([]), + updateFilesUsage: jest.fn().mockResolvedValue([]), + getUserKey: jest.fn().mockResolvedValue('user-1'), + getUserKeyValues: jest.fn().mockResolvedValue([]), + getToolFilesByIds: jest.fn().mockResolvedValue([]), + }; + + return { agent, req, res, loadTools, db }; +} + +describe('initializeAgent — maxContextTokens', () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + + it('uses user-configured maxContextTokens when provided via model_parameters', async () => { + const userValue = 50000; + const { agent, req, res, loadTools, db } = createMocks({ + maxContextTokens: userValue, + modelDefault: 200000, + maxOutputTokens: 4096, + }); + + const result = await initializeAgent( + { + req, + res, + agent, + loadTools, + endpointOption: { + endpoint: EModelEndpoint.agents, + model_parameters: { maxContextTokens: userValue }, + }, + allowedProviders: new Set([Providers.OPENAI]), + isInitialAgent: true, + }, + db, + ); + + expect(result.maxContextTokens).toBe(userValue); + }); + + it('falls back to formula when maxContextTokens is NOT provided', async () => { + const modelDefault = 200000; + const maxOutputTokens = 4096; + const { agent, req, res, loadTools, db } = createMocks({ + maxContextTokens: undefined, + modelDefault, + maxOutputTokens, + }); + + const result = await initializeAgent( + { + req, + res, + agent, + loadTools, + endpointOption: { endpoint: EModelEndpoint.agents }, + allowedProviders: new Set([Providers.OPENAI]), + isInitialAgent: true, + }, + db, + ); + + const expected = Math.round((modelDefault - maxOutputTokens) * 0.9); + expect(result.maxContextTokens).toBe(expected); + }); + + it('falls back to formula when maxContextTokens is 0', async () => { + const maxOutputTokens = 4096; + const { agent, req, res, loadTools, db } = createMocks({ + maxContextTokens: 0, + modelDefault: 200000, + maxOutputTokens, + }); + + const result = await initializeAgent( + { + req, + res, + agent, + loadTools, + endpointOption: { + endpoint: EModelEndpoint.agents, + model_parameters: { maxContextTokens: 0 }, + }, + allowedProviders: new Set([Providers.OPENAI]), + isInitialAgent: true, + }, + db, + ); + + // 0 is not used as-is; the formula kicks in. + // optionalChainWithEmptyCheck(0, 200000, 18000) returns 0 (not null/undefined), + // then Number(0) || 18000 = 18000 (the fallback default). + expect(result.maxContextTokens).not.toBe(0); + const expected = Math.round((18000 - maxOutputTokens) * 0.9); + expect(result.maxContextTokens).toBe(expected); + }); + + it('falls back to formula when maxContextTokens is negative', async () => { + const maxOutputTokens = 4096; + const { agent, req, res, loadTools, db } = createMocks({ + maxContextTokens: -1, + modelDefault: 200000, + maxOutputTokens, + }); + + const result = await initializeAgent( + { + req, + res, + agent, + loadTools, + endpointOption: { + endpoint: EModelEndpoint.agents, + model_parameters: { maxContextTokens: -1 }, + }, + allowedProviders: new Set([Providers.OPENAI]), + isInitialAgent: true, + }, + db, + ); + + // -1 is not used as-is; the formula kicks in + expect(result.maxContextTokens).not.toBe(-1); + }); + + it('preserves small user-configured value (e.g. 1000 from modelSpec)', async () => { + const userValue = 1000; + const { agent, req, res, loadTools, db } = createMocks({ + maxContextTokens: userValue, + modelDefault: 128000, + maxOutputTokens: 4096, + }); + + const result = await initializeAgent( + { + req, + res, + agent, + loadTools, + endpointOption: { + endpoint: EModelEndpoint.agents, + model_parameters: { maxContextTokens: userValue }, + }, + allowedProviders: new Set([Providers.OPENAI]), + isInitialAgent: true, + }, + db, + ); + + // Should NOT be overridden to Math.round((128000 - 4096) * 0.9) = 111,514 + expect(result.maxContextTokens).toBe(userValue); + }); +}); diff --git a/packages/api/src/agents/client.ts b/packages/api/src/agents/client.ts new file mode 100644 index 0000000000..fd5d50f211 --- /dev/null +++ b/packages/api/src/agents/client.ts @@ -0,0 +1,162 @@ +import { logger } from '@librechat/data-schemas'; +import { isAgentsEndpoint } from 'librechat-data-provider'; +import { labelContentByAgent, getTokenCountForMessage } from '@librechat/agents'; +import type { MessageContentComplex } from '@librechat/agents'; +import type { Agent, TMessage } from 'librechat-data-provider'; +import type { BaseMessage } from '@langchain/core/messages'; +import type { ServerRequest } from '~/types'; +import Tokenizer from '~/utils/tokenizer'; +import { logAxiosError } from '~/utils'; + +export const omitTitleOptions = new Set([ + 'stream', + 'thinking', + 'streaming', + 'clientOptions', + 'thinkingConfig', + 'thinkingBudget', + 'includeThoughts', + 'maxOutputTokens', + 'additionalModelRequestFields', +]); + +export function payloadParser({ req, endpoint }: { req: ServerRequest; endpoint: string }) { + if (isAgentsEndpoint(endpoint)) { + return; + } + return req.body?.endpointOption?.model_parameters; +} + +export function createTokenCounter(encoding: Parameters[1]) { + return function (message: BaseMessage) { + const countTokens = (text: string) => Tokenizer.getTokenCount(text, encoding); + return getTokenCountForMessage(message, countTokens); + }; +} + +export function logToolError(_graph: unknown, error: unknown, toolId: string) { + logAxiosError({ + error, + message: `[api/server/controllers/agents/client.js #chatCompletion] Tool Error "${toolId}"`, + }); +} + +const AGENT_SUFFIX_PATTERN = /____(\d+)$/; + +/** Finds the primary agent ID within a set of agent IDs (no suffix or lowest suffix number) */ +export function findPrimaryAgentId(agentIds: Set): string | null { + let primaryAgentId: string | null = null; + let lowestSuffixIndex = Infinity; + + for (const agentId of agentIds) { + const suffixMatch = agentId.match(AGENT_SUFFIX_PATTERN); + if (!suffixMatch) { + return agentId; + } + const suffixIndex = parseInt(suffixMatch[1], 10); + if (suffixIndex < lowestSuffixIndex) { + lowestSuffixIndex = suffixIndex; + primaryAgentId = agentId; + } + } + + return primaryAgentId; +} + +type ContentPart = TMessage['content'] extends (infer U)[] | undefined ? U : never; + +/** + * Creates a mapMethod for getMessagesForConversation that processes agent content. + * - Strips agentId/groupId metadata from all content + * - For parallel agents (addedConvo with groupId): filters each group to its primary agent + * - For handoffs (agentId without groupId): keeps all content from all agents + * - For multi-agent: applies agent labels to content + * + * The key distinction: + * - Parallel execution (addedConvo): Parts have both agentId AND groupId + * - Handoffs: Parts only have agentId, no groupId + */ +export function createMultiAgentMapper(primaryAgent: Agent, agentConfigs?: Map) { + const hasMultipleAgents = (primaryAgent.edges?.length ?? 0) > 0 || (agentConfigs?.size ?? 0) > 0; + + let agentNames: Record | null = null; + if (hasMultipleAgents) { + agentNames = { [primaryAgent.id]: primaryAgent.name || 'Assistant' }; + if (agentConfigs) { + for (const [agentId, agentConfig] of agentConfigs.entries()) { + agentNames[agentId] = agentConfig.name || agentConfig.id; + } + } + } + + return (message: TMessage): TMessage => { + if (message.isCreatedByUser || !Array.isArray(message.content)) { + return message; + } + + const hasAgentMetadata = message.content.some( + (part) => + (part as ContentPart & { agentId?: string; groupId?: number })?.agentId || + (part as ContentPart & { groupId?: number })?.groupId != null, + ); + if (!hasAgentMetadata) { + return message; + } + + try { + const groupAgentMap = new Map>(); + + for (const part of message.content) { + const p = part as ContentPart & { agentId?: string; groupId?: number }; + const groupId = p?.groupId; + const agentId = p?.agentId; + if (groupId != null && agentId) { + if (!groupAgentMap.has(groupId)) { + groupAgentMap.set(groupId, new Set()); + } + groupAgentMap.get(groupId)!.add(agentId); + } + } + + const groupPrimaryMap = new Map(); + for (const [groupId, agentIds] of groupAgentMap) { + const primary = findPrimaryAgentId(agentIds); + if (primary) { + groupPrimaryMap.set(groupId, primary); + } + } + + const filteredContent: ContentPart[] = []; + const agentIdMap: Record = {}; + + for (const part of message.content) { + const p = part as ContentPart & { agentId?: string; groupId?: number }; + const agentId = p?.agentId; + const groupId = p?.groupId; + + const isParallelPart = groupId != null; + const groupPrimary = isParallelPart ? groupPrimaryMap.get(groupId) : null; + const shouldInclude = !isParallelPart || !agentId || agentId === groupPrimary; + + if (shouldInclude) { + const newIndex = filteredContent.length; + const { agentId: _a, groupId: _g, ...cleanPart } = p; + filteredContent.push(cleanPart as ContentPart); + if (agentId && hasMultipleAgents) { + agentIdMap[newIndex] = agentId; + } + } + } + + const finalContent = + Object.keys(agentIdMap).length > 0 && agentNames + ? labelContentByAgent(filteredContent as MessageContentComplex[], agentIdMap, agentNames) + : filteredContent; + + return { ...message, content: finalContent as TMessage['content'] }; + } catch (error) { + logger.error('[AgentClient] Error processing multi-agent message:', error); + return message; + } + }; +} diff --git a/packages/api/src/agents/index.ts b/packages/api/src/agents/index.ts index a5a0c340fe..9d13b3dd8e 100644 --- a/packages/api/src/agents/index.ts +++ b/packages/api/src/agents/index.ts @@ -1,5 +1,6 @@ export * from './avatars'; export * from './chain'; +export * from './client'; export * from './context'; export * from './edges'; export * from './handlers'; diff --git a/packages/api/src/agents/initialize.ts b/packages/api/src/agents/initialize.ts index 008aa4c0ba..af604beb81 100644 --- a/packages/api/src/agents/initialize.ts +++ b/packages/api/src/agents/initialize.ts @@ -413,7 +413,10 @@ export async function initializeAgent( toolContextMap: toolContextMap ?? {}, useLegacyContent: !!options.useLegacyContent, tools: (tools ?? []) as GenericTool[] & string[], - maxContextTokens: Math.round((agentMaxContextNum - maxOutputTokensNum) * 0.9), + maxContextTokens: + maxContextTokens != null && maxContextTokens > 0 + ? maxContextTokens + : Math.round((agentMaxContextNum - maxOutputTokensNum) * 0.9), }; return initializedAgent; diff --git a/packages/api/src/agents/responses/__tests__/responses-api.live.test.sh b/packages/api/src/agents/responses/__tests__/responses-api.live.test.sh new file mode 100755 index 0000000000..657e64c8e5 --- /dev/null +++ b/packages/api/src/agents/responses/__tests__/responses-api.live.test.sh @@ -0,0 +1,193 @@ +#!/usr/bin/env bash +# +# Live integration tests for the Responses API endpoint. +# Sends curl requests to a running LibreChat server to verify +# multi-turn conversations with output_text / refusal blocks work. +# +# Usage: +# ./responses-api.live.test.sh +# +# Example: +# ./responses-api.live.test.sh http://localhost:3080 sk-abc123 agent_xyz + +set -euo pipefail + +BASE_URL="${1:?Usage: $0 }" +API_KEY="${2:?Usage: $0 }" +AGENT_ID="${3:?Usage: $0 }" + +ENDPOINT="${BASE_URL}/v1/responses" +PASS=0 +FAIL=0 + +# ── Helpers ─────────────────────────────────────────────────────────── + +post_json() { + local label="$1" + local body="$2" + local stream="${3:-false}" + + echo "──────────────────────────────────────────────" + echo "TEST: ${label}" + echo "──────────────────────────────────────────────" + + local http_code + local response + + if [ "$stream" = "true" ]; then + # For streaming, just check we get a 200 and some SSE data + response=$(curl -s -w "\n%{http_code}" \ + -X POST "${ENDPOINT}" \ + -H "Content-Type: application/json" \ + -H "Authorization: Bearer ${API_KEY}" \ + -d "${body}" \ + --max-time 60) + else + response=$(curl -s -w "\n%{http_code}" \ + -X POST "${ENDPOINT}" \ + -H "Content-Type: application/json" \ + -H "Authorization: Bearer ${API_KEY}" \ + -d "${body}" \ + --max-time 60) + fi + + http_code=$(echo "$response" | tail -1) + local body_out + body_out=$(echo "$response" | sed '$d') + + if [ "$http_code" = "200" ]; then + echo " ✓ HTTP 200" + PASS=$((PASS + 1)) + else + echo " ✗ HTTP ${http_code}" + echo " Response: ${body_out}" + FAIL=$((FAIL + 1)) + fi + + # Print truncated response for inspection + echo " Response (first 300 chars): ${body_out:0:300}" + echo "" + + # Return the body for chaining + echo "$body_out" +} + +extract_response_id() { + # Extract "id" field from JSON response + echo "$1" | grep -o '"id":"[^"]*"' | head -1 | cut -d'"' -f4 +} + +# ── Test 1: Basic single-turn request ───────────────────────────────── + +RESP1=$(post_json "Basic single-turn request" "$(cat < /dev/null + +# ── Test 3: Multi-turn with refusal blocks ──────────────────────────── + +post_json "Multi-turn with refusal blocks" "$(cat < /dev/null + +# ── Test 4: Streaming request ───────────────────────────────────────── + +post_json "Streaming single-turn request" "$(cat < /dev/null + +# ── Test 5: Back-and-forth using previous_response_id ───────────────── + +RESP5=$(post_json "First turn for previous_response_id chain" "$(cat < /dev/null +else + echo " ⚠ Could not extract response ID, skipping follow-up test" + FAIL=$((FAIL + 1)) +fi + +# ── Summary ─────────────────────────────────────────────────────────── + +echo "══════════════════════════════════════════════" +echo "RESULTS: ${PASS} passed, ${FAIL} failed" +echo "══════════════════════════════════════════════" + +if [ "$FAIL" -gt 0 ]; then + exit 1 +fi diff --git a/packages/api/src/agents/responses/__tests__/service.test.ts b/packages/api/src/agents/responses/__tests__/service.test.ts new file mode 100644 index 0000000000..b9b64d21ee --- /dev/null +++ b/packages/api/src/agents/responses/__tests__/service.test.ts @@ -0,0 +1,333 @@ +import { convertInputToMessages } from '../service'; +import type { InputItem } from '../types'; + +describe('convertInputToMessages', () => { + // ── String input shorthand ───────────────────────────────────────── + it('converts a string input to a single user message', () => { + const result = convertInputToMessages('Hello'); + expect(result).toEqual([{ role: 'user', content: 'Hello' }]); + }); + + // ── Empty input array ────────────────────────────────────────────── + it('returns an empty array for empty input', () => { + const result = convertInputToMessages([]); + expect(result).toEqual([]); + }); + + // ── Role mapping ─────────────────────────────────────────────────── + it('maps developer role to system', () => { + const input: InputItem[] = [ + { type: 'message', role: 'developer', content: 'You are helpful.' }, + ]; + expect(convertInputToMessages(input)).toEqual([ + { role: 'system', content: 'You are helpful.' }, + ]); + }); + + it('maps system role to system', () => { + const input: InputItem[] = [{ type: 'message', role: 'system', content: 'System prompt.' }]; + expect(convertInputToMessages(input)).toEqual([{ role: 'system', content: 'System prompt.' }]); + }); + + it('maps user role to user', () => { + const input: InputItem[] = [{ type: 'message', role: 'user', content: 'Hi' }]; + expect(convertInputToMessages(input)).toEqual([{ role: 'user', content: 'Hi' }]); + }); + + it('maps assistant role to assistant', () => { + const input: InputItem[] = [{ type: 'message', role: 'assistant', content: 'Hello!' }]; + expect(convertInputToMessages(input)).toEqual([{ role: 'assistant', content: 'Hello!' }]); + }); + + it('defaults unknown roles to user', () => { + const input = [ + { type: 'message', role: 'unknown_role', content: 'test' }, + ] as unknown as InputItem[]; + expect(convertInputToMessages(input)[0].role).toBe('user'); + }); + + // ── input_text content blocks ────────────────────────────────────── + it('converts input_text blocks to text blocks', () => { + const input: InputItem[] = [ + { + type: 'message', + role: 'user', + content: [{ type: 'input_text', text: 'Hello world' }], + }, + ]; + const result = convertInputToMessages(input); + expect(result).toEqual([{ role: 'user', content: [{ type: 'text', text: 'Hello world' }] }]); + }); + + // ── output_text content blocks (the original bug) ────────────────── + it('converts output_text blocks to text blocks', () => { + const input: InputItem[] = [ + { + type: 'message', + role: 'assistant', + content: [{ type: 'output_text', text: 'I can help!', annotations: [], logprobs: [] }], + }, + ]; + const result = convertInputToMessages(input); + expect(result).toEqual([ + { role: 'assistant', content: [{ type: 'text', text: 'I can help!' }] }, + ]); + }); + + // ── refusal content blocks ───────────────────────────────────────── + it('converts refusal blocks to text blocks', () => { + const input: InputItem[] = [ + { + type: 'message', + role: 'assistant', + content: [{ type: 'refusal', refusal: 'I cannot do that.' }], + }, + ]; + const result = convertInputToMessages(input); + expect(result).toEqual([ + { role: 'assistant', content: [{ type: 'text', text: 'I cannot do that.' }] }, + ]); + }); + + // ── input_image content blocks ───────────────────────────────────── + it('converts input_image blocks to image_url blocks', () => { + const input: InputItem[] = [ + { + type: 'message', + role: 'user', + content: [ + { type: 'input_image', image_url: 'https://example.com/img.png', detail: 'high' }, + ], + }, + ]; + const result = convertInputToMessages(input); + expect(result).toEqual([ + { + role: 'user', + content: [ + { + type: 'image_url', + image_url: { url: 'https://example.com/img.png', detail: 'high' }, + }, + ], + }, + ]); + }); + + // ── input_file content blocks ────────────────────────────────────── + it('converts input_file blocks to text placeholders', () => { + const input: InputItem[] = [ + { + type: 'message', + role: 'user', + content: [{ type: 'input_file', filename: 'report.pdf', file_id: 'f_123' }], + }, + ]; + const result = convertInputToMessages(input); + expect(result).toEqual([ + { role: 'user', content: [{ type: 'text', text: '[File: report.pdf]' }] }, + ]); + }); + + it('uses "unknown" for input_file without filename', () => { + const input: InputItem[] = [ + { + type: 'message', + role: 'user', + content: [{ type: 'input_file', file_id: 'f_123' }], + }, + ]; + const result = convertInputToMessages(input); + expect(result).toEqual([ + { role: 'user', content: [{ type: 'text', text: '[File: unknown]' }] }, + ]); + }); + + // ── Null / undefined filtering ───────────────────────────────────── + it('filters out null elements in content arrays', () => { + const input = [ + { + type: 'message', + role: 'user', + content: [null, { type: 'input_text', text: 'valid' }, undefined], + }, + ] as unknown as InputItem[]; + const result = convertInputToMessages(input); + expect(result).toEqual([{ role: 'user', content: [{ type: 'text', text: 'valid' }] }]); + }); + + // ── Missing text field defaults to empty string ──────────────────── + it('defaults to empty string when text field is missing on input_text', () => { + const input = [ + { + type: 'message', + role: 'user', + content: [{ type: 'input_text' }], + }, + ] as unknown as InputItem[]; + const result = convertInputToMessages(input); + expect(result).toEqual([{ role: 'user', content: [{ type: 'text', text: '' }] }]); + }); + + it('defaults to empty string when text field is missing on output_text', () => { + const input = [ + { + type: 'message', + role: 'assistant', + content: [{ type: 'output_text' }], + }, + ] as unknown as InputItem[]; + const result = convertInputToMessages(input); + expect(result).toEqual([{ role: 'assistant', content: [{ type: 'text', text: '' }] }]); + }); + + it('defaults to empty string when refusal field is missing on refusal block', () => { + const input = [ + { + type: 'message', + role: 'assistant', + content: [{ type: 'refusal' }], + }, + ] as unknown as InputItem[]; + const result = convertInputToMessages(input); + expect(result).toEqual([{ role: 'assistant', content: [{ type: 'text', text: '' }] }]); + }); + + // ── Unknown block types are filtered out ─────────────────────────── + it('filters out unknown content block types', () => { + const input = [ + { + type: 'message', + role: 'user', + content: [ + { type: 'input_text', text: 'keep me' }, + { type: 'some_future_type', data: 'ignore' }, + ], + }, + ] as unknown as InputItem[]; + const result = convertInputToMessages(input); + expect(result).toEqual([{ role: 'user', content: [{ type: 'text', text: 'keep me' }] }]); + }); + + // ── Mixed valid/invalid content in same array ────────────────────── + it('handles mixed valid and invalid content blocks', () => { + const input = [ + { + type: 'message', + role: 'assistant', + content: [ + { type: 'output_text', text: 'Hello', annotations: [], logprobs: [] }, + null, + { type: 'unknown_type' }, + { type: 'refusal', refusal: 'No can do' }, + ], + }, + ] as unknown as InputItem[]; + const result = convertInputToMessages(input); + expect(result).toEqual([ + { + role: 'assistant', + content: [ + { type: 'text', text: 'Hello' }, + { type: 'text', text: 'No can do' }, + ], + }, + ]); + }); + + // ── Non-array, non-string content defaults to empty string ───────── + it('defaults to empty string for non-array non-string content', () => { + const input = [{ type: 'message', role: 'user', content: 42 }] as unknown as InputItem[]; + const result = convertInputToMessages(input); + expect(result).toEqual([{ role: 'user', content: '' }]); + }); + + // ── Function call items ──────────────────────────────────────────── + it('converts function_call items to assistant messages with tool_calls', () => { + const input: InputItem[] = [ + { + type: 'function_call', + id: 'fc_1', + call_id: 'call_abc', + name: 'get_weather', + arguments: '{"city":"NYC"}', + }, + ]; + const result = convertInputToMessages(input); + expect(result).toEqual([ + { + role: 'assistant', + content: '', + tool_calls: [ + { + id: 'call_abc', + type: 'function', + function: { name: 'get_weather', arguments: '{"city":"NYC"}' }, + }, + ], + }, + ]); + }); + + // ── Function call output items ───────────────────────────────────── + it('converts function_call_output items to tool messages', () => { + const input: InputItem[] = [ + { + type: 'function_call_output', + call_id: 'call_abc', + output: '{"temp":72}', + }, + ]; + const result = convertInputToMessages(input); + expect(result).toEqual([ + { + role: 'tool', + content: '{"temp":72}', + tool_call_id: 'call_abc', + }, + ]); + }); + + // ── Item references are skipped ──────────────────────────────────── + it('skips item_reference items', () => { + const input: InputItem[] = [ + { type: 'item_reference', id: 'ref_123' }, + { type: 'message', role: 'user', content: 'Hello' }, + ]; + const result = convertInputToMessages(input); + expect(result).toEqual([{ role: 'user', content: 'Hello' }]); + }); + + // ── Multi-turn conversation (the real-world scenario) ────────────── + it('handles a full multi-turn conversation with output_text blocks', () => { + const input: InputItem[] = [ + { + type: 'message', + role: 'developer', + content: [{ type: 'input_text', text: 'You are a helpful assistant.' }], + }, + { + type: 'message', + role: 'user', + content: [{ type: 'input_text', text: 'What is 2+2?' }], + }, + { + type: 'message', + role: 'assistant', + content: [{ type: 'output_text', text: '2+2 is 4.', annotations: [], logprobs: [] }], + }, + { + type: 'message', + role: 'user', + content: [{ type: 'input_text', text: 'And 3+3?' }], + }, + ]; + const result = convertInputToMessages(input); + expect(result).toEqual([ + { role: 'system', content: [{ type: 'text', text: 'You are a helpful assistant.' }] }, + { role: 'user', content: [{ type: 'text', text: 'What is 2+2?' }] }, + { role: 'assistant', content: [{ type: 'text', text: '2+2 is 4.' }] }, + { role: 'user', content: [{ type: 'text', text: 'And 3+3?' }] }, + ]); + }); +}); diff --git a/packages/api/src/agents/responses/service.ts b/packages/api/src/agents/responses/service.ts index 842db86679..2e49b1b979 100644 --- a/packages/api/src/agents/responses/service.ts +++ b/packages/api/src/agents/responses/service.ts @@ -6,11 +6,12 @@ */ import type { Response as ServerResponse } from 'express'; import type { - ResponseRequest, RequestValidationResult, - InputItem, - InputContent, + ResponseRequest, ResponseContext, + InputContent, + ModelContent, + InputItem, Response, } from './types'; import { @@ -134,7 +135,7 @@ export function convertInputToMessages(input: string | InputItem[]): InternalMes const messageItem = item as { type: 'message'; role: string; - content: string | InputContent[]; + content: string | (InputContent | ModelContent)[]; }; let content: InternalMessage['content']; @@ -142,21 +143,31 @@ export function convertInputToMessages(input: string | InputItem[]): InternalMes if (typeof messageItem.content === 'string') { content = messageItem.content; } else if (Array.isArray(messageItem.content)) { - content = messageItem.content.map((part) => { - if (part.type === 'input_text') { - return { type: 'text', text: part.text }; - } - if (part.type === 'input_image') { - return { - type: 'image_url', - image_url: { - url: (part as { image_url?: string }).image_url, - detail: (part as { detail?: string }).detail, - }, - }; - } - return { type: part.type }; - }); + content = messageItem.content + .filter((part): part is InputContent | ModelContent => part != null) + .map((part) => { + if (part.type === 'input_text' || part.type === 'output_text') { + return { type: 'text', text: (part as { text?: string }).text ?? '' }; + } + if (part.type === 'refusal') { + return { type: 'text', text: (part as { refusal?: string }).refusal ?? '' }; + } + if (part.type === 'input_image') { + return { + type: 'image_url', + image_url: { + url: (part as { image_url?: string }).image_url, + detail: (part as { detail?: string }).detail, + }, + }; + } + if (part.type === 'input_file') { + const filePart = part as { filename?: string }; + return { type: 'text', text: `[File: ${filePart.filename ?? 'unknown'}]` }; + } + return null; + }) + .filter((part): part is NonNullable => part != null); } else { content = ''; } diff --git a/packages/api/src/app/config.test.ts b/packages/api/src/app/config.test.ts index f85bb8a62c..3e2ee6d143 100644 --- a/packages/api/src/app/config.test.ts +++ b/packages/api/src/app/config.test.ts @@ -1,7 +1,7 @@ -import { getTransactionsConfig, getBalanceConfig } from './config'; +import { getTransactionsConfig, getBalanceConfig, getCustomEndpointConfig } from './config'; import { logger } from '@librechat/data-schemas'; -import { FileSources } from 'librechat-data-provider'; -import type { TCustomConfig } from 'librechat-data-provider'; +import { FileSources, EModelEndpoint } from 'librechat-data-provider'; +import type { TCustomConfig, TEndpoint } from 'librechat-data-provider'; import type { AppConfig } from '@librechat/data-schemas'; // Helper function to create a minimal AppConfig for testing @@ -282,3 +282,75 @@ describe('getBalanceConfig', () => { }); }); }); + +describe('getCustomEndpointConfig', () => { + describe('when appConfig is not provided', () => { + it('should throw an error', () => { + expect(() => getCustomEndpointConfig({ endpoint: 'test' })).toThrow( + 'Config not found for the test custom endpoint.', + ); + }); + }); + + describe('when appConfig is provided', () => { + it('should return undefined when no custom endpoints are configured', () => { + const appConfig = createTestAppConfig(); + const result = getCustomEndpointConfig({ endpoint: 'test', appConfig }); + expect(result).toBeUndefined(); + }); + + it('should return the matching endpoint config when found', () => { + const appConfig = createTestAppConfig({ + endpoints: { + [EModelEndpoint.custom]: [ + { + name: 'TestEndpoint', + apiKey: 'test-key', + } as TEndpoint, + ], + }, + }); + + const result = getCustomEndpointConfig({ endpoint: 'TestEndpoint', appConfig }); + expect(result).toEqual({ + name: 'TestEndpoint', + apiKey: 'test-key', + }); + }); + + it('should handle case-insensitive matching for Ollama endpoint', () => { + const appConfig = createTestAppConfig({ + endpoints: { + [EModelEndpoint.custom]: [ + { + name: 'Ollama', + apiKey: 'ollama-key', + } as TEndpoint, + ], + }, + }); + + const result = getCustomEndpointConfig({ endpoint: 'Ollama', appConfig }); + expect(result).toEqual({ + name: 'Ollama', + apiKey: 'ollama-key', + }); + }); + + it('should handle mixed case endpoint names', () => { + const appConfig = createTestAppConfig({ + endpoints: { + [EModelEndpoint.custom]: [ + { + name: 'CustomAI', + apiKey: 'custom-key', + } as TEndpoint, + ], + }, + }); + + const result = getCustomEndpointConfig({ endpoint: 'customai', appConfig }); + expect(result).toBeUndefined(); + }); + }); +}); diff --git a/packages/api/src/app/config.ts b/packages/api/src/app/config.ts index 38144dee2b..0a2fb3e6f9 100644 --- a/packages/api/src/app/config.ts +++ b/packages/api/src/app/config.ts @@ -64,7 +64,7 @@ export const getCustomEndpointConfig = ({ const customEndpoints = appConfig.endpoints?.[EModelEndpoint.custom] ?? []; return customEndpoints.find( - (endpointConfig) => normalizeEndpointName(endpointConfig.name) === endpoint, + (endpointConfig) => normalizeEndpointName(endpointConfig.name) === normalizeEndpointName(endpoint), ); }; diff --git a/packages/api/src/auth/agent.spec.ts b/packages/api/src/auth/agent.spec.ts new file mode 100644 index 0000000000..9ab2a9aaf9 --- /dev/null +++ b/packages/api/src/auth/agent.spec.ts @@ -0,0 +1,113 @@ +jest.mock('node:dns', () => { + const actual = jest.requireActual('node:dns'); + return { + ...actual, + lookup: jest.fn(), + }; +}); + +import dns from 'node:dns'; +import { createSSRFSafeAgents, createSSRFSafeUndiciConnect } from './agent'; + +type LookupCallback = (err: NodeJS.ErrnoException | null, address: string, family: number) => void; + +const mockedDnsLookup = dns.lookup as jest.MockedFunction; + +function mockDnsResult(address: string, family: number): void { + mockedDnsLookup.mockImplementation((( + _hostname: string, + _options: unknown, + callback: LookupCallback, + ) => { + callback(null, address, family); + }) as never); +} + +function mockDnsError(err: NodeJS.ErrnoException): void { + mockedDnsLookup.mockImplementation((( + _hostname: string, + _options: unknown, + callback: LookupCallback, + ) => { + callback(err, '', 0); + }) as never); +} + +describe('createSSRFSafeAgents', () => { + afterEach(() => { + jest.clearAllMocks(); + }); + + it('should return httpAgent and httpsAgent', () => { + const agents = createSSRFSafeAgents(); + expect(agents.httpAgent).toBeDefined(); + expect(agents.httpsAgent).toBeDefined(); + }); + + it('should patch httpAgent createConnection to inject SSRF lookup', () => { + const agents = createSSRFSafeAgents(); + const internal = agents.httpAgent as unknown as { + createConnection: (opts: Record) => unknown; + }; + expect(internal.createConnection).toBeInstanceOf(Function); + }); +}); + +describe('createSSRFSafeUndiciConnect', () => { + afterEach(() => { + jest.clearAllMocks(); + }); + + it('should return an object with a lookup function', () => { + const connect = createSSRFSafeUndiciConnect(); + expect(connect).toHaveProperty('lookup'); + expect(connect.lookup).toBeInstanceOf(Function); + }); + + it('lookup should block private IPs', async () => { + mockDnsResult('10.0.0.1', 4); + const connect = createSSRFSafeUndiciConnect(); + + const result = await new Promise<{ err: NodeJS.ErrnoException | null }>((resolve) => { + connect.lookup('evil.example.com', {}, (err) => { + resolve({ err }); + }); + }); + + expect(result.err).toBeTruthy(); + expect(result.err!.code).toBe('ESSRF'); + }); + + it('lookup should allow public IPs', async () => { + mockDnsResult('93.184.216.34', 4); + const connect = createSSRFSafeUndiciConnect(); + + const result = await new Promise<{ err: NodeJS.ErrnoException | null; address: string }>( + (resolve) => { + connect.lookup('example.com', {}, (err, address) => { + resolve({ err, address: address as string }); + }); + }, + ); + + expect(result.err).toBeNull(); + expect(result.address).toBe('93.184.216.34'); + }); + + it('lookup should forward DNS errors', async () => { + const dnsError = Object.assign(new Error('ENOTFOUND'), { + code: 'ENOTFOUND', + }) as NodeJS.ErrnoException; + mockDnsError(dnsError); + const connect = createSSRFSafeUndiciConnect(); + + const result = await new Promise<{ err: NodeJS.ErrnoException | null }>((resolve) => { + connect.lookup('nonexistent.example.com', {}, (err) => { + resolve({ err }); + }); + }); + + expect(result.err).toBeTruthy(); + expect(result.err!.code).toBe('ENOTFOUND'); + }); +}); diff --git a/packages/api/src/auth/agent.ts b/packages/api/src/auth/agent.ts new file mode 100644 index 0000000000..2442aa20fa --- /dev/null +++ b/packages/api/src/auth/agent.ts @@ -0,0 +1,61 @@ +import dns from 'node:dns'; +import http from 'node:http'; +import https from 'node:https'; +import type { LookupFunction } from 'node:net'; +import { isPrivateIP } from './domain'; + +/** DNS lookup wrapper that blocks resolution to private/reserved IP addresses */ +const ssrfSafeLookup: LookupFunction = (hostname, options, callback) => { + dns.lookup(hostname, options, (err, address, family) => { + if (err) { + callback(err, '', 0); + return; + } + if (typeof address === 'string' && isPrivateIP(address)) { + const ssrfError = Object.assign( + new Error(`SSRF protection: ${hostname} resolved to blocked address ${address}`), + { code: 'ESSRF' }, + ) as NodeJS.ErrnoException; + callback(ssrfError, address, family as number); + return; + } + callback(null, address as string, family as number); + }); +}; + +/** Internal agent shape exposing createConnection (exists at runtime but not in TS types) */ +type AgentInternal = { + createConnection: (options: Record, oncreate?: unknown) => unknown; +}; + +/** Patches an agent instance to inject SSRF-safe DNS lookup at connect time */ +function withSSRFProtection(agent: T): T { + const internal = agent as unknown as AgentInternal; + const origCreate = internal.createConnection.bind(agent); + internal.createConnection = (options: Record, oncreate?: unknown) => { + options.lookup = ssrfSafeLookup; + return origCreate(options, oncreate); + }; + return agent; +} + +/** + * Creates HTTP and HTTPS agents that block TCP connections to private/reserved IP addresses. + * Provides TOCTOU-safe SSRF protection by validating the resolved IP at connect time, + * preventing DNS rebinding attacks where a hostname resolves to a public IP during + * pre-validation but to a private IP when the actual connection is made. + */ +export function createSSRFSafeAgents(): { httpAgent: http.Agent; httpsAgent: https.Agent } { + return { + httpAgent: withSSRFProtection(new http.Agent()), + httpsAgent: withSSRFProtection(new https.Agent()), + }; +} + +/** + * Returns undici-compatible `connect` options with SSRF-safe DNS lookup. + * Pass the result as the `connect` property when constructing an undici `Agent`. + */ +export function createSSRFSafeUndiciConnect(): { lookup: LookupFunction } { + return { lookup: ssrfSafeLookup }; +} diff --git a/packages/api/src/auth/domain.spec.ts b/packages/api/src/auth/domain.spec.ts index a2b4c42cd7..5f6187c9b4 100644 --- a/packages/api/src/auth/domain.spec.ts +++ b/packages/api/src/auth/domain.spec.ts @@ -1,12 +1,21 @@ /* eslint-disable @typescript-eslint/ban-ts-comment */ +jest.mock('node:dns/promises', () => ({ + lookup: jest.fn(), +})); + +import { lookup } from 'node:dns/promises'; import { extractMCPServerDomain, isActionDomainAllowed, isEmailDomainAllowed, isMCPDomainAllowed, + isPrivateIP, isSSRFTarget, + resolveHostnameSSRF, } from './domain'; +const mockedLookup = lookup as jest.MockedFunction; + describe('isEmailDomainAllowed', () => { afterEach(() => { jest.clearAllMocks(); @@ -192,7 +201,154 @@ describe('isSSRFTarget', () => { }); }); +describe('isPrivateIP', () => { + describe('IPv4 private ranges', () => { + it('should detect loopback addresses', () => { + expect(isPrivateIP('127.0.0.1')).toBe(true); + expect(isPrivateIP('127.255.255.255')).toBe(true); + }); + + it('should detect 10.x.x.x private range', () => { + expect(isPrivateIP('10.0.0.1')).toBe(true); + expect(isPrivateIP('10.255.255.255')).toBe(true); + }); + + it('should detect 172.16-31.x.x private range', () => { + expect(isPrivateIP('172.16.0.1')).toBe(true); + expect(isPrivateIP('172.31.255.255')).toBe(true); + expect(isPrivateIP('172.15.0.1')).toBe(false); + expect(isPrivateIP('172.32.0.1')).toBe(false); + }); + + it('should detect 192.168.x.x private range', () => { + expect(isPrivateIP('192.168.0.1')).toBe(true); + expect(isPrivateIP('192.168.255.255')).toBe(true); + }); + + it('should detect 169.254.x.x link-local range', () => { + expect(isPrivateIP('169.254.169.254')).toBe(true); + expect(isPrivateIP('169.254.0.1')).toBe(true); + }); + + it('should detect 0.0.0.0', () => { + expect(isPrivateIP('0.0.0.0')).toBe(true); + }); + + it('should allow public IPs', () => { + expect(isPrivateIP('8.8.8.8')).toBe(false); + expect(isPrivateIP('1.1.1.1')).toBe(false); + expect(isPrivateIP('93.184.216.34')).toBe(false); + }); + }); + + describe('IPv6 private ranges', () => { + it('should detect loopback', () => { + expect(isPrivateIP('::1')).toBe(true); + expect(isPrivateIP('::')).toBe(true); + expect(isPrivateIP('[::1]')).toBe(true); + }); + + it('should detect unique local (fc/fd) and link-local (fe80)', () => { + expect(isPrivateIP('fc00::1')).toBe(true); + expect(isPrivateIP('fd00::1')).toBe(true); + expect(isPrivateIP('fe80::1')).toBe(true); + }); + }); + + describe('IPv4-mapped IPv6 addresses', () => { + it('should detect private IPs in IPv4-mapped IPv6 form', () => { + expect(isPrivateIP('::ffff:169.254.169.254')).toBe(true); + expect(isPrivateIP('::ffff:127.0.0.1')).toBe(true); + expect(isPrivateIP('::ffff:10.0.0.1')).toBe(true); + expect(isPrivateIP('::ffff:192.168.1.1')).toBe(true); + }); + + it('should allow public IPs in IPv4-mapped IPv6 form', () => { + expect(isPrivateIP('::ffff:8.8.8.8')).toBe(false); + expect(isPrivateIP('::ffff:93.184.216.34')).toBe(false); + }); + }); +}); + +describe('resolveHostnameSSRF', () => { + afterEach(() => { + jest.clearAllMocks(); + }); + + it('should detect domains that resolve to private IPs (nip.io bypass)', async () => { + mockedLookup.mockResolvedValueOnce([{ address: '169.254.169.254', family: 4 }] as never); + expect(await resolveHostnameSSRF('169.254.169.254.nip.io')).toBe(true); + }); + + it('should detect domains that resolve to loopback', async () => { + mockedLookup.mockResolvedValueOnce([{ address: '127.0.0.1', family: 4 }] as never); + expect(await resolveHostnameSSRF('loopback.example.com')).toBe(true); + }); + + it('should detect when any resolved address is private', async () => { + mockedLookup.mockResolvedValueOnce([ + { address: '93.184.216.34', family: 4 }, + { address: '10.0.0.1', family: 4 }, + ] as never); + expect(await resolveHostnameSSRF('dual.example.com')).toBe(true); + }); + + it('should allow domains that resolve to public IPs', async () => { + mockedLookup.mockResolvedValueOnce([{ address: '93.184.216.34', family: 4 }] as never); + expect(await resolveHostnameSSRF('example.com')).toBe(false); + }); + + it('should skip literal IPv4 addresses (handled by isSSRFTarget)', async () => { + expect(await resolveHostnameSSRF('169.254.169.254')).toBe(false); + expect(mockedLookup).not.toHaveBeenCalled(); + }); + + it('should skip literal IPv6 addresses', async () => { + expect(await resolveHostnameSSRF('::1')).toBe(false); + expect(mockedLookup).not.toHaveBeenCalled(); + }); + + it('should fail open on DNS resolution failure', async () => { + mockedLookup.mockRejectedValueOnce(new Error('ENOTFOUND')); + expect(await resolveHostnameSSRF('nonexistent.example.com')).toBe(false); + }); +}); + +describe('isActionDomainAllowed - DNS resolution SSRF protection', () => { + afterEach(() => { + jest.clearAllMocks(); + }); + + it('should block domains resolving to cloud metadata IP (169.254.169.254)', async () => { + mockedLookup.mockResolvedValueOnce([{ address: '169.254.169.254', family: 4 }] as never); + expect(await isActionDomainAllowed('169.254.169.254.nip.io', null)).toBe(false); + }); + + it('should block domains resolving to private 10.x range', async () => { + mockedLookup.mockResolvedValueOnce([{ address: '10.0.0.5', family: 4 }] as never); + expect(await isActionDomainAllowed('internal.attacker.com', null)).toBe(false); + }); + + it('should block domains resolving to 172.16.x range', async () => { + mockedLookup.mockResolvedValueOnce([{ address: '172.16.0.1', family: 4 }] as never); + expect(await isActionDomainAllowed('docker.attacker.com', null)).toBe(false); + }); + + it('should allow domains resolving to public IPs when no allowlist', async () => { + mockedLookup.mockResolvedValueOnce([{ address: '93.184.216.34', family: 4 }] as never); + expect(await isActionDomainAllowed('example.com', null)).toBe(true); + }); + + it('should not perform DNS check when allowedDomains is configured', async () => { + expect(await isActionDomainAllowed('example.com', ['example.com'])).toBe(true); + expect(mockedLookup).not.toHaveBeenCalled(); + }); +}); + describe('isActionDomainAllowed', () => { + beforeEach(() => { + mockedLookup.mockResolvedValue([{ address: '93.184.216.34', family: 4 }] as never); + }); afterEach(() => { jest.clearAllMocks(); }); @@ -541,6 +697,9 @@ describe('extractMCPServerDomain', () => { }); describe('isMCPDomainAllowed', () => { + beforeEach(() => { + mockedLookup.mockResolvedValue([{ address: '93.184.216.34', family: 4 }] as never); + }); afterEach(() => { jest.clearAllMocks(); }); diff --git a/packages/api/src/auth/domain.ts b/packages/api/src/auth/domain.ts index 5d9fc51d02..f2e86875d4 100644 --- a/packages/api/src/auth/domain.ts +++ b/packages/api/src/auth/domain.ts @@ -1,3 +1,5 @@ +import { lookup } from 'node:dns/promises'; + /** * @param email * @param allowedDomains @@ -22,6 +24,88 @@ export function isEmailDomainAllowed(email: string, allowedDomains?: string[] | return allowedDomains.some((allowedDomain) => allowedDomain?.toLowerCase() === domain); } +/** Checks if IPv4 octets fall within private, reserved, or link-local ranges */ +function isPrivateIPv4(a: number, b: number, c: number): boolean { + if (a === 127) { + return true; + } + if (a === 10) { + return true; + } + if (a === 172 && b >= 16 && b <= 31) { + return true; + } + if (a === 192 && b === 168) { + return true; + } + if (a === 169 && b === 254) { + return true; + } + if (a === 0 && b === 0 && c === 0) { + return true; + } + return false; +} + +/** + * Checks if an IP address belongs to a private, reserved, or link-local range. + * Handles IPv4, IPv6, and IPv4-mapped IPv6 addresses (::ffff:A.B.C.D). + */ +export function isPrivateIP(ip: string): boolean { + const normalized = ip.toLowerCase().trim(); + + const mappedMatch = normalized.match(/^::ffff:(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$/); + if (mappedMatch) { + const [, a, b, c] = mappedMatch.map(Number); + return isPrivateIPv4(a, b, c); + } + + const ipv4Match = normalized.match(/^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$/); + if (ipv4Match) { + const [, a, b, c] = ipv4Match.map(Number); + return isPrivateIPv4(a, b, c); + } + + const ipv6 = normalized.replace(/^\[|\]$/g, ''); + if ( + ipv6 === '::1' || + ipv6 === '::' || + ipv6.startsWith('fc') || + ipv6.startsWith('fd') || + ipv6.startsWith('fe80') + ) { + return true; + } + + return false; +} + +/** + * Resolves a hostname via DNS and checks if any resolved address is a private/reserved IP. + * Detects DNS-based SSRF bypasses (e.g., nip.io wildcard DNS, attacker-controlled nameservers). + * Fails open: returns false if DNS resolution fails, since hostname-only checks still apply + * and the actual HTTP request would also fail. + */ +export async function resolveHostnameSSRF(hostname: string): Promise { + const normalizedHost = hostname.toLowerCase().trim(); + + if (/^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$/.test(normalizedHost)) { + return false; + } + + const ipv6Check = normalizedHost.replace(/^\[|\]$/g, ''); + if (ipv6Check.includes(':')) { + return false; + } + + try { + const addresses = await lookup(hostname, { all: true }); + return addresses.some((entry) => isPrivateIP(entry.address)); + } catch { + return false; + } +} + /** * SSRF Protection: Checks if a hostname/IP is a potentially dangerous internal target. * Blocks private IPs, localhost, cloud metadata IPs, and common internal hostnames. @@ -31,7 +115,6 @@ export function isEmailDomainAllowed(email: string, allowedDomains?: string[] | export function isSSRFTarget(hostname: string): boolean { const normalizedHost = hostname.toLowerCase().trim(); - // Block localhost variations if ( normalizedHost === 'localhost' || normalizedHost === 'localhost.localdomain' || @@ -40,51 +123,7 @@ export function isSSRFTarget(hostname: string): boolean { return true; } - // Check if it's an IP address and block private/internal ranges - const ipv4Match = normalizedHost.match(/^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$/); - if (ipv4Match) { - const [, a, b, c] = ipv4Match.map(Number); - - // 127.0.0.0/8 - Loopback - if (a === 127) { - return true; - } - - // 10.0.0.0/8 - Private - if (a === 10) { - return true; - } - - // 172.16.0.0/12 - Private (172.16.x.x - 172.31.x.x) - if (a === 172 && b >= 16 && b <= 31) { - return true; - } - - // 192.168.0.0/16 - Private - if (a === 192 && b === 168) { - return true; - } - - // 169.254.0.0/16 - Link-local (includes cloud metadata 169.254.169.254) - if (a === 169 && b === 254) { - return true; - } - - // 0.0.0.0 - Special - if (a === 0 && b === 0 && c === 0) { - return true; - } - } - - // IPv6 loopback and private ranges - const ipv6Normalized = normalizedHost.replace(/^\[|\]$/g, ''); // Remove brackets if present - if ( - ipv6Normalized === '::1' || - ipv6Normalized === '::' || - ipv6Normalized.startsWith('fc') || // fc00::/7 - Unique local - ipv6Normalized.startsWith('fd') || // fd00::/8 - Unique local - ipv6Normalized.startsWith('fe80') // fe80::/10 - Link-local - ) { + if (isPrivateIP(normalizedHost)) { return true; } @@ -257,6 +296,10 @@ async function isDomainAllowedCore( if (isSSRFTarget(inputSpec.hostname)) { return false; } + /** SECURITY: Resolve hostname and block if it points to a private/reserved IP */ + if (await resolveHostnameSSRF(inputSpec.hostname)) { + return false; + } return true; } diff --git a/packages/api/src/auth/index.ts b/packages/api/src/auth/index.ts index d15d94aad2..392605ef50 100644 --- a/packages/api/src/auth/index.ts +++ b/packages/api/src/auth/index.ts @@ -1,3 +1,4 @@ export * from './domain'; export * from './openid'; export * from './exchange'; +export * from './agent'; diff --git a/packages/api/src/cache/__tests__/cacheConfig.spec.ts b/packages/api/src/cache/__tests__/cacheConfig.spec.ts index e24f52fee0..0488cfecfc 100644 --- a/packages/api/src/cache/__tests__/cacheConfig.spec.ts +++ b/packages/api/src/cache/__tests__/cacheConfig.spec.ts @@ -215,16 +215,30 @@ describe('cacheConfig', () => { }).rejects.toThrow('Invalid cache keys in FORCED_IN_MEMORY_CACHE_NAMESPACES: INVALID_KEY'); }); - test('should handle empty string gracefully', async () => { + test('should produce empty array when set to empty string (opt out of defaults)', async () => { process.env.FORCED_IN_MEMORY_CACHE_NAMESPACES = ''; const { cacheConfig } = await import('../cacheConfig'); expect(cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES).toEqual([]); }); - test('should handle undefined env var gracefully', async () => { + test('should default to CONFIG_STORE and APP_CONFIG when env var is not set', async () => { const { cacheConfig } = await import('../cacheConfig'); - expect(cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES).toEqual([]); + expect(cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES).toEqual(['CONFIG_STORE', 'APP_CONFIG']); + }); + + test('should accept TOOL_CACHE as a valid namespace', async () => { + process.env.FORCED_IN_MEMORY_CACHE_NAMESPACES = 'TOOL_CACHE'; + + const { cacheConfig } = await import('../cacheConfig'); + expect(cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES).toEqual(['TOOL_CACHE']); + }); + + test('should accept CONFIG_STORE and APP_CONFIG together for blue/green deployments', async () => { + process.env.FORCED_IN_MEMORY_CACHE_NAMESPACES = 'CONFIG_STORE,APP_CONFIG'; + + const { cacheConfig } = await import('../cacheConfig'); + expect(cacheConfig.FORCED_IN_MEMORY_CACHE_NAMESPACES).toEqual(['CONFIG_STORE', 'APP_CONFIG']); }); }); }); diff --git a/packages/api/src/cache/__tests__/cacheFactory/standardCache.namespace_isolation.spec.ts b/packages/api/src/cache/__tests__/cacheFactory/standardCache.namespace_isolation.spec.ts new file mode 100644 index 0000000000..9a8b4ff3bf --- /dev/null +++ b/packages/api/src/cache/__tests__/cacheFactory/standardCache.namespace_isolation.spec.ts @@ -0,0 +1,135 @@ +import { CacheKeys } from 'librechat-data-provider'; + +const mockKeyvRedisInstance = { + namespace: '', + keyPrefixSeparator: '', + on: jest.fn(), +}; + +const MockKeyvRedis = jest.fn().mockReturnValue(mockKeyvRedisInstance); + +jest.mock('@keyv/redis', () => ({ + default: MockKeyvRedis, +})); + +const mockKeyvRedisClient = { scanIterator: jest.fn() }; + +jest.mock('../../redisClients', () => ({ + keyvRedisClient: mockKeyvRedisClient, + ioredisClient: null, +})); + +jest.mock('../../redisUtils', () => ({ + batchDeleteKeys: jest.fn(), + scanKeys: jest.fn(), +})); + +jest.mock('@librechat/data-schemas', () => ({ + logger: { + error: jest.fn(), + warn: jest.fn(), + debug: jest.fn(), + }, +})); + +describe('standardCache - CONFIG_STORE vs TOOL_CACHE namespace isolation', () => { + afterEach(() => { + jest.resetModules(); + MockKeyvRedis.mockClear(); + }); + + /** + * Core behavioral test for blue/green deployments: + * When CONFIG_STORE and APP_CONFIG are forced in-memory, + * TOOL_CACHE should still use Redis for cross-container sharing. + */ + it('should force CONFIG_STORE to in-memory while TOOL_CACHE uses Redis', async () => { + jest.doMock('../../cacheConfig', () => ({ + cacheConfig: { + FORCED_IN_MEMORY_CACHE_NAMESPACES: [CacheKeys.CONFIG_STORE, CacheKeys.APP_CONFIG], + REDIS_KEY_PREFIX: '', + GLOBAL_PREFIX_SEPARATOR: '>>', + }, + })); + + const { standardCache } = await import('../../cacheFactory'); + + MockKeyvRedis.mockClear(); + + const configCache = standardCache(CacheKeys.CONFIG_STORE); + expect(MockKeyvRedis).not.toHaveBeenCalled(); + expect(configCache).toBeDefined(); + + const appConfigCache = standardCache(CacheKeys.APP_CONFIG); + expect(MockKeyvRedis).not.toHaveBeenCalled(); + expect(appConfigCache).toBeDefined(); + + const toolCache = standardCache(CacheKeys.TOOL_CACHE); + expect(MockKeyvRedis).toHaveBeenCalledTimes(1); + expect(MockKeyvRedis).toHaveBeenCalledWith(mockKeyvRedisClient); + expect(toolCache).toBeDefined(); + }); + + it('CONFIG_STORE and TOOL_CACHE should be independent stores', async () => { + jest.doMock('../../cacheConfig', () => ({ + cacheConfig: { + FORCED_IN_MEMORY_CACHE_NAMESPACES: [CacheKeys.CONFIG_STORE], + REDIS_KEY_PREFIX: '', + GLOBAL_PREFIX_SEPARATOR: '>>', + }, + })); + + const { standardCache } = await import('../../cacheFactory'); + + const configCache = standardCache(CacheKeys.CONFIG_STORE); + const toolCache = standardCache(CacheKeys.TOOL_CACHE); + + await configCache.set('STARTUP_CONFIG', { version: 'v2-green' }); + await toolCache.set('tools:global', { myTool: { type: 'function' } }); + + expect(await configCache.get('STARTUP_CONFIG')).toEqual({ version: 'v2-green' }); + expect(await configCache.get('tools:global')).toBeUndefined(); + + expect(await toolCache.get('STARTUP_CONFIG')).toBeUndefined(); + }); + + it('should use Redis for all namespaces when nothing is forced in-memory', async () => { + jest.doMock('../../cacheConfig', () => ({ + cacheConfig: { + FORCED_IN_MEMORY_CACHE_NAMESPACES: [], + REDIS_KEY_PREFIX: '', + GLOBAL_PREFIX_SEPARATOR: '>>', + }, + })); + + const { standardCache } = await import('../../cacheFactory'); + + MockKeyvRedis.mockClear(); + + standardCache(CacheKeys.CONFIG_STORE); + standardCache(CacheKeys.TOOL_CACHE); + standardCache(CacheKeys.APP_CONFIG); + + expect(MockKeyvRedis).toHaveBeenCalledTimes(3); + }); + + it('forcing TOOL_CACHE to in-memory should not affect CONFIG_STORE', async () => { + jest.doMock('../../cacheConfig', () => ({ + cacheConfig: { + FORCED_IN_MEMORY_CACHE_NAMESPACES: [CacheKeys.TOOL_CACHE], + REDIS_KEY_PREFIX: '', + GLOBAL_PREFIX_SEPARATOR: '>>', + }, + })); + + const { standardCache } = await import('../../cacheFactory'); + + MockKeyvRedis.mockClear(); + + standardCache(CacheKeys.TOOL_CACHE); + expect(MockKeyvRedis).not.toHaveBeenCalled(); + + standardCache(CacheKeys.CONFIG_STORE); + expect(MockKeyvRedis).toHaveBeenCalledTimes(1); + }); +}); diff --git a/packages/api/src/cache/__tests__/cacheFactory/violationCache.cache_integration.spec.ts b/packages/api/src/cache/__tests__/cacheFactory/violationCache.cache_integration.spec.ts index 989008e82e..1978620c24 100644 --- a/packages/api/src/cache/__tests__/cacheFactory/violationCache.cache_integration.spec.ts +++ b/packages/api/src/cache/__tests__/cacheFactory/violationCache.cache_integration.spec.ts @@ -20,6 +20,24 @@ interface ViolationData { }; } +/** Waits for both Redis clients (ioredis + keyv/node-redis) to be ready */ +async function waitForRedisClients() { + const redisClients = await import('../../redisClients'); + const { ioredisClient, keyvRedisClientReady } = redisClients; + + if (ioredisClient && ioredisClient.status !== 'ready') { + await new Promise((resolve) => { + ioredisClient.once('ready', resolve); + }); + } + + if (keyvRedisClientReady) { + await keyvRedisClientReady; + } + + return redisClients; +} + describe('violationCache', () => { let originalEnv: NodeJS.ProcessEnv; @@ -45,17 +63,9 @@ describe('violationCache', () => { test('should create violation cache with Redis when USE_REDIS is true', async () => { const cacheFactory = await import('../../cacheFactory'); - const redisClients = await import('../../redisClients'); - const { ioredisClient } = redisClients; + await waitForRedisClients(); const cache = cacheFactory.violationCache('test-violations', 60000); // 60 second TTL - // Wait for Redis connection to be ready - if (ioredisClient && ioredisClient.status !== 'ready') { - await new Promise((resolve) => { - ioredisClient.once('ready', resolve); - }); - } - // Verify it returns a Keyv instance expect(cache).toBeDefined(); expect(cache.constructor.name).toBe('Keyv'); @@ -112,18 +122,10 @@ describe('violationCache', () => { test('should respect namespace prefixing', async () => { const cacheFactory = await import('../../cacheFactory'); - const redisClients = await import('../../redisClients'); - const { ioredisClient } = redisClients; + await waitForRedisClients(); const cache1 = cacheFactory.violationCache('namespace1'); const cache2 = cacheFactory.violationCache('namespace2'); - // Wait for Redis connection to be ready - if (ioredisClient && ioredisClient.status !== 'ready') { - await new Promise((resolve) => { - ioredisClient.once('ready', resolve); - }); - } - const testKey = 'shared-key'; const value1: ViolationData = { namespace: 1 }; const value2: ViolationData = { namespace: 2 }; @@ -146,18 +148,10 @@ describe('violationCache', () => { test('should respect TTL settings', async () => { const cacheFactory = await import('../../cacheFactory'); - const redisClients = await import('../../redisClients'); - const { ioredisClient } = redisClients; + await waitForRedisClients(); const ttl = 1000; // 1 second TTL const cache = cacheFactory.violationCache('ttl-test', ttl); - // Wait for Redis connection to be ready - if (ioredisClient && ioredisClient.status !== 'ready') { - await new Promise((resolve) => { - ioredisClient.once('ready', resolve); - }); - } - const testKey = 'ttl-key'; const testValue: ViolationData = { data: 'expires soon' }; @@ -178,17 +172,9 @@ describe('violationCache', () => { test('should handle complex violation data structures', async () => { const cacheFactory = await import('../../cacheFactory'); - const redisClients = await import('../../redisClients'); - const { ioredisClient } = redisClients; + await waitForRedisClients(); const cache = cacheFactory.violationCache('complex-violations'); - // Wait for Redis connection to be ready - if (ioredisClient && ioredisClient.status !== 'ready') { - await new Promise((resolve) => { - ioredisClient.once('ready', resolve); - }); - } - const complexData: ViolationData = { userId: 'user123', violations: [ diff --git a/packages/api/src/cache/cacheConfig.ts b/packages/api/src/cache/cacheConfig.ts index 32ea2cddd1..0d4304f5c3 100644 --- a/packages/api/src/cache/cacheConfig.ts +++ b/packages/api/src/cache/cacheConfig.ts @@ -27,9 +27,14 @@ const USE_REDIS_STREAMS = // Comma-separated list of cache namespaces that should be forced to use in-memory storage // even when Redis is enabled. This allows selective performance optimization for specific caches. -const FORCED_IN_MEMORY_CACHE_NAMESPACES = process.env.FORCED_IN_MEMORY_CACHE_NAMESPACES - ? process.env.FORCED_IN_MEMORY_CACHE_NAMESPACES.split(',').map((key) => key.trim()) - : []; +// Defaults to CONFIG_STORE,APP_CONFIG so YAML-derived config stays per-container. +// Set to empty string to force all namespaces through Redis. +const FORCED_IN_MEMORY_CACHE_NAMESPACES = + process.env.FORCED_IN_MEMORY_CACHE_NAMESPACES !== undefined + ? process.env.FORCED_IN_MEMORY_CACHE_NAMESPACES.split(',') + .map((key) => key.trim()) + .filter(Boolean) + : [CacheKeys.CONFIG_STORE, CacheKeys.APP_CONFIG]; // Validate against CacheKeys enum if (FORCED_IN_MEMORY_CACHE_NAMESPACES.length > 0) { diff --git a/packages/api/src/cache/cacheFactory.ts b/packages/api/src/cache/cacheFactory.ts index 9b59afe554..2d7817c2ad 100644 --- a/packages/api/src/cache/cacheFactory.ts +++ b/packages/api/src/cache/cacheFactory.ts @@ -120,7 +120,9 @@ export const limiterCache = (prefix: string): RedisStore | undefined => { if (!cacheConfig.USE_REDIS) { return undefined; } - // TODO: The prefix is not actually applied. Also needs to account for global prefix. + // Note: The `prefix` is applied by RedisStore internally to its key operations. + // The global REDIS_KEY_PREFIX is applied by ioredisClient's keyPrefix setting. + // Combined key format: `{REDIS_KEY_PREFIX}::{prefix}{identifier}` prefix = prefix.endsWith(':') ? prefix : `${prefix}:`; try { diff --git a/packages/api/src/cache/redisClients.ts b/packages/api/src/cache/redisClients.ts index 79489336c4..fca4365f7f 100644 --- a/packages/api/src/cache/redisClients.ts +++ b/packages/api/src/cache/redisClients.ts @@ -29,7 +29,9 @@ if (cacheConfig.USE_REDIS) { ); return null; } - const delay = Math.min(times * 50, cacheConfig.REDIS_RETRY_MAX_DELAY); + const base = Math.min(Math.pow(2, times) * 50, cacheConfig.REDIS_RETRY_MAX_DELAY); + const jitter = Math.floor(Math.random() * Math.min(base, 1000)); + const delay = Math.min(base + jitter, cacheConfig.REDIS_RETRY_MAX_DELAY); logger.info(`ioredis reconnecting... attempt ${times}, delay ${delay}ms`); return delay; }, @@ -71,7 +73,9 @@ if (cacheConfig.USE_REDIS) { ); return null; } - const delay = Math.min(times * 100, cacheConfig.REDIS_RETRY_MAX_DELAY); + const base = Math.min(Math.pow(2, times) * 100, cacheConfig.REDIS_RETRY_MAX_DELAY); + const jitter = Math.floor(Math.random() * Math.min(base, 1000)); + const delay = Math.min(base + jitter, cacheConfig.REDIS_RETRY_MAX_DELAY); logger.info(`ioredis cluster reconnecting... attempt ${times}, delay ${delay}ms`); return delay; }, @@ -149,7 +153,9 @@ if (cacheConfig.USE_REDIS) { ); return new Error('Max reconnection attempts reached'); } - const delay = Math.min(retries * 100, cacheConfig.REDIS_RETRY_MAX_DELAY); + const base = Math.min(Math.pow(2, retries) * 100, cacheConfig.REDIS_RETRY_MAX_DELAY); + const jitter = Math.floor(Math.random() * Math.min(base, 1000)); + const delay = Math.min(base + jitter, cacheConfig.REDIS_RETRY_MAX_DELAY); logger.info(`@keyv/redis reconnecting... attempt ${retries}, delay ${delay}ms`); return delay; }, diff --git a/packages/api/src/endpoints/anthropic/helpers.ts b/packages/api/src/endpoints/anthropic/helpers.ts index d9b1c1ccfe..d33116a2ac 100644 --- a/packages/api/src/endpoints/anthropic/helpers.ts +++ b/packages/api/src/endpoints/anthropic/helpers.ts @@ -65,7 +65,7 @@ function getClaudeHeaders( /** * Configures reasoning-related options for Claude models. - * Models supporting adaptive thinking (Opus 4.6+, Sonnet 5+) use effort control instead of manual budget_tokens. + * Models supporting adaptive thinking (Opus 4.6+, Sonnet 4.6+) use effort control instead of manual budget_tokens. */ function configureReasoning( anthropicInput: AnthropicClientOptions & { max_tokens?: number }, diff --git a/packages/api/src/endpoints/anthropic/llm.spec.ts b/packages/api/src/endpoints/anthropic/llm.spec.ts index 7734097a77..b945eacb34 100644 --- a/packages/api/src/endpoints/anthropic/llm.spec.ts +++ b/packages/api/src/endpoints/anthropic/llm.spec.ts @@ -121,6 +121,39 @@ describe('getLLMConfig', () => { }); }); + it('should add "context-1m" beta header for claude-sonnet-4-6 model', () => { + const modelOptions = { + model: 'claude-sonnet-4-6', + promptCache: true, + }; + const result = getLLMConfig('test-key', { modelOptions }); + const clientOptions = result.llmConfig.clientOptions; + expect(clientOptions?.defaultHeaders).toBeDefined(); + expect(clientOptions?.defaultHeaders).toHaveProperty('anthropic-beta'); + const defaultHeaders = clientOptions?.defaultHeaders as Record; + expect(defaultHeaders['anthropic-beta']).toBe('context-1m-2025-08-07'); + expect(result.llmConfig.promptCache).toBe(true); + }); + + it('should add "context-1m" beta header for claude-sonnet-4-6 model formats', () => { + const modelVariations = [ + 'claude-sonnet-4-6', + 'claude-sonnet-4-6-20260101', + 'anthropic/claude-sonnet-4-6', + ]; + + modelVariations.forEach((model) => { + const modelOptions = { model, promptCache: true }; + const result = getLLMConfig('test-key', { modelOptions }); + const clientOptions = result.llmConfig.clientOptions; + expect(clientOptions?.defaultHeaders).toBeDefined(); + expect(clientOptions?.defaultHeaders).toHaveProperty('anthropic-beta'); + const defaultHeaders = clientOptions?.defaultHeaders as Record; + expect(defaultHeaders['anthropic-beta']).toBe('context-1m-2025-08-07'); + expect(result.llmConfig.promptCache).toBe(true); + }); + }); + it('should pass promptCache boolean for claude-opus-4-5 model (no beta header needed)', () => { const modelOptions = { model: 'claude-opus-4-5', @@ -963,6 +996,51 @@ describe('getLLMConfig', () => { }); }); + it('should use adaptive thinking for Sonnet 4.6 instead of enabled + budget_tokens', () => { + const result = getLLMConfig('test-key', { + modelOptions: { + model: 'claude-sonnet-4-6', + thinking: true, + thinkingBudget: 10000, + }, + }); + + expect((result.llmConfig.thinking as unknown as { type: string }).type).toBe('adaptive'); + expect(result.llmConfig.thinking).not.toHaveProperty('budget_tokens'); + expect(result.llmConfig.maxTokens).toBe(64000); + }); + + it('should set effort via output_config for Sonnet 4.6', () => { + const result = getLLMConfig('test-key', { + modelOptions: { + model: 'claude-sonnet-4-6', + thinking: true, + effort: AnthropicEffort.high, + }, + }); + + expect((result.llmConfig.thinking as unknown as { type: string }).type).toBe('adaptive'); + expect(result.llmConfig.invocationKwargs).toHaveProperty('output_config'); + expect(result.llmConfig.invocationKwargs?.output_config).toEqual({ + effort: AnthropicEffort.high, + }); + }); + + it('should exclude topP/topK for Sonnet 4.6 with adaptive thinking', () => { + const result = getLLMConfig('test-key', { + modelOptions: { + model: 'claude-sonnet-4-6', + thinking: true, + topP: 0.9, + topK: 40, + }, + }); + + expect((result.llmConfig.thinking as unknown as { type: string }).type).toBe('adaptive'); + expect(result.llmConfig).not.toHaveProperty('topP'); + expect(result.llmConfig).not.toHaveProperty('topK'); + }); + it('should NOT set adaptive thinking or effort for non-adaptive models', () => { const nonAdaptiveModels = [ 'claude-opus-4-5', diff --git a/packages/api/src/files/index.ts b/packages/api/src/files/index.ts index 8397878355..3aedc5ba9d 100644 --- a/packages/api/src/files/index.ts +++ b/packages/api/src/files/index.ts @@ -5,5 +5,6 @@ export * from './filter'; export * from './mistral/crud'; export * from './ocr'; export * from './parse'; +export * from './rag'; export * from './validation'; export * from './text'; diff --git a/packages/api/src/files/rag.spec.ts b/packages/api/src/files/rag.spec.ts new file mode 100644 index 0000000000..9d8ea2d4b3 --- /dev/null +++ b/packages/api/src/files/rag.spec.ts @@ -0,0 +1,150 @@ +jest.mock('@librechat/data-schemas', () => ({ + logger: { + debug: jest.fn(), + warn: jest.fn(), + error: jest.fn(), + }, +})); + +jest.mock('~/crypto/jwt', () => ({ + generateShortLivedToken: jest.fn().mockReturnValue('mock-jwt-token'), +})); + +jest.mock('axios', () => ({ + delete: jest.fn(), + interceptors: { + request: { use: jest.fn(), eject: jest.fn() }, + response: { use: jest.fn(), eject: jest.fn() }, + }, +})); + +import axios from 'axios'; +import { deleteRagFile } from './rag'; +import { logger } from '@librechat/data-schemas'; +import { generateShortLivedToken } from '~/crypto/jwt'; + +const mockedAxios = axios as jest.Mocked; +const mockedLogger = logger as jest.Mocked; +const mockedGenerateShortLivedToken = generateShortLivedToken as jest.MockedFunction< + typeof generateShortLivedToken +>; + +describe('deleteRagFile', () => { + const originalEnv = process.env; + + beforeEach(() => { + jest.clearAllMocks(); + process.env = { ...originalEnv }; + process.env.RAG_API_URL = 'http://localhost:8000'; + }); + + afterEach(() => { + process.env = originalEnv; + }); + + describe('when file is embedded and RAG_API_URL is configured', () => { + it('should delete the document from RAG API successfully', async () => { + const file = { file_id: 'file-123', embedded: true }; + mockedAxios.delete.mockResolvedValueOnce({ status: 200 }); + + const result = await deleteRagFile({ userId: 'user123', file }); + + expect(result).toBe(true); + expect(mockedGenerateShortLivedToken).toHaveBeenCalledWith('user123'); + expect(mockedAxios.delete).toHaveBeenCalledWith('http://localhost:8000/documents', { + headers: { + Authorization: 'Bearer mock-jwt-token', + 'Content-Type': 'application/json', + accept: 'application/json', + }, + data: ['file-123'], + }); + expect(mockedLogger.debug).toHaveBeenCalledWith( + '[deleteRagFile] Successfully deleted document file-123 from RAG API', + ); + }); + + it('should return true and log warning when document is not found (404)', async () => { + const file = { file_id: 'file-not-found', embedded: true }; + const error = new Error('Not Found') as Error & { response?: { status?: number } }; + error.response = { status: 404 }; + mockedAxios.delete.mockRejectedValueOnce(error); + + const result = await deleteRagFile({ userId: 'user123', file }); + + expect(result).toBe(true); + expect(mockedLogger.warn).toHaveBeenCalledWith( + '[deleteRagFile] Document file-not-found not found in RAG API, may have been deleted already', + ); + }); + + it('should return false and log error on other errors', async () => { + const file = { file_id: 'file-error', embedded: true }; + const error = new Error('Server Error') as Error & { response?: { status?: number } }; + error.response = { status: 500 }; + mockedAxios.delete.mockRejectedValueOnce(error); + + const result = await deleteRagFile({ userId: 'user123', file }); + + expect(result).toBe(false); + expect(mockedLogger.error).toHaveBeenCalledWith( + '[deleteRagFile] Error deleting document from RAG API:', + 'Server Error', + ); + }); + }); + + describe('when file is not embedded', () => { + it('should skip RAG deletion and return true', async () => { + const file = { file_id: 'file-123', embedded: false }; + + const result = await deleteRagFile({ userId: 'user123', file }); + + expect(result).toBe(true); + expect(mockedAxios.delete).not.toHaveBeenCalled(); + expect(mockedGenerateShortLivedToken).not.toHaveBeenCalled(); + }); + + it('should skip RAG deletion when embedded is undefined', async () => { + const file = { file_id: 'file-123' }; + + const result = await deleteRagFile({ userId: 'user123', file }); + + expect(result).toBe(true); + expect(mockedAxios.delete).not.toHaveBeenCalled(); + }); + }); + + describe('when RAG_API_URL is not configured', () => { + it('should skip RAG deletion and return true', async () => { + delete process.env.RAG_API_URL; + const file = { file_id: 'file-123', embedded: true }; + + const result = await deleteRagFile({ userId: 'user123', file }); + + expect(result).toBe(true); + expect(mockedAxios.delete).not.toHaveBeenCalled(); + }); + }); + + describe('userId handling', () => { + it('should return false when no userId is provided', async () => { + const file = { file_id: 'file-123', embedded: true }; + + const result = await deleteRagFile({ userId: '', file }); + + expect(result).toBe(false); + expect(mockedLogger.error).toHaveBeenCalledWith('[deleteRagFile] No user ID provided'); + expect(mockedAxios.delete).not.toHaveBeenCalled(); + }); + + it('should return false when userId is undefined', async () => { + const file = { file_id: 'file-123', embedded: true }; + + const result = await deleteRagFile({ userId: undefined as unknown as string, file }); + + expect(result).toBe(false); + expect(mockedLogger.error).toHaveBeenCalledWith('[deleteRagFile] No user ID provided'); + }); + }); +}); diff --git a/packages/api/src/files/rag.ts b/packages/api/src/files/rag.ts new file mode 100644 index 0000000000..7155f62c12 --- /dev/null +++ b/packages/api/src/files/rag.ts @@ -0,0 +1,60 @@ +import axios from 'axios'; +import { logger } from '@librechat/data-schemas'; +import { generateShortLivedToken } from '~/crypto/jwt'; + +interface DeleteRagFileParams { + /** The user ID. Required for authentication. If not provided, the function returns false and logs an error. */ + userId: string; + /** The file object. Must have `embedded` and `file_id` properties. */ + file: { + file_id: string; + embedded?: boolean; + }; +} + +/** + * Deletes embedded document(s) from the RAG API. + * This is a shared utility function used by all file storage strategies + * (S3, Azure, Firebase, Local) to delete RAG embeddings when a file is deleted. + * + * @param params - The parameters object. + * @param params.userId - The user ID for authentication. + * @param params.file - The file object. Must have `embedded` and `file_id` properties. + * @returns Returns true if deletion was successful or skipped, false if there was an error. + */ +export async function deleteRagFile({ userId, file }: DeleteRagFileParams): Promise { + if (!file.embedded || !process.env.RAG_API_URL) { + return true; + } + + if (!userId) { + logger.error('[deleteRagFile] No user ID provided'); + return false; + } + + const jwtToken = generateShortLivedToken(userId); + + try { + await axios.delete(`${process.env.RAG_API_URL}/documents`, { + headers: { + Authorization: `Bearer ${jwtToken}`, + 'Content-Type': 'application/json', + accept: 'application/json', + }, + data: [file.file_id], + }); + logger.debug(`[deleteRagFile] Successfully deleted document ${file.file_id} from RAG API`); + return true; + } catch (error) { + const axiosError = error as { response?: { status?: number }; message?: string }; + if (axiosError.response?.status === 404) { + logger.warn( + `[deleteRagFile] Document ${file.file_id} not found in RAG API, may have been deleted already`, + ); + return true; + } else { + logger.error('[deleteRagFile] Error deleting document from RAG API:', axiosError.message); + return false; + } + } +} diff --git a/packages/api/src/mcp/ConnectionsRepository.ts b/packages/api/src/mcp/ConnectionsRepository.ts index e2c48c88ab..b14af57b29 100644 --- a/packages/api/src/mcp/ConnectionsRepository.ts +++ b/packages/api/src/mcp/ConnectionsRepository.ts @@ -4,6 +4,8 @@ import { MCPConnection } from './connection'; import { MCPServersRegistry } from '~/mcp/registry/MCPServersRegistry'; import type * as t from './types'; +const CONNECT_CONCURRENCY = 3; + /** * Manages MCP connections with lazy loading and reconnection. * Maintains a pool of connections and handles connection lifecycle management. @@ -73,6 +75,7 @@ export class ConnectionsRepository { { serverName, serverConfig, + useSSRFProtection: MCPServersRegistry.getInstance().shouldEnableSSRFProtection(), }, this.oauthOpts, ); @@ -83,9 +86,17 @@ export class ConnectionsRepository { /** Gets or creates connections for multiple servers concurrently */ async getMany(serverNames: string[]): Promise> { - const connectionPromises = serverNames.map(async (name) => [name, await this.get(name)]); - const connections = await Promise.all(connectionPromises); - return new Map((connections as [string, MCPConnection][]).filter((v) => !!v[1])); + const results: [string, MCPConnection | null][] = []; + for (let i = 0; i < serverNames.length; i += CONNECT_CONCURRENCY) { + const batch = serverNames.slice(i, i + CONNECT_CONCURRENCY); + const batchResults = await Promise.all( + batch.map( + async (name): Promise<[string, MCPConnection | null]> => [name, await this.get(name)], + ), + ); + results.push(...batchResults); + } + return new Map(results.filter((v): v is [string, MCPConnection] => v[1] != null)); } /** Returns all currently loaded connections without creating new ones */ diff --git a/packages/api/src/mcp/MCPConnectionFactory.ts b/packages/api/src/mcp/MCPConnectionFactory.ts index bcc63b7500..a8f631614d 100644 --- a/packages/api/src/mcp/MCPConnectionFactory.ts +++ b/packages/api/src/mcp/MCPConnectionFactory.ts @@ -29,6 +29,7 @@ export class MCPConnectionFactory { protected readonly serverConfig: t.MCPOptions; protected readonly logPrefix: string; protected readonly useOAuth: boolean; + protected readonly useSSRFProtection: boolean; // OAuth-related properties (only set when useOAuth is true) protected readonly userId?: string; @@ -72,6 +73,7 @@ export class MCPConnectionFactory { serverConfig: this.serverConfig, userId: this.userId, oauthTokens, + useSSRFProtection: this.useSSRFProtection, }); const oauthHandler = async () => { @@ -146,6 +148,7 @@ export class MCPConnectionFactory { serverConfig: this.serverConfig, userId: this.userId, oauthTokens: null, + useSSRFProtection: this.useSSRFProtection, }); unauthConnection.on('oauthRequired', () => { @@ -189,6 +192,7 @@ export class MCPConnectionFactory { }); this.serverName = basic.serverName; this.useOAuth = !!oauth?.useOAuth; + this.useSSRFProtection = basic.useSSRFProtection === true; this.connectionTimeout = oauth?.connectionTimeout; this.logPrefix = oauth?.user ? `[MCP][${basic.serverName}][${oauth.user.id}]` @@ -213,6 +217,7 @@ export class MCPConnectionFactory { serverConfig: this.serverConfig, userId: this.userId, oauthTokens, + useSSRFProtection: this.useSSRFProtection, }); let cleanupOAuthHandlers: (() => void) | null = null; @@ -293,38 +298,45 @@ export class MCPConnectionFactory { const oauthHandler = async (data: { serverUrl?: string }) => { logger.info(`${this.logPrefix} oauthRequired event received`); - // If we just want to initiate OAuth and return, handle it differently if (this.returnOnOAuth) { try { const config = this.serverConfig; - const { authorizationUrl, flowId, flowMetadata } = - await MCPOAuthHandler.initiateOAuthFlow( - this.serverName, - data.serverUrl || '', - this.userId!, - config?.oauth_headers ?? {}, - config?.oauth, + const flowId = MCPOAuthHandler.generateFlowId(this.userId!, this.serverName); + const existingFlow = await this.flowManager!.getFlowState(flowId, 'mcp_oauth'); + + if (existingFlow?.status === 'PENDING') { + logger.debug( + `${this.logPrefix} PENDING OAuth flow already exists, skipping new initiation`, ); + connection.emit('oauthFailed', new Error('OAuth flow initiated - return early')); + return; + } - // Delete any existing flow state to ensure we start fresh - // This prevents stale codeVerifier issues when re-authenticating - await this.flowManager!.deleteFlow(flowId, 'mcp_oauth'); + const { + authorizationUrl, + flowId: newFlowId, + flowMetadata, + } = await MCPOAuthHandler.initiateOAuthFlow( + this.serverName, + data.serverUrl || '', + this.userId!, + config?.oauth_headers ?? {}, + config?.oauth, + ); - // Create the flow state so the OAuth callback can find it - // We spawn this in the background without waiting for it - // Pass signal so the flow can be aborted if the request is cancelled - this.flowManager!.createFlow(flowId, 'mcp_oauth', flowMetadata, this.signal).catch(() => { - // The OAuth callback will resolve this flow, so we expect it to timeout here - // or it will be aborted if the request is cancelled - both are fine - }); + if (existingFlow) { + await this.flowManager!.deleteFlow(newFlowId, 'mcp_oauth'); + } + + this.flowManager!.createFlow(newFlowId, 'mcp_oauth', flowMetadata, this.signal).catch( + () => {}, + ); if (this.oauthStart) { logger.info(`${this.logPrefix} OAuth flow started, issuing authorization URL`); await this.oauthStart(authorizationUrl); } - // Emit oauthFailed to signal that connection should not proceed - // but OAuth was successfully initiated connection.emit('oauthFailed', new Error('OAuth flow initiated - return early')); return; } catch (error) { @@ -386,11 +398,9 @@ export class MCPConnectionFactory { logger.error(`${this.logPrefix} Failed to establish connection.`); } - // Handles connection attempts with retry logic and OAuth error handling private async connectTo(connection: MCPConnection): Promise { const maxAttempts = 3; let attempts = 0; - let oauthHandled = false; while (attempts < maxAttempts) { try { @@ -403,22 +413,6 @@ export class MCPConnectionFactory { attempts++; if (this.useOAuth && this.isOAuthError(error)) { - // For returnOnOAuth mode, let the event handler (handleOAuthEvents) deal with OAuth - // We just need to stop retrying and let the error propagate - if (this.returnOnOAuth) { - logger.info( - `${this.logPrefix} OAuth required (return on OAuth mode), stopping retries`, - ); - throw error; - } - - // Normal flow - wait for OAuth to complete - if (this.oauthStart && !oauthHandled) { - oauthHandled = true; - logger.info(`${this.logPrefix} Handling OAuth`); - await this.handleOAuthRequired(); - } - // Don't retry on OAuth errors - just throw logger.info(`${this.logPrefix} OAuth required, stopping connection attempts`); throw error; } @@ -494,26 +488,15 @@ export class MCPConnectionFactory { /** Check if there's already an ongoing OAuth flow for this flowId */ const existingFlow = await this.flowManager.getFlowState(flowId, 'mcp_oauth'); - // If any flow exists (PENDING, COMPLETED, FAILED), cancel it and start fresh - // This ensures the user always gets a new OAuth URL instead of waiting for stale flows if (existingFlow) { logger.debug( - `${this.logPrefix} Found existing OAuth flow (status: ${existingFlow.status}), cancelling to start fresh`, + `${this.logPrefix} Found existing OAuth flow (status: ${existingFlow.status}), cleaning up to start fresh`, ); try { - if (existingFlow.status === 'PENDING') { - await this.flowManager.failFlow( - flowId, - 'mcp_oauth', - new Error('Cancelled for new OAuth request'), - ); - } else { - await this.flowManager.deleteFlow(flowId, 'mcp_oauth'); - } + await this.flowManager.deleteFlow(flowId, 'mcp_oauth'); } catch (error) { - logger.warn(`${this.logPrefix} Failed to cancel existing OAuth flow`, error); + logger.warn(`${this.logPrefix} Failed to clean up existing OAuth flow`, error); } - // Continue to start a new flow below } logger.debug(`${this.logPrefix} Initiating new OAuth flow for ${this.serverName}...`); diff --git a/packages/api/src/mcp/MCPManager.ts b/packages/api/src/mcp/MCPManager.ts index 211382c032..cab495774a 100644 --- a/packages/api/src/mcp/MCPManager.ts +++ b/packages/api/src/mcp/MCPManager.ts @@ -102,7 +102,8 @@ export class MCPManager extends UserConnectionManager { serverConfig.requiresOAuth || (serverConfig as t.ParsedServerConfig).oauthMetadata, ); - const basic: t.BasicConnectionOptions = { serverName, serverConfig }; + const useSSRFProtection = MCPServersRegistry.getInstance().shouldEnableSSRFProtection(); + const basic: t.BasicConnectionOptions = { serverName, serverConfig, useSSRFProtection }; if (!useOAuth) { const result = await MCPConnectionFactory.discoverTools(basic); diff --git a/packages/api/src/mcp/UserConnectionManager.ts b/packages/api/src/mcp/UserConnectionManager.ts index 25fc753d6b..e5d94689a0 100644 --- a/packages/api/src/mcp/UserConnectionManager.ts +++ b/packages/api/src/mcp/UserConnectionManager.ts @@ -117,6 +117,7 @@ export abstract class UserConnectionManager { { serverName: serverName, serverConfig: config, + useSSRFProtection: MCPServersRegistry.getInstance().shouldEnableSSRFProtection(), }, { useOAuth: true, diff --git a/packages/api/src/mcp/__tests__/ConnectionsRepository.test.ts b/packages/api/src/mcp/__tests__/ConnectionsRepository.test.ts index e722b38375..4240ba12d6 100644 --- a/packages/api/src/mcp/__tests__/ConnectionsRepository.test.ts +++ b/packages/api/src/mcp/__tests__/ConnectionsRepository.test.ts @@ -24,6 +24,7 @@ jest.mock('../connection'); const mockRegistryInstance = { getServerConfig: jest.fn(), getAllServerConfigs: jest.fn(), + shouldEnableSSRFProtection: jest.fn().mockReturnValue(false), }; jest.mock('../registry/MCPServersRegistry', () => ({ @@ -108,6 +109,7 @@ describe('ConnectionsRepository', () => { { serverName: 'server1', serverConfig: mockServerConfigs.server1, + useSSRFProtection: false, }, undefined, ); @@ -129,6 +131,7 @@ describe('ConnectionsRepository', () => { { serverName: 'server1', serverConfig: mockServerConfigs.server1, + useSSRFProtection: false, }, undefined, ); @@ -167,6 +170,7 @@ describe('ConnectionsRepository', () => { { serverName: 'server1', serverConfig: configWithCachedAt, + useSSRFProtection: false, }, undefined, ); diff --git a/packages/api/src/mcp/__tests__/MCPConnectionFactory.test.ts b/packages/api/src/mcp/__tests__/MCPConnectionFactory.test.ts index 0986188e04..263c84357a 100644 --- a/packages/api/src/mcp/__tests__/MCPConnectionFactory.test.ts +++ b/packages/api/src/mcp/__tests__/MCPConnectionFactory.test.ts @@ -84,6 +84,7 @@ describe('MCPConnectionFactory', () => { serverConfig: mockServerConfig, userId: undefined, oauthTokens: null, + useSSRFProtection: false, }); expect(mockConnectionInstance.connect).toHaveBeenCalled(); }); @@ -125,6 +126,7 @@ describe('MCPConnectionFactory', () => { serverConfig: mockServerConfig, userId: 'user123', oauthTokens: mockTokens, + useSSRFProtection: false, }); }); }); @@ -184,6 +186,7 @@ describe('MCPConnectionFactory', () => { serverConfig: mockServerConfig, userId: 'user123', oauthTokens: null, + useSSRFProtection: false, }); expect(mockLogger.debug).toHaveBeenCalledWith( expect.stringContaining('No existing tokens found or error loading tokens'), @@ -267,7 +270,54 @@ describe('MCPConnectionFactory', () => { ); }); - it('should delete existing flow before creating new OAuth flow to prevent stale codeVerifier', async () => { + it('should skip new OAuth flow initiation when a PENDING flow already exists (returnOnOAuth)', async () => { + const basicOptions = { + serverName: 'test-server', + serverConfig: mockServerConfig, + user: mockUser, + }; + + const oauthOptions: t.OAuthConnectionOptions = { + user: mockUser, + useOAuth: true, + returnOnOAuth: true, + oauthStart: jest.fn(), + flowManager: mockFlowManager, + }; + + mockFlowManager.getFlowState.mockResolvedValue({ + status: 'PENDING', + type: 'mcp_oauth', + metadata: { codeVerifier: 'existing-verifier' }, + createdAt: Date.now(), + }); + mockConnectionInstance.isConnected.mockResolvedValue(false); + + let oauthRequiredHandler: (data: Record) => Promise; + mockConnectionInstance.on.mockImplementation((event, handler) => { + if (event === 'oauthRequired') { + oauthRequiredHandler = handler as (data: Record) => Promise; + } + return mockConnectionInstance; + }); + + try { + await MCPConnectionFactory.create(basicOptions, oauthOptions); + } catch { + // Expected to fail + } + + await oauthRequiredHandler!({ serverUrl: 'https://api.example.com' }); + + expect(mockMCPOAuthHandler.initiateOAuthFlow).not.toHaveBeenCalled(); + expect(mockFlowManager.deleteFlow).not.toHaveBeenCalled(); + expect(mockConnectionInstance.emit).toHaveBeenCalledWith( + 'oauthFailed', + expect.objectContaining({ message: 'OAuth flow initiated - return early' }), + ); + }); + + it('should delete stale flow and create new OAuth flow when existing flow is COMPLETED', async () => { const basicOptions = { serverName: 'test-server', serverConfig: mockServerConfig, @@ -300,6 +350,12 @@ describe('MCPConnectionFactory', () => { }, }; + mockFlowManager.getFlowState.mockResolvedValue({ + status: 'COMPLETED', + type: 'mcp_oauth', + metadata: { codeVerifier: 'old-verifier' }, + createdAt: Date.now() - 60000, + }); mockMCPOAuthHandler.initiateOAuthFlow.mockResolvedValue(mockFlowData); mockFlowManager.deleteFlow.mockResolvedValue(true); mockFlowManager.createFlow.mockRejectedValue(new Error('Timeout expected')); @@ -316,21 +372,17 @@ describe('MCPConnectionFactory', () => { try { await MCPConnectionFactory.create(basicOptions, oauthOptions); } catch { - // Expected to fail due to connection not established + // Expected to fail } await oauthRequiredHandler!({ serverUrl: 'https://api.example.com' }); - // Verify deleteFlow was called with correct parameters expect(mockFlowManager.deleteFlow).toHaveBeenCalledWith('user123:test-server', 'mcp_oauth'); - // Verify deleteFlow was called before createFlow const deleteCallOrder = mockFlowManager.deleteFlow.mock.invocationCallOrder[0]; const createCallOrder = mockFlowManager.createFlow.mock.invocationCallOrder[0]; expect(deleteCallOrder).toBeLessThan(createCallOrder); - // Verify createFlow was called with fresh metadata - // 4th arg is the abort signal (undefined in this test since no signal was provided) expect(mockFlowManager.createFlow).toHaveBeenCalledWith( 'user123:test-server', 'mcp_oauth', diff --git a/packages/api/src/mcp/__tests__/MCPManager.test.ts b/packages/api/src/mcp/__tests__/MCPManager.test.ts index caeb9176d3..bf63a6af3c 100644 --- a/packages/api/src/mcp/__tests__/MCPManager.test.ts +++ b/packages/api/src/mcp/__tests__/MCPManager.test.ts @@ -33,6 +33,7 @@ const mockRegistryInstance = { getServerConfig: jest.fn(), getAllServerConfigs: jest.fn(), getOAuthServers: jest.fn(), + shouldEnableSSRFProtection: jest.fn().mockReturnValue(false), }; jest.mock('~/mcp/registry/MCPServersRegistry', () => ({ diff --git a/packages/api/src/mcp/__tests__/zod.spec.ts b/packages/api/src/mcp/__tests__/zod.spec.ts index 71713389bf..684b6de975 100644 --- a/packages/api/src/mcp/__tests__/zod.spec.ts +++ b/packages/api/src/mcp/__tests__/zod.spec.ts @@ -2,7 +2,12 @@ // zod.spec.ts import { z } from 'zod'; import type { JsonSchemaType } from '@librechat/data-schemas'; -import { resolveJsonSchemaRefs, convertJsonSchemaToZod, convertWithResolvedRefs } from '../zod'; +import { + convertWithResolvedRefs, + convertJsonSchemaToZod, + resolveJsonSchemaRefs, + normalizeJsonSchema, +} from '../zod'; describe('convertJsonSchemaToZod', () => { describe('integer type handling', () => { @@ -206,7 +211,7 @@ describe('convertJsonSchemaToZod', () => { type: 'number' as const, enum: [1, 2, 3, 5, 8, 13], }; - const zodSchema = convertWithResolvedRefs(schema as JsonSchemaType); + const zodSchema = convertWithResolvedRefs(schema as unknown as JsonSchemaType); expect(zodSchema?.parse(1)).toBe(1); expect(zodSchema?.parse(13)).toBe(13); @@ -1599,6 +1604,34 @@ describe('convertJsonSchemaToZod', () => { expect(() => zodSchema?.parse(testData)).not.toThrow(); }); + it('should strip $defs from the resolved output', () => { + const schemaWithDefs = { + type: 'object' as const, + properties: { + item: { $ref: '#/$defs/Item' }, + }, + $defs: { + Item: { + type: 'object' as const, + properties: { + name: { type: 'string' as const }, + }, + }, + }, + }; + + const resolved = resolveJsonSchemaRefs(schemaWithDefs); + // $defs should NOT be in the output — it was only used for resolution + expect(resolved).not.toHaveProperty('$defs'); + // The $ref should be resolved inline + expect(resolved.properties?.item).toEqual({ + type: 'object', + properties: { + name: { type: 'string' }, + }, + }); + }); + it('should handle various edge cases safely', () => { // Test with null/undefined expect(resolveJsonSchemaRefs(null as any)).toBeNull(); @@ -2002,3 +2035,329 @@ describe('convertJsonSchemaToZod', () => { }); }); }); + +describe('normalizeJsonSchema', () => { + it('should convert const to enum', () => { + const schema = { type: 'string', const: 'hello' } as any; + const result = normalizeJsonSchema(schema); + expect(result).toEqual({ type: 'string', enum: ['hello'] }); + expect(result).not.toHaveProperty('const'); + }); + + it('should preserve existing enum when const is also present', () => { + const schema = { type: 'string', const: 'hello', enum: ['hello', 'world'] } as any; + const result = normalizeJsonSchema(schema); + expect(result).toEqual({ type: 'string', enum: ['hello', 'world'] }); + expect(result).not.toHaveProperty('const'); + }); + + it('should handle non-string const values (number, boolean, null)', () => { + expect(normalizeJsonSchema({ type: 'number', const: 42 } as any)).toEqual({ + type: 'number', + enum: [42], + }); + expect(normalizeJsonSchema({ type: 'boolean', const: true } as any)).toEqual({ + type: 'boolean', + enum: [true], + }); + expect(normalizeJsonSchema({ type: 'string', const: null } as any)).toEqual({ + type: 'string', + enum: [null], + }); + }); + + it('should recursively normalize nested object properties', () => { + const schema = { + type: 'object', + properties: { + mode: { type: 'string', const: 'advanced' }, + count: { type: 'number', const: 5 }, + name: { type: 'string', description: 'A name' }, + }, + } as any; + + const result = normalizeJsonSchema(schema); + expect(result.properties.mode).toEqual({ type: 'string', enum: ['advanced'] }); + expect(result.properties.count).toEqual({ type: 'number', enum: [5] }); + expect(result.properties.name).toEqual({ type: 'string', description: 'A name' }); + }); + + it('should normalize inside oneOf/anyOf/allOf arrays', () => { + const schema = { + type: 'object', + oneOf: [ + { type: 'object', properties: { kind: { type: 'string', const: 'A' } } }, + { type: 'object', properties: { kind: { type: 'string', const: 'B' } } }, + ], + anyOf: [{ type: 'string', const: 'x' }], + allOf: [{ type: 'number', const: 1 }], + } as any; + + const result = normalizeJsonSchema(schema); + expect(result.oneOf[0].properties.kind).toEqual({ type: 'string', enum: ['A'] }); + expect(result.oneOf[1].properties.kind).toEqual({ type: 'string', enum: ['B'] }); + expect(result.anyOf[0]).toEqual({ type: 'string', enum: ['x'] }); + expect(result.allOf[0]).toEqual({ type: 'number', enum: [1] }); + }); + + it('should normalize array items with const', () => { + const schema = { + type: 'array', + items: { type: 'string', const: 'fixed' }, + } as any; + + const result = normalizeJsonSchema(schema); + expect(result.items).toEqual({ type: 'string', enum: ['fixed'] }); + }); + + it('should normalize additionalProperties with const', () => { + const schema = { + type: 'object', + additionalProperties: { type: 'string', const: 'val' }, + } as any; + + const result = normalizeJsonSchema(schema); + expect(result.additionalProperties).toEqual({ type: 'string', enum: ['val'] }); + }); + + it('should handle null, undefined, and primitive inputs safely', () => { + expect(normalizeJsonSchema(null as any)).toBeNull(); + expect(normalizeJsonSchema(undefined as any)).toBeUndefined(); + expect(normalizeJsonSchema('string' as any)).toBe('string'); + expect(normalizeJsonSchema(42 as any)).toBe(42); + expect(normalizeJsonSchema(true as any)).toBe(true); + }); + + it('should be a no-op when no const is present', () => { + const schema = { + type: 'object', + properties: { + name: { type: 'string', description: 'Name' }, + age: { type: 'number' }, + tags: { type: 'array', items: { type: 'string' } }, + }, + required: ['name'], + } as any; + + const result = normalizeJsonSchema(schema); + expect(result).toEqual(schema); + }); + + it('should handle a Tavily-like schema pattern with const', () => { + const schema = { + type: 'object', + properties: { + query: { + type: 'string', + description: 'The search query', + }, + search_depth: { + type: 'string', + const: 'advanced', + description: 'The depth of the search', + }, + topic: { + type: 'string', + enum: ['general', 'news'], + description: 'The search topic', + }, + include_answer: { + type: 'boolean', + const: true, + }, + max_results: { + type: 'number', + const: 5, + }, + }, + required: ['query'], + } as any; + + const result = normalizeJsonSchema(schema); + + // const fields should be converted to enum + expect(result.properties.search_depth).toEqual({ + type: 'string', + enum: ['advanced'], + description: 'The depth of the search', + }); + expect(result.properties.include_answer).toEqual({ + type: 'boolean', + enum: [true], + }); + expect(result.properties.max_results).toEqual({ + type: 'number', + enum: [5], + }); + + // Existing enum should be preserved + expect(result.properties.topic).toEqual({ + type: 'string', + enum: ['general', 'news'], + description: 'The search topic', + }); + + // Non-const fields should be unchanged + expect(result.properties.query).toEqual({ + type: 'string', + description: 'The search query', + }); + + // Top-level fields preserved + expect(result.required).toEqual(['query']); + expect(result.type).toBe('object'); + }); + + it('should handle arrays at the top level', () => { + const schemas = [ + { type: 'string', const: 'a' }, + { type: 'number', const: 1 }, + ] as any; + + const result = normalizeJsonSchema(schemas); + expect(result).toEqual([ + { type: 'string', enum: ['a'] }, + { type: 'number', enum: [1] }, + ]); + }); + + it('should strip vendor extension fields (x-* prefixed keys)', () => { + const schema = { + type: 'object', + properties: { + travelMode: { + type: 'string', + enum: ['DRIVE', 'BICYCLE', 'TRANSIT', 'WALK'], + 'x-google-enum-descriptions': ['By car', 'By bicycle', 'By public transit', 'By walking'], + description: 'Mode of travel', + }, + }, + } as any; + + const result = normalizeJsonSchema(schema); + expect(result.properties.travelMode).toEqual({ + type: 'string', + enum: ['DRIVE', 'BICYCLE', 'TRANSIT', 'WALK'], + description: 'Mode of travel', + }); + expect(result.properties.travelMode).not.toHaveProperty('x-google-enum-descriptions'); + }); + + it('should strip x-* fields at all nesting levels', () => { + const schema = { + type: 'object', + 'x-custom-root': true, + properties: { + outer: { + type: 'object', + 'x-custom-outer': 'value', + properties: { + inner: { + type: 'string', + 'x-custom-inner': 42, + }, + }, + }, + arr: { + type: 'array', + items: { + type: 'string', + 'x-item-meta': 'something', + }, + }, + }, + } as any; + + const result = normalizeJsonSchema(schema); + expect(result).not.toHaveProperty('x-custom-root'); + expect(result.properties.outer).not.toHaveProperty('x-custom-outer'); + expect(result.properties.outer.properties.inner).not.toHaveProperty('x-custom-inner'); + expect(result.properties.arr.items).not.toHaveProperty('x-item-meta'); + // Standard fields should be preserved + expect(result.type).toBe('object'); + expect(result.properties.outer.type).toBe('object'); + expect(result.properties.outer.properties.inner.type).toBe('string'); + expect(result.properties.arr.items.type).toBe('string'); + }); + + it('should strip $defs and definitions as a safety net', () => { + const schema = { + type: 'object', + properties: { + name: { type: 'string' }, + }, + $defs: { + SomeType: { type: 'string' }, + }, + } as any; + + const result = normalizeJsonSchema(schema); + expect(result).not.toHaveProperty('$defs'); + expect(result.type).toBe('object'); + expect(result.properties.name).toEqual({ type: 'string' }); + }); + + it('should strip x-* fields inside oneOf/anyOf/allOf', () => { + const schema = { + type: 'object', + oneOf: [ + { type: 'string', 'x-meta': 'a' }, + { type: 'number', 'x-meta': 'b' }, + ], + } as any; + + const result = normalizeJsonSchema(schema); + expect(result.oneOf[0]).toEqual({ type: 'string' }); + expect(result.oneOf[1]).toEqual({ type: 'number' }); + }); + + it('should handle a Google Maps MCP-like schema with $defs and x-google-enum-descriptions', () => { + const schema = { + type: 'object', + properties: { + origin: { type: 'string', description: 'Starting address' }, + destination: { type: 'string', description: 'Ending address' }, + travelMode: { + type: 'string', + enum: ['DRIVE', 'BICYCLE', 'TRANSIT', 'WALK'], + 'x-google-enum-descriptions': ['By car', 'By bicycle', 'By public transit', 'By walking'], + }, + waypoints: { + type: 'array', + items: { $ref: '#/$defs/Waypoint' }, + }, + }, + required: ['origin', 'destination'], + $defs: { + Waypoint: { + type: 'object', + properties: { + location: { type: 'string' }, + stopover: { type: 'boolean' }, + }, + }, + }, + } as any; + + // First resolve refs, then normalize + const resolved = resolveJsonSchemaRefs(schema); + const result = normalizeJsonSchema(resolved); + + // $defs should be stripped (by both resolveJsonSchemaRefs and normalizeJsonSchema) + expect(result).not.toHaveProperty('$defs'); + // x-google-enum-descriptions should be stripped + expect(result.properties.travelMode).not.toHaveProperty('x-google-enum-descriptions'); + // $ref should be resolved inline + expect(result.properties.waypoints.items).not.toHaveProperty('$ref'); + expect(result.properties.waypoints.items).toEqual({ + type: 'object', + properties: { + location: { type: 'string' }, + stopover: { type: 'boolean' }, + }, + }); + // Standard fields preserved + expect(result.properties.travelMode.enum).toEqual(['DRIVE', 'BICYCLE', 'TRANSIT', 'WALK']); + expect(result.properties.origin).toEqual({ type: 'string', description: 'Starting address' }); + }); +}); diff --git a/packages/api/src/mcp/connection.ts b/packages/api/src/mcp/connection.ts index b954a2e839..88dbb19b6f 100644 --- a/packages/api/src/mcp/connection.ts +++ b/packages/api/src/mcp/connection.ts @@ -20,6 +20,7 @@ import type { import type { MCPOAuthTokens } from './oauth/types'; import { withTimeout } from '~/utils/promise'; import type * as t from './types'; +import { createSSRFSafeUndiciConnect, resolveHostnameSSRF } from '~/auth'; import { sanitizeUrlForLogging } from './utils'; import { mcpConfig } from './mcpConfig'; @@ -213,6 +214,7 @@ interface MCPConnectionParams { serverConfig: t.MCPOptions; userId?: string; oauthTokens?: MCPOAuthTokens | null; + useSSRFProtection?: boolean; } export class MCPConnection extends EventEmitter { @@ -233,6 +235,7 @@ export class MCPConnection extends EventEmitter { private oauthTokens?: MCPOAuthTokens | null; private requestHeaders?: Record | null; private oauthRequired = false; + private readonly useSSRFProtection: boolean; iconPath?: string; timeout?: number; url?: string; @@ -263,6 +266,7 @@ export class MCPConnection extends EventEmitter { this.options = params.serverConfig; this.serverName = params.serverName; this.userId = params.userId; + this.useSSRFProtection = params.useSSRFProtection === true; this.iconPath = params.serverConfig.iconPath; this.timeout = params.serverConfig.timeout; this.lastPingTime = Date.now(); @@ -301,6 +305,7 @@ export class MCPConnection extends EventEmitter { getHeaders: () => Record | null | undefined, timeout?: number, ): (input: UndiciRequestInfo, init?: UndiciRequestInit) => Promise { + const ssrfConnect = this.useSSRFProtection ? createSSRFSafeUndiciConnect() : undefined; return function customFetch( input: UndiciRequestInfo, init?: UndiciRequestInit, @@ -310,6 +315,7 @@ export class MCPConnection extends EventEmitter { const agent = new Agent({ bodyTimeout: effectiveTimeout, headersTimeout: effectiveTimeout, + ...(ssrfConnect != null ? { connect: ssrfConnect } : {}), }); if (!requestHeaders) { return undiciFetch(input, { ...init, dispatcher: agent }); @@ -342,7 +348,7 @@ export class MCPConnection extends EventEmitter { logger.error(`${this.getLogPrefix()} ${errorContext}: ${errorMessage}`); } - private constructTransport(options: t.MCPOptions): Transport { + private async constructTransport(options: t.MCPOptions): Promise { try { let type: t.MCPOptions['type']; if (isStdioOptions(options)) { @@ -378,6 +384,15 @@ export class MCPConnection extends EventEmitter { throw new Error('Invalid options for websocket transport.'); } this.url = options.url; + if (this.useSSRFProtection) { + const wsHostname = new URL(options.url).hostname; + const isSSRF = await resolveHostnameSSRF(wsHostname); + if (isSSRF) { + throw new Error( + `SSRF protection: WebSocket host "${wsHostname}" resolved to a private/reserved IP address`, + ); + } + } return new WebSocketClientTransport(new URL(options.url)); case 'sse': { @@ -402,6 +417,7 @@ export class MCPConnection extends EventEmitter { * The connect timeout is extended because proxies may delay initial response. */ const sseTimeout = this.timeout || SSE_CONNECT_TIMEOUT; + const ssrfConnect = this.useSSRFProtection ? createSSRFSafeUndiciConnect() : undefined; const transport = new SSEClientTransport(url, { requestInit: { /** User/OAuth headers override SSE defaults */ @@ -420,6 +436,7 @@ export class MCPConnection extends EventEmitter { /** Extended keep-alive for long-lived SSE connections */ keepAliveTimeout: sseTimeout, keepAliveMaxTimeout: sseTimeout * 2, + ...(ssrfConnect != null ? { connect: ssrfConnect } : {}), }); return undiciFetch(url, { ...init, @@ -542,7 +559,11 @@ export class MCPConnection extends EventEmitter { } this.isReconnecting = true; - const backoffDelay = (attempt: number) => Math.min(1000 * Math.pow(2, attempt), 30000); + const backoffDelay = (attempt: number) => { + const base = Math.min(1000 * Math.pow(2, attempt), 30000); + const jitter = Math.floor(Math.random() * 1000); // up to 1s of random jitter + return base + jitter; + }; try { while ( @@ -629,7 +650,7 @@ export class MCPConnection extends EventEmitter { } } - this.transport = this.constructTransport(this.options); + this.transport = await this.constructTransport(this.options); this.setupTransportDebugHandlers(); const connectTimeout = this.options.initTimeout ?? 120000; diff --git a/packages/api/src/mcp/oauth/OAuthReconnectionManager.test.ts b/packages/api/src/mcp/oauth/OAuthReconnectionManager.test.ts index 4b2e82a05f..d3447eaeb8 100644 --- a/packages/api/src/mcp/oauth/OAuthReconnectionManager.test.ts +++ b/packages/api/src/mcp/oauth/OAuthReconnectionManager.test.ts @@ -336,6 +336,69 @@ describe('OAuthReconnectionManager', () => { }); }); + describe('reconnection staggering', () => { + let reconnectionTracker: OAuthReconnectionTracker; + + beforeEach(async () => { + jest.useFakeTimers(); + reconnectionTracker = new OAuthReconnectionTracker(); + reconnectionManager = await OAuthReconnectionManager.createInstance( + flowManager, + tokenMethods, + reconnectionTracker, + ); + }); + + afterEach(() => { + jest.useRealTimers(); + }); + + it('should stagger reconnection attempts for multiple servers', async () => { + const userId = 'user-123'; + const oauthServers = new Set(['server1', 'server2', 'server3']); + (mockRegistryInstance.getOAuthServers as jest.Mock).mockResolvedValue(oauthServers); + + // All servers have valid tokens and are not connected + tokenMethods.findToken.mockImplementation(async ({ identifier }) => { + return { + userId, + identifier, + expiresAt: new Date(Date.now() + 3600000), + } as unknown as MCPOAuthTokens; + }); + + const mockNewConnection = { + isConnected: jest.fn().mockResolvedValue(true), + disconnect: jest.fn(), + }; + mockMCPManager.getUserConnection.mockResolvedValue( + mockNewConnection as unknown as MCPConnection, + ); + (mockRegistryInstance.getServerConfig as jest.Mock).mockResolvedValue( + {} as unknown as MCPOptions, + ); + + await reconnectionManager.reconnectServers(userId); + + // Only the first server should have been attempted immediately + expect(mockMCPManager.getUserConnection).toHaveBeenCalledTimes(1); + expect(mockMCPManager.getUserConnection).toHaveBeenCalledWith( + expect.objectContaining({ serverName: 'server1' }), + ); + + // After advancing all timers, all servers should have been attempted + await jest.runAllTimersAsync(); + + expect(mockMCPManager.getUserConnection).toHaveBeenCalledTimes(3); + expect(mockMCPManager.getUserConnection).toHaveBeenCalledWith( + expect.objectContaining({ serverName: 'server2' }), + ); + expect(mockMCPManager.getUserConnection).toHaveBeenCalledWith( + expect.objectContaining({ serverName: 'server3' }), + ); + }); + }); + describe('reconnection timeout behavior', () => { let reconnectionTracker: OAuthReconnectionTracker; diff --git a/packages/api/src/mcp/oauth/OAuthReconnectionManager.ts b/packages/api/src/mcp/oauth/OAuthReconnectionManager.ts index 186f3652e3..ca9ce5c71f 100644 --- a/packages/api/src/mcp/oauth/OAuthReconnectionManager.ts +++ b/packages/api/src/mcp/oauth/OAuthReconnectionManager.ts @@ -7,6 +7,7 @@ import { MCPManager } from '~/mcp/MCPManager'; import { MCPServersRegistry } from '~/mcp/registry/MCPServersRegistry'; const DEFAULT_CONNECTION_TIMEOUT_MS = 10_000; // ms +const RECONNECT_STAGGER_MS = 500; // ms between each server reconnection export class OAuthReconnectionManager { private static instance: OAuthReconnectionManager | null = null; @@ -84,9 +85,14 @@ export class OAuthReconnectionManager { this.reconnectionsTracker.setActive(userId, serverName); } - // 3. attempt to reconnect the servers - for (const serverName of serversToReconnect) { - void this.tryReconnect(userId, serverName); + // 3. attempt to reconnect the servers with staggered delays to avoid connection storms + for (let i = 0; i < serversToReconnect.length; i++) { + const serverName = serversToReconnect[i]; + if (i === 0) { + void this.tryReconnect(userId, serverName); + } else { + setTimeout(() => void this.tryReconnect(userId, serverName), i * RECONNECT_STAGGER_MS); + } } } diff --git a/packages/api/src/mcp/registry/MCPServerInspector.ts b/packages/api/src/mcp/registry/MCPServerInspector.ts index 2263c10422..50da9cdc25 100644 --- a/packages/api/src/mcp/registry/MCPServerInspector.ts +++ b/packages/api/src/mcp/registry/MCPServerInspector.ts @@ -18,6 +18,7 @@ export class MCPServerInspector { private readonly serverName: string, private readonly config: t.ParsedServerConfig, private connection: MCPConnection | undefined, + private readonly useSSRFProtection: boolean = false, ) {} /** @@ -42,8 +43,9 @@ export class MCPServerInspector { throw new MCPDomainNotAllowedError(domain ?? 'unknown'); } + const useSSRFProtection = !Array.isArray(allowedDomains) || allowedDomains.length === 0; const start = Date.now(); - const inspector = new MCPServerInspector(serverName, rawConfig, connection); + const inspector = new MCPServerInspector(serverName, rawConfig, connection, useSSRFProtection); await inspector.inspectServer(); inspector.config.initDuration = Date.now() - start; return inspector.config; @@ -59,6 +61,7 @@ export class MCPServerInspector { this.connection = await MCPConnectionFactory.create({ serverName: this.serverName, serverConfig: this.config, + useSSRFProtection: this.useSSRFProtection, }); } diff --git a/packages/api/src/mcp/registry/MCPServersRegistry.ts b/packages/api/src/mcp/registry/MCPServersRegistry.ts index 801b3957a0..0264a8ed7a 100644 --- a/packages/api/src/mcp/registry/MCPServersRegistry.ts +++ b/packages/api/src/mcp/registry/MCPServersRegistry.ts @@ -77,6 +77,15 @@ export class MCPServersRegistry { return MCPServersRegistry.instance; } + public getAllowedDomains(): string[] | null | undefined { + return this.allowedDomains; + } + + /** Returns true when no explicit allowedDomains allowlist is configured, enabling SSRF TOCTOU protection */ + public shouldEnableSSRFProtection(): boolean { + return !Array.isArray(this.allowedDomains) || this.allowedDomains.length === 0; + } + public async getServerConfig( serverName: string, userId?: string, diff --git a/packages/api/src/mcp/registry/__tests__/MCPServerInspector.test.ts b/packages/api/src/mcp/registry/__tests__/MCPServerInspector.test.ts index 72bf57857e..42dc4d2005 100644 --- a/packages/api/src/mcp/registry/__tests__/MCPServerInspector.test.ts +++ b/packages/api/src/mcp/registry/__tests__/MCPServerInspector.test.ts @@ -276,6 +276,7 @@ describe('MCPServerInspector', () => { expect(MCPConnectionFactory.create).toHaveBeenCalledWith({ serverName: 'test_server', serverConfig: expect.objectContaining({ type: 'stdio', command: 'node' }), + useSSRFProtection: true, }); // Verify temporary connection was disconnected diff --git a/packages/api/src/mcp/types/index.ts b/packages/api/src/mcp/types/index.ts index 46447c6687..270131036b 100644 --- a/packages/api/src/mcp/types/index.ts +++ b/packages/api/src/mcp/types/index.ts @@ -166,6 +166,7 @@ export type AddServerResult = { export interface BasicConnectionOptions { serverName: string; serverConfig: MCPOptions; + useSSRFProtection?: boolean; } export interface OAuthConnectionOptions { diff --git a/packages/api/src/mcp/zod.ts b/packages/api/src/mcp/zod.ts index a218392755..53cb6e71a8 100644 --- a/packages/api/src/mcp/zod.ts +++ b/packages/api/src/mcp/zod.ts @@ -203,9 +203,9 @@ export function resolveJsonSchemaRefs>( const result: Record = {}; for (const [key, value] of Object.entries(schema)) { - // Skip $defs/definitions at root level to avoid infinite recursion - if ((key === '$defs' || key === 'definitions') && !visited.size) { - result[key] = value; + // Skip $defs/definitions — they are only used for resolving $ref and + // should not appear in the resolved output (e.g. Google/Gemini API rejects them). + if (key === '$defs' || key === 'definitions') { continue; } @@ -248,6 +248,80 @@ export function resolveJsonSchemaRefs>( return result as T; } +/** + * Recursively normalizes a JSON schema for LLM API compatibility. + * + * Transformations applied: + * - Converts `const` values to `enum` arrays (Gemini/Vertex AI rejects `const`) + * - Strips vendor extension fields (`x-*` prefixed keys, e.g. `x-google-enum-descriptions`) + * - Strips leftover `$defs`/`definitions` blocks that may survive ref resolution + * + * @param schema - The JSON schema to normalize + * @returns The normalized schema + */ +export function normalizeJsonSchema>(schema: T): T { + if (!schema || typeof schema !== 'object') { + return schema; + } + + if (Array.isArray(schema)) { + return schema.map((item) => + item && typeof item === 'object' ? normalizeJsonSchema(item) : item, + ) as unknown as T; + } + + const result: Record = {}; + + for (const [key, value] of Object.entries(schema)) { + // Strip vendor extension fields (e.g. x-google-enum-descriptions) — + // these are valid in JSON Schema but rejected by Google/Gemini API. + if (key.startsWith('x-')) { + continue; + } + + // Strip leftover $defs/definitions (should already be resolved by resolveJsonSchemaRefs, + // but strip as a safety net for schemas that bypass ref resolution). + if (key === '$defs' || key === 'definitions') { + continue; + } + + if (key === 'const' && !('enum' in schema)) { + result['enum'] = [value]; + continue; + } + + if (key === 'const' && 'enum' in schema) { + // Skip `const` when `enum` already exists + continue; + } + + if (key === 'properties' && value && typeof value === 'object' && !Array.isArray(value)) { + const newProps: Record = {}; + for (const [propKey, propValue] of Object.entries(value as Record)) { + newProps[propKey] = + propValue && typeof propValue === 'object' + ? normalizeJsonSchema(propValue as Record) + : propValue; + } + result[key] = newProps; + } else if ( + (key === 'items' || key === 'additionalProperties') && + value && + typeof value === 'object' + ) { + result[key] = normalizeJsonSchema(value as Record); + } else if ((key === 'oneOf' || key === 'anyOf' || key === 'allOf') && Array.isArray(value)) { + result[key] = value.map((item) => + item && typeof item === 'object' ? normalizeJsonSchema(item) : item, + ); + } else { + result[key] = value; + } + } + + return result as T; +} + /** * Converts a JSON Schema to a Zod schema. * diff --git a/packages/api/src/middleware/__tests__/concurrency.cache_integration.spec.ts b/packages/api/src/middleware/__tests__/concurrency.cache_integration.spec.ts new file mode 100644 index 0000000000..4c29fdad55 --- /dev/null +++ b/packages/api/src/middleware/__tests__/concurrency.cache_integration.spec.ts @@ -0,0 +1,258 @@ +import type { Redis, Cluster } from 'ioredis'; + +/** + * Integration tests for concurrency middleware atomic Lua scripts. + * + * Tests that the Lua-based check-and-increment / decrement operations + * are truly atomic and eliminate the INCR+check+DECR race window. + * + * Run with: USE_REDIS=true npx jest --config packages/api/jest.config.js concurrency.cache_integration + */ +describe('Concurrency Middleware Integration Tests', () => { + let originalEnv: NodeJS.ProcessEnv; + let ioredisClient: Redis | Cluster | null = null; + let checkAndIncrementPendingRequest: ( + userId: string, + ) => Promise<{ allowed: boolean; pendingRequests: number; limit: number }>; + let decrementPendingRequest: (userId: string) => Promise; + const testPrefix = 'Concurrency-Integration-Test'; + + beforeAll(async () => { + originalEnv = { ...process.env }; + + process.env.USE_REDIS = process.env.USE_REDIS ?? 'true'; + process.env.USE_REDIS_CLUSTER = process.env.USE_REDIS_CLUSTER ?? 'false'; + process.env.REDIS_URI = process.env.REDIS_URI ?? 'redis://127.0.0.1:6379'; + process.env.REDIS_KEY_PREFIX = testPrefix; + process.env.REDIS_PING_INTERVAL = '0'; + process.env.REDIS_RETRY_MAX_ATTEMPTS = '5'; + process.env.LIMIT_CONCURRENT_MESSAGES = 'true'; + process.env.CONCURRENT_MESSAGE_MAX = '2'; + + jest.resetModules(); + + const { ioredisClient: client } = await import('../../cache/redisClients'); + ioredisClient = client; + + if (!ioredisClient) { + console.warn('Redis not available, skipping integration tests'); + return; + } + + // Import concurrency module after Redis client is available + const concurrency = await import('../concurrency'); + checkAndIncrementPendingRequest = concurrency.checkAndIncrementPendingRequest; + decrementPendingRequest = concurrency.decrementPendingRequest; + }); + + afterEach(async () => { + if (!ioredisClient) { + return; + } + + try { + const keys = await ioredisClient.keys(`${testPrefix}*`); + if (keys.length > 0) { + await Promise.all(keys.map((key) => ioredisClient!.del(key))); + } + } catch (error) { + console.warn('Error cleaning up test keys:', error); + } + }); + + afterAll(async () => { + if (ioredisClient) { + try { + await ioredisClient.quit(); + } catch { + try { + ioredisClient.disconnect(); + } catch { + // Ignore + } + } + } + process.env = originalEnv; + }); + + describe('Atomic Check and Increment', () => { + test('should allow requests within the concurrency limit', async () => { + if (!ioredisClient) { + return; + } + + const userId = `user-allow-${Date.now()}`; + + // First request - should be allowed (count = 1, limit = 2) + const result1 = await checkAndIncrementPendingRequest(userId); + expect(result1.allowed).toBe(true); + expect(result1.pendingRequests).toBe(1); + expect(result1.limit).toBe(2); + + // Second request - should be allowed (count = 2, limit = 2) + const result2 = await checkAndIncrementPendingRequest(userId); + expect(result2.allowed).toBe(true); + expect(result2.pendingRequests).toBe(2); + }); + + test('should reject requests over the concurrency limit', async () => { + if (!ioredisClient) { + return; + } + + const userId = `user-reject-${Date.now()}`; + + // Fill up to the limit + await checkAndIncrementPendingRequest(userId); + await checkAndIncrementPendingRequest(userId); + + // Third request - should be rejected (count would be 3, limit = 2) + const result = await checkAndIncrementPendingRequest(userId); + expect(result.allowed).toBe(false); + expect(result.pendingRequests).toBe(3); // Reports the count that was over-limit + }); + + test('should not leave stale counter after rejection (atomic rollback)', async () => { + if (!ioredisClient) { + return; + } + + const userId = `user-rollback-${Date.now()}`; + + // Fill up to the limit + await checkAndIncrementPendingRequest(userId); + await checkAndIncrementPendingRequest(userId); + + // Attempt over-limit (should be rejected and atomically rolled back) + const rejected = await checkAndIncrementPendingRequest(userId); + expect(rejected.allowed).toBe(false); + + // The key value should still be 2, not 3 — verify the Lua script decremented back + const key = `PENDING_REQ:${userId}`; + const rawValue = await ioredisClient.get(key); + expect(rawValue).toBe('2'); + }); + + test('should handle concurrent requests atomically (no over-admission)', async () => { + if (!ioredisClient) { + return; + } + + const userId = `user-concurrent-${Date.now()}`; + + // Fire 20 concurrent requests for the same user (limit = 2) + const results = await Promise.all( + Array.from({ length: 20 }, () => checkAndIncrementPendingRequest(userId)), + ); + + const allowed = results.filter((r) => r.allowed); + const rejected = results.filter((r) => !r.allowed); + + // Exactly 2 should be allowed (the concurrency limit) + expect(allowed.length).toBe(2); + expect(rejected.length).toBe(18); + + // The key value should be exactly 2 after all atomic operations + const key = `PENDING_REQ:${userId}`; + const rawValue = await ioredisClient.get(key); + expect(rawValue).toBe('2'); + + // Clean up + await decrementPendingRequest(userId); + await decrementPendingRequest(userId); + }); + }); + + describe('Atomic Decrement', () => { + test('should decrement pending requests', async () => { + if (!ioredisClient) { + return; + } + + const userId = `user-decrement-${Date.now()}`; + + await checkAndIncrementPendingRequest(userId); + await checkAndIncrementPendingRequest(userId); + + // Decrement once + await decrementPendingRequest(userId); + + const key = `PENDING_REQ:${userId}`; + const rawValue = await ioredisClient.get(key); + expect(rawValue).toBe('1'); + }); + + test('should clean up key when count reaches zero', async () => { + if (!ioredisClient) { + return; + } + + const userId = `user-cleanup-${Date.now()}`; + + await checkAndIncrementPendingRequest(userId); + await decrementPendingRequest(userId); + + // Key should be deleted (not left as "0") + const key = `PENDING_REQ:${userId}`; + const exists = await ioredisClient.exists(key); + expect(exists).toBe(0); + }); + + test('should clean up key on double-decrement (negative protection)', async () => { + if (!ioredisClient) { + return; + } + + const userId = `user-double-decr-${Date.now()}`; + + await checkAndIncrementPendingRequest(userId); + await decrementPendingRequest(userId); + await decrementPendingRequest(userId); // Double-decrement + + // Key should be deleted, not negative + const key = `PENDING_REQ:${userId}`; + const exists = await ioredisClient.exists(key); + expect(exists).toBe(0); + }); + + test('should allow new requests after decrement frees a slot', async () => { + if (!ioredisClient) { + return; + } + + const userId = `user-free-slot-${Date.now()}`; + + // Fill to limit + await checkAndIncrementPendingRequest(userId); + await checkAndIncrementPendingRequest(userId); + + // Verify at limit + const atLimit = await checkAndIncrementPendingRequest(userId); + expect(atLimit.allowed).toBe(false); + + // Free a slot + await decrementPendingRequest(userId); + + // Should now be allowed again + const allowed = await checkAndIncrementPendingRequest(userId); + expect(allowed.allowed).toBe(true); + expect(allowed.pendingRequests).toBe(2); + }); + }); + + describe('TTL Behavior', () => { + test('should set TTL on the concurrency key', async () => { + if (!ioredisClient) { + return; + } + + const userId = `user-ttl-${Date.now()}`; + await checkAndIncrementPendingRequest(userId); + + const key = `PENDING_REQ:${userId}`; + const ttl = await ioredisClient.ttl(key); + expect(ttl).toBeGreaterThan(0); + expect(ttl).toBeLessThanOrEqual(60); + }); + }); +}); diff --git a/packages/api/src/middleware/concurrency.ts b/packages/api/src/middleware/concurrency.ts index 92ac8b7d46..22302e79d0 100644 --- a/packages/api/src/middleware/concurrency.ts +++ b/packages/api/src/middleware/concurrency.ts @@ -9,6 +9,40 @@ const LIMIT_CONCURRENT_MESSAGES = process.env.LIMIT_CONCURRENT_MESSAGES; const CONCURRENT_MESSAGE_MAX = math(process.env.CONCURRENT_MESSAGE_MAX, 2); const CONCURRENT_VIOLATION_SCORE = math(process.env.CONCURRENT_VIOLATION_SCORE, 1); +/** + * Lua script for atomic check-and-increment. + * Increments the key, sets TTL, and if over limit decrements back. + * Returns positive count if allowed, negative count if rejected. + * Single round-trip, fully atomic — eliminates the INCR/check/DECR race window. + */ +const CHECK_AND_INCREMENT_SCRIPT = ` +local key = KEYS[1] +local limit = tonumber(ARGV[1]) +local ttl = tonumber(ARGV[2]) +local current = redis.call('INCR', key) +redis.call('EXPIRE', key, ttl) +if current > limit then + redis.call('DECR', key) + return -current +end +return current +`; + +/** + * Lua script for atomic decrement-and-cleanup. + * Decrements the key and deletes it if the count reaches zero or below. + * Eliminates the DECR-then-DEL race window. + */ +const DECREMENT_SCRIPT = ` +local key = KEYS[1] +local current = redis.call('DECR', key) +if current <= 0 then + redis.call('DEL', key) + return 0 +end +return current +`; + /** Lazily initialized cache for pending requests (used only for in-memory fallback) */ let pendingReqCache: ReturnType | null = null; @@ -80,36 +114,28 @@ export async function checkAndIncrementPendingRequest( return { allowed: true, pendingRequests: 0, limit }; } - // Use atomic Redis INCR when available to prevent race conditions + // Use atomic Lua script when Redis is available to prevent race conditions. + // A single EVAL round-trip atomically increments, checks, and decrements if over-limit. if (USE_REDIS && ioredisClient) { const key = buildKey(userId); try { - // Pipeline ensures INCR and EXPIRE execute atomically in one round-trip - // This prevents edge cases where crash between operations leaves key without TTL - const pipeline = ioredisClient.pipeline(); - pipeline.incr(key); - pipeline.expire(key, 60); - const results = await pipeline.exec(); + const result = (await ioredisClient.eval( + CHECK_AND_INCREMENT_SCRIPT, + 1, + key, + limit, + 60, + )) as number; - if (!results || results[0][0]) { - throw new Error('Pipeline execution failed'); + if (result < 0) { + // Negative return means over-limit (absolute value is the count before decrement) + const count = -result; + logger.debug(`[concurrency] User ${userId} exceeded concurrent limit: ${count}/${limit}`); + return { allowed: false, pendingRequests: count, limit }; } - const newCount = results[0][1] as number; - - if (newCount > limit) { - // Over limit - decrement back and reject - await ioredisClient.decr(key); - logger.debug( - `[concurrency] User ${userId} exceeded concurrent limit: ${newCount}/${limit}`, - ); - return { allowed: false, pendingRequests: newCount, limit }; - } - - logger.debug( - `[concurrency] User ${userId} incremented pending requests: ${newCount}/${limit}`, - ); - return { allowed: true, pendingRequests: newCount, limit }; + logger.debug(`[concurrency] User ${userId} incremented pending requests: ${result}/${limit}`); + return { allowed: true, pendingRequests: result, limit }; } catch (error) { logger.error('[concurrency] Redis atomic increment failed:', error); // On Redis error, allow the request to proceed (fail-open) @@ -164,18 +190,12 @@ export async function decrementPendingRequest(userId: string): Promise { return; } - // Use atomic Redis DECR when available + // Use atomic Lua script to decrement and clean up zero/negative keys in one round-trip if (USE_REDIS && ioredisClient) { const key = buildKey(userId); try { - const newCount = await ioredisClient.decr(key); - if (newCount < 0) { - // Counter went negative - reset to 0 and delete - await ioredisClient.del(key); - logger.debug(`[concurrency] User ${userId} pending requests cleared (was negative)`); - } else if (newCount === 0) { - // Clean up zero-value keys - await ioredisClient.del(key); + const newCount = (await ioredisClient.eval(DECREMENT_SCRIPT, 1, key)) as number; + if (newCount === 0) { logger.debug(`[concurrency] User ${userId} pending requests cleared`); } else { logger.debug(`[concurrency] User ${userId} decremented pending requests: ${newCount}`); diff --git a/packages/api/src/oauth/csrf.spec.ts b/packages/api/src/oauth/csrf.spec.ts new file mode 100644 index 0000000000..b56f1fd38f --- /dev/null +++ b/packages/api/src/oauth/csrf.spec.ts @@ -0,0 +1,99 @@ +import { shouldUseSecureCookie } from './csrf'; + +describe('shouldUseSecureCookie', () => { + const originalEnv = process.env; + + beforeEach(() => { + process.env = { ...originalEnv }; + }); + + afterAll(() => { + process.env = originalEnv; + }); + + it('should return true in production with a non-localhost domain', () => { + process.env.NODE_ENV = 'production'; + process.env.DOMAIN_SERVER = 'https://myapp.example.com'; + expect(shouldUseSecureCookie()).toBe(true); + }); + + it('should return false in development regardless of domain', () => { + process.env.NODE_ENV = 'development'; + process.env.DOMAIN_SERVER = 'https://myapp.example.com'; + expect(shouldUseSecureCookie()).toBe(false); + }); + + it('should return false when NODE_ENV is not set', () => { + delete process.env.NODE_ENV; + process.env.DOMAIN_SERVER = 'https://myapp.example.com'; + expect(shouldUseSecureCookie()).toBe(false); + }); + + describe('localhost detection in production', () => { + beforeEach(() => { + process.env.NODE_ENV = 'production'; + }); + + it('should return false for http://localhost:3080', () => { + process.env.DOMAIN_SERVER = 'http://localhost:3080'; + expect(shouldUseSecureCookie()).toBe(false); + }); + + it('should return false for https://localhost:3080', () => { + process.env.DOMAIN_SERVER = 'https://localhost:3080'; + expect(shouldUseSecureCookie()).toBe(false); + }); + + it('should return false for http://localhost (no port)', () => { + process.env.DOMAIN_SERVER = 'http://localhost'; + expect(shouldUseSecureCookie()).toBe(false); + }); + + it('should return false for http://127.0.0.1:3080', () => { + process.env.DOMAIN_SERVER = 'http://127.0.0.1:3080'; + expect(shouldUseSecureCookie()).toBe(false); + }); + + it('should return true for http://[::1]:3080 (IPv6 loopback — not detected due to URL bracket parsing)', () => { + // Known limitation: new URL('http://[::1]:3080').hostname returns '[::1]' (with brackets) + // but the check compares against '::1' (without brackets). IPv6 localhost is rare in practice. + process.env.DOMAIN_SERVER = 'http://[::1]:3080'; + expect(shouldUseSecureCookie()).toBe(true); + }); + + it('should return false for subdomain of localhost', () => { + process.env.DOMAIN_SERVER = 'http://app.localhost:3080'; + expect(shouldUseSecureCookie()).toBe(false); + }); + + it('should return true for a domain containing "localhost" as a substring but not as hostname', () => { + process.env.DOMAIN_SERVER = 'https://notlocalhost.example.com'; + expect(shouldUseSecureCookie()).toBe(true); + }); + + it('should return true for a regular production domain', () => { + process.env.DOMAIN_SERVER = 'https://chat.example.com'; + expect(shouldUseSecureCookie()).toBe(true); + }); + + it('should return true when DOMAIN_SERVER is empty (conservative default)', () => { + process.env.DOMAIN_SERVER = ''; + expect(shouldUseSecureCookie()).toBe(true); + }); + + it('should return true when DOMAIN_SERVER is not set (conservative default)', () => { + delete process.env.DOMAIN_SERVER; + expect(shouldUseSecureCookie()).toBe(true); + }); + + it('should handle DOMAIN_SERVER without protocol prefix', () => { + process.env.DOMAIN_SERVER = 'localhost:3080'; + expect(shouldUseSecureCookie()).toBe(false); + }); + + it('should handle case-insensitive hostnames', () => { + process.env.DOMAIN_SERVER = 'http://LOCALHOST:3080'; + expect(shouldUseSecureCookie()).toBe(false); + }); + }); +}); diff --git a/packages/api/src/oauth/csrf.ts b/packages/api/src/oauth/csrf.ts new file mode 100644 index 0000000000..6ed63968d1 --- /dev/null +++ b/packages/api/src/oauth/csrf.ts @@ -0,0 +1,119 @@ +import crypto from 'crypto'; +import type { Request, Response, NextFunction } from 'express'; + +export const OAUTH_CSRF_COOKIE = 'oauth_csrf'; +export const OAUTH_CSRF_MAX_AGE = 10 * 60 * 1000; + +export const OAUTH_SESSION_COOKIE = 'oauth_session'; +export const OAUTH_SESSION_MAX_AGE = 24 * 60 * 60 * 1000; +export const OAUTH_SESSION_COOKIE_PATH = '/api'; + +/** + * Determines if secure cookies should be used. + * Returns `true` in production unless the server is running on localhost (HTTP). + * This allows cookies to work on `http://localhost` during local development + * even when `NODE_ENV=production` (common in Docker Compose setups). + */ +export function shouldUseSecureCookie(): boolean { + const isProduction = process.env.NODE_ENV === 'production'; + const domainServer = process.env.DOMAIN_SERVER || ''; + + let hostname = ''; + if (domainServer) { + try { + const normalized = /^https?:\/\//i.test(domainServer) + ? domainServer + : `http://${domainServer}`; + const url = new URL(normalized); + hostname = (url.hostname || '').toLowerCase(); + } catch { + hostname = domainServer.toLowerCase(); + } + } + + const isLocalhost = + hostname === 'localhost' || + hostname === '127.0.0.1' || + hostname === '::1' || + hostname.endsWith('.localhost'); + + return isProduction && !isLocalhost; +} + +/** Generates an HMAC-based token for OAuth CSRF protection */ +export function generateOAuthCsrfToken(flowId: string, secret?: string): string { + const key = secret || process.env.JWT_SECRET; + if (!key) { + throw new Error('JWT_SECRET is required for OAuth CSRF token generation'); + } + return crypto.createHmac('sha256', key).update(flowId).digest('hex').slice(0, 32); +} + +/** Sets a SameSite=Lax CSRF cookie bound to a specific OAuth flow */ +export function setOAuthCsrfCookie(res: Response, flowId: string, cookiePath: string): void { + res.cookie(OAUTH_CSRF_COOKIE, generateOAuthCsrfToken(flowId), { + httpOnly: true, + secure: shouldUseSecureCookie(), + sameSite: 'lax', + maxAge: OAUTH_CSRF_MAX_AGE, + path: cookiePath, + }); +} + +/** + * Validates the per-flow CSRF cookie against the expected HMAC. + * Uses timing-safe comparison and always clears the cookie to prevent replay. + */ +export function validateOAuthCsrf( + req: Request, + res: Response, + flowId: string, + cookiePath: string, +): boolean { + const cookie = (req.cookies as Record | undefined)?.[OAUTH_CSRF_COOKIE]; + res.clearCookie(OAUTH_CSRF_COOKIE, { path: cookiePath }); + if (!cookie) { + return false; + } + const expected = generateOAuthCsrfToken(flowId); + if (cookie.length !== expected.length) { + return false; + } + return crypto.timingSafeEqual(Buffer.from(cookie), Buffer.from(expected)); +} + +/** + * Express middleware that sets the OAuth session cookie after JWT authentication. + * Chain after requireJwtAuth on routes that precede an OAuth redirect (e.g., reinitialize, bind). + */ +export function setOAuthSession(req: Request, res: Response, next: NextFunction): void { + const user = (req as Request & { user?: { id?: string } }).user; + if (user?.id && !(req.cookies as Record | undefined)?.[OAUTH_SESSION_COOKIE]) { + setOAuthSessionCookie(res, user.id); + } + next(); +} + +/** Sets a SameSite=Lax session cookie that binds the browser to the authenticated userId */ +export function setOAuthSessionCookie(res: Response, userId: string): void { + res.cookie(OAUTH_SESSION_COOKIE, generateOAuthCsrfToken(userId), { + httpOnly: true, + secure: shouldUseSecureCookie(), + sameSite: 'lax', + maxAge: OAUTH_SESSION_MAX_AGE, + path: OAUTH_SESSION_COOKIE_PATH, + }); +} + +/** Validates the session cookie against the expected userId using timing-safe comparison */ +export function validateOAuthSession(req: Request, userId: string): boolean { + const cookie = (req.cookies as Record | undefined)?.[OAUTH_SESSION_COOKIE]; + if (!cookie) { + return false; + } + const expected = generateOAuthCsrfToken(userId); + if (cookie.length !== expected.length) { + return false; + } + return crypto.timingSafeEqual(Buffer.from(cookie), Buffer.from(expected)); +} diff --git a/packages/api/src/oauth/index.ts b/packages/api/src/oauth/index.ts index e56053c166..01be92b6e3 100644 --- a/packages/api/src/oauth/index.ts +++ b/packages/api/src/oauth/index.ts @@ -1 +1,2 @@ +export * from './csrf'; export * from './tokens'; diff --git a/packages/api/src/stream/GenerationJobManager.ts b/packages/api/src/stream/GenerationJobManager.ts index fefb0dd207..815133d616 100644 --- a/packages/api/src/stream/GenerationJobManager.ts +++ b/packages/api/src/stream/GenerationJobManager.ts @@ -745,7 +745,6 @@ class GenerationJobManagerClass { const subscription = this.eventTransport.subscribe(streamId, { onChunk: (event) => { const e = event as t.ServerSentEvent; - // Filter out internal events if (!(e as Record)._internal) { onChunk(e); } @@ -754,14 +753,15 @@ class GenerationJobManagerClass { onError, }); - // Check if this is the first subscriber + if (subscription.ready) { + await subscription.ready; + } + const isFirst = this.eventTransport.isFirstSubscriber(streamId); - // First subscriber: replay buffered events and mark as connected if (!runtime.hasSubscriber) { runtime.hasSubscriber = true; - // Replay any events that were emitted before subscriber connected if (runtime.earlyEventBuffer.length > 0) { logger.debug( `[GenerationJobManager] Replaying ${runtime.earlyEventBuffer.length} buffered events for ${streamId}`, @@ -771,6 +771,8 @@ class GenerationJobManagerClass { } runtime.earlyEventBuffer = []; } + + this.eventTransport.syncReorderBuffer?.(streamId); } if (isFirst) { @@ -823,12 +825,13 @@ class GenerationJobManagerClass { } } - // Buffer early events if no subscriber yet (replay when first subscriber connects) if (!runtime.hasSubscriber) { runtime.earlyEventBuffer.push(event); + if (!this._isRedis) { + return; + } } - // Await the transport emit - critical for Redis mode to maintain event order await this.eventTransport.emitChunk(streamId, event); } diff --git a/packages/api/src/stream/__tests__/GenerationJobManager.stream_integration.spec.ts b/packages/api/src/stream/__tests__/GenerationJobManager.stream_integration.spec.ts index 8723f3f000..59fe32e4e5 100644 --- a/packages/api/src/stream/__tests__/GenerationJobManager.stream_integration.spec.ts +++ b/packages/api/src/stream/__tests__/GenerationJobManager.stream_integration.spec.ts @@ -1,4 +1,17 @@ import type { Redis, Cluster } from 'ioredis'; +import type { ServerSentEvent } from '~/types/events'; +import { InMemoryEventTransport } from '~/stream/implementations/InMemoryEventTransport'; +import { RedisEventTransport } from '~/stream/implementations/RedisEventTransport'; +import { InMemoryJobStore } from '~/stream/implementations/InMemoryJobStore'; +import { GenerationJobManagerClass } from '~/stream/GenerationJobManager'; +import { RedisJobStore } from '~/stream/implementations/RedisJobStore'; +import { createStreamServices } from '~/stream/createStreamServices'; +import { GenerationJobManager } from '~/stream/GenerationJobManager'; +import { + ioredisClient as staticRedisClient, + keyvRedisClient as staticKeyvClient, + keyvRedisClientReady, +} from '~/cache/redisClients'; /** * Integration tests for GenerationJobManager. @@ -11,20 +24,23 @@ import type { Redis, Cluster } from 'ioredis'; describe('GenerationJobManager Integration Tests', () => { let originalEnv: NodeJS.ProcessEnv; let ioredisClient: Redis | Cluster | null = null; + let dynamicKeyvClient: unknown = null; + let dynamicKeyvReady: Promise | null = null; const testPrefix = 'JobManager-Integration-Test'; beforeAll(async () => { originalEnv = { ...process.env }; - // Set up test environment process.env.USE_REDIS = process.env.USE_REDIS ?? 'true'; process.env.REDIS_URI = process.env.REDIS_URI ?? 'redis://127.0.0.1:6379'; process.env.REDIS_KEY_PREFIX = testPrefix; jest.resetModules(); - const { ioredisClient: client } = await import('../../cache/redisClients'); - ioredisClient = client; + const redisModule = await import('~/cache/redisClients'); + ioredisClient = redisModule.ioredisClient; + dynamicKeyvClient = redisModule.keyvRedisClient; + dynamicKeyvReady = redisModule.keyvRedisClientReady; }); afterEach(async () => { @@ -45,28 +61,29 @@ describe('GenerationJobManager Integration Tests', () => { }); afterAll(async () => { - if (ioredisClient) { - try { - // Use quit() to gracefully close - waits for pending commands - await ioredisClient.quit(); - } catch { - // Fall back to disconnect if quit fails - try { - ioredisClient.disconnect(); - } catch { - // Ignore - } + for (const ready of [keyvRedisClientReady, dynamicKeyvReady]) { + if (ready) { + await ready.catch(() => {}); } } + + const clients = [ioredisClient, staticRedisClient, staticKeyvClient, dynamicKeyvClient]; + for (const client of clients) { + if (!client) { + continue; + } + try { + await (client as { disconnect: () => void | Promise }).disconnect(); + } catch { + /* ignore */ + } + } + process.env = originalEnv; }); describe('In-Memory Mode', () => { test('should create and manage jobs', async () => { - const { GenerationJobManager } = await import('../GenerationJobManager'); - const { InMemoryJobStore } = await import('../implementations/InMemoryJobStore'); - const { InMemoryEventTransport } = await import('../implementations/InMemoryEventTransport'); - // Configure with in-memory // cleanupOnComplete: false so we can verify completed status GenerationJobManager.configure({ @@ -76,7 +93,7 @@ describe('GenerationJobManager Integration Tests', () => { cleanupOnComplete: false, }); - await GenerationJobManager.initialize(); + GenerationJobManager.initialize(); const streamId = `inmem-job-${Date.now()}`; const userId = 'test-user-1'; @@ -108,17 +125,13 @@ describe('GenerationJobManager Integration Tests', () => { }); test('should handle event streaming', async () => { - const { GenerationJobManager } = await import('../GenerationJobManager'); - const { InMemoryJobStore } = await import('../implementations/InMemoryJobStore'); - const { InMemoryEventTransport } = await import('../implementations/InMemoryEventTransport'); - GenerationJobManager.configure({ jobStore: new InMemoryJobStore({ ttlAfterComplete: 60000 }), eventTransport: new InMemoryEventTransport(), isRedis: false, }); - await GenerationJobManager.initialize(); + GenerationJobManager.initialize(); const streamId = `inmem-events-${Date.now()}`; await GenerationJobManager.createJob(streamId, 'user-1'); @@ -165,9 +178,6 @@ describe('GenerationJobManager Integration Tests', () => { return; } - const { GenerationJobManager } = await import('../GenerationJobManager'); - const { createStreamServices } = await import('../createStreamServices'); - // Create Redis services const services = createStreamServices({ useRedis: true, @@ -177,7 +187,7 @@ describe('GenerationJobManager Integration Tests', () => { expect(services.isRedis).toBe(true); GenerationJobManager.configure(services); - await GenerationJobManager.initialize(); + GenerationJobManager.initialize(); const streamId = `redis-job-${Date.now()}`; const userId = 'test-user-redis'; @@ -204,16 +214,13 @@ describe('GenerationJobManager Integration Tests', () => { return; } - const { GenerationJobManager } = await import('../GenerationJobManager'); - const { createStreamServices } = await import('../createStreamServices'); - const services = createStreamServices({ useRedis: true, redisClient: ioredisClient, }); GenerationJobManager.configure(services); - await GenerationJobManager.initialize(); + GenerationJobManager.initialize(); const streamId = `redis-chunks-${Date.now()}`; await GenerationJobManager.createJob(streamId, 'user-1'); @@ -262,16 +269,13 @@ describe('GenerationJobManager Integration Tests', () => { return; } - const { GenerationJobManager } = await import('../GenerationJobManager'); - const { createStreamServices } = await import('../createStreamServices'); - const services = createStreamServices({ useRedis: true, redisClient: ioredisClient, }); GenerationJobManager.configure(services); - await GenerationJobManager.initialize(); + GenerationJobManager.initialize(); const streamId = `redis-abort-${Date.now()}`; await GenerationJobManager.createJob(streamId, 'user-1'); @@ -314,10 +318,7 @@ describe('GenerationJobManager Integration Tests', () => { const runTestWithMode = async (isRedis: boolean) => { jest.resetModules(); - const { GenerationJobManager } = await import('../GenerationJobManager'); - if (isRedis && ioredisClient) { - const { createStreamServices } = await import('../createStreamServices'); GenerationJobManager.configure({ ...createStreamServices({ useRedis: true, @@ -326,10 +327,6 @@ describe('GenerationJobManager Integration Tests', () => { cleanupOnComplete: false, // Keep job for verification }); } else { - const { InMemoryJobStore } = await import('../implementations/InMemoryJobStore'); - const { InMemoryEventTransport } = await import( - '../implementations/InMemoryEventTransport' - ); GenerationJobManager.configure({ jobStore: new InMemoryJobStore({ ttlAfterComplete: 60000 }), eventTransport: new InMemoryEventTransport(), @@ -338,7 +335,7 @@ describe('GenerationJobManager Integration Tests', () => { }); } - await GenerationJobManager.initialize(); + GenerationJobManager.initialize(); const streamId = `consistency-${isRedis ? 'redis' : 'inmem'}-${Date.now()}`; @@ -395,8 +392,6 @@ describe('GenerationJobManager Integration Tests', () => { return; } - const { RedisJobStore } = await import('../implementations/RedisJobStore'); - // === REPLICA A: Creates the job === // Simulate Replica A creating the job directly in Redis // (In real scenario, this happens via GenerationJobManager.createJob on Replica A) @@ -412,8 +407,6 @@ describe('GenerationJobManager Integration Tests', () => { // === REPLICA B: Receives the stream request === // Fresh GenerationJobManager that does NOT have this job in its local runtimeState jest.resetModules(); - const { GenerationJobManager } = await import('../GenerationJobManager'); - const { createStreamServices } = await import('../createStreamServices'); const services = createStreamServices({ useRedis: true, @@ -421,7 +414,7 @@ describe('GenerationJobManager Integration Tests', () => { }); GenerationJobManager.configure(services); - await GenerationJobManager.initialize(); + GenerationJobManager.initialize(); // This is what the stream endpoint does: // const job = await GenerationJobManager.getJob(streamId); @@ -464,10 +457,6 @@ describe('GenerationJobManager Integration Tests', () => { return; } - // Simulate two instances - one creates job, other tries to get it - const { createStreamServices } = await import('../createStreamServices'); - const { RedisJobStore } = await import('../implementations/RedisJobStore'); - // Instance 1: Create the job directly in Redis (simulating another replica) const jobStore = new RedisJobStore(ioredisClient); await jobStore.initialize(); @@ -480,7 +469,6 @@ describe('GenerationJobManager Integration Tests', () => { // Instance 2: Fresh GenerationJobManager that doesn't have this job in memory jest.resetModules(); - const { GenerationJobManager } = await import('../GenerationJobManager'); const services = createStreamServices({ useRedis: true, @@ -488,7 +476,7 @@ describe('GenerationJobManager Integration Tests', () => { }); GenerationJobManager.configure(services); - await GenerationJobManager.initialize(); + GenerationJobManager.initialize(); // This should work even though the job was created by "another instance" // The manager should lazily create runtime state from Redis data @@ -517,16 +505,13 @@ describe('GenerationJobManager Integration Tests', () => { return; } - const { GenerationJobManager } = await import('../GenerationJobManager'); - const { createStreamServices } = await import('../createStreamServices'); - const services = createStreamServices({ useRedis: true, redisClient: ioredisClient, }); GenerationJobManager.configure(services); - await GenerationJobManager.initialize(); + GenerationJobManager.initialize(); const streamId = `sync-sent-${Date.now()}`; await GenerationJobManager.createJob(streamId, 'user-1'); @@ -559,9 +544,6 @@ describe('GenerationJobManager Integration Tests', () => { return; } - const { GenerationJobManager } = await import('../GenerationJobManager'); - const { createStreamServices } = await import('../createStreamServices'); - const services = createStreamServices({ useRedis: true, redisClient: ioredisClient, @@ -571,7 +553,7 @@ describe('GenerationJobManager Integration Tests', () => { ...services, cleanupOnComplete: false, // Keep job for verification }); - await GenerationJobManager.initialize(); + GenerationJobManager.initialize(); const streamId = `final-event-${Date.now()}`; await GenerationJobManager.createJob(streamId, 'user-1'); @@ -604,16 +586,13 @@ describe('GenerationJobManager Integration Tests', () => { return; } - const { GenerationJobManager } = await import('../GenerationJobManager'); - const { createStreamServices } = await import('../createStreamServices'); - const services = createStreamServices({ useRedis: true, redisClient: ioredisClient, }); GenerationJobManager.configure(services); - await GenerationJobManager.initialize(); + GenerationJobManager.initialize(); const streamId = `abort-signal-${Date.now()}`; const job = await GenerationJobManager.createJob(streamId, 'user-1'); @@ -649,9 +628,6 @@ describe('GenerationJobManager Integration Tests', () => { // This test validates that jobs created on Replica A and lazily-initialized // on Replica B can still receive and handle abort signals. - const { createStreamServices } = await import('../createStreamServices'); - const { RedisJobStore } = await import('../implementations/RedisJobStore'); - // === Replica A: Create job directly in Redis === const replicaAJobStore = new RedisJobStore(ioredisClient); await replicaAJobStore.initialize(); @@ -661,7 +637,6 @@ describe('GenerationJobManager Integration Tests', () => { // === Replica B: Fresh manager that lazily initializes the job === jest.resetModules(); - const { GenerationJobManager } = await import('../GenerationJobManager'); const services = createStreamServices({ useRedis: true, @@ -669,7 +644,7 @@ describe('GenerationJobManager Integration Tests', () => { }); GenerationJobManager.configure(services); - await GenerationJobManager.initialize(); + GenerationJobManager.initialize(); // Get job triggers lazy initialization of runtime state const job = await GenerationJobManager.getJob(streamId); @@ -710,19 +685,14 @@ describe('GenerationJobManager Integration Tests', () => { // 2. Replica B receives abort request and emits abort signal // 3. Replica A receives signal and aborts its AbortController - const { createStreamServices } = await import('../createStreamServices'); - const { RedisEventTransport } = await import('../implementations/RedisEventTransport'); - // Create the job on "Replica A" - const { GenerationJobManager } = await import('../GenerationJobManager'); - const services = createStreamServices({ useRedis: true, redisClient: ioredisClient, }); GenerationJobManager.configure(services); - await GenerationJobManager.initialize(); + GenerationJobManager.initialize(); const streamId = `cross-abort-${Date.now()}`; const job = await GenerationJobManager.createJob(streamId, 'user-1'); @@ -764,9 +734,6 @@ describe('GenerationJobManager Integration Tests', () => { return; } - const { createStreamServices } = await import('../createStreamServices'); - const { RedisJobStore } = await import('../implementations/RedisJobStore'); - // Create job directly in Redis with syncSent: true const jobStore = new RedisJobStore(ioredisClient); await jobStore.initialize(); @@ -777,7 +744,6 @@ describe('GenerationJobManager Integration Tests', () => { // Fresh manager that doesn't have this job locally jest.resetModules(); - const { GenerationJobManager } = await import('../GenerationJobManager'); const services = createStreamServices({ useRedis: true, @@ -785,7 +751,7 @@ describe('GenerationJobManager Integration Tests', () => { }); GenerationJobManager.configure(services); - await GenerationJobManager.initialize(); + GenerationJobManager.initialize(); // wasSyncSent should check Redis even without local runtime const wasSent = await GenerationJobManager.wasSyncSent(streamId); @@ -813,8 +779,6 @@ describe('GenerationJobManager Integration Tests', () => { } jest.resetModules(); - const { GenerationJobManager } = await import('../GenerationJobManager'); - const { createStreamServices } = await import('../createStreamServices'); const services = createStreamServices({ useRedis: true, @@ -822,7 +786,7 @@ describe('GenerationJobManager Integration Tests', () => { }); GenerationJobManager.configure(services); - await GenerationJobManager.initialize(); + GenerationJobManager.initialize(); const streamId = `order-rapid-${Date.now()}`; await GenerationJobManager.createJob(streamId, 'user-1'); @@ -865,8 +829,6 @@ describe('GenerationJobManager Integration Tests', () => { } jest.resetModules(); - const { GenerationJobManager } = await import('../GenerationJobManager'); - const { createStreamServices } = await import('../createStreamServices'); const services = createStreamServices({ useRedis: true, @@ -874,7 +836,7 @@ describe('GenerationJobManager Integration Tests', () => { }); GenerationJobManager.configure(services); - await GenerationJobManager.initialize(); + GenerationJobManager.initialize(); const streamId = `tool-args-${Date.now()}`; await GenerationJobManager.createJob(streamId, 'user-1'); @@ -926,8 +888,6 @@ describe('GenerationJobManager Integration Tests', () => { } jest.resetModules(); - const { GenerationJobManager } = await import('../GenerationJobManager'); - const { createStreamServices } = await import('../createStreamServices'); const services = createStreamServices({ useRedis: true, @@ -935,7 +895,7 @@ describe('GenerationJobManager Integration Tests', () => { }); GenerationJobManager.configure(services); - await GenerationJobManager.initialize(); + GenerationJobManager.initialize(); const streamId = `step-order-${Date.now()}`; await GenerationJobManager.createJob(streamId, 'user-1'); @@ -991,8 +951,6 @@ describe('GenerationJobManager Integration Tests', () => { } jest.resetModules(); - const { GenerationJobManager } = await import('../GenerationJobManager'); - const { createStreamServices } = await import('../createStreamServices'); const services = createStreamServices({ useRedis: true, @@ -1000,7 +958,7 @@ describe('GenerationJobManager Integration Tests', () => { }); GenerationJobManager.configure(services); - await GenerationJobManager.initialize(); + GenerationJobManager.initialize(); const streamId1 = `concurrent-1-${Date.now()}`; const streamId2 = `concurrent-2-${Date.now()}`; @@ -1057,6 +1015,202 @@ describe('GenerationJobManager Integration Tests', () => { }); }); + describe('Race Condition: Events Before Subscriber Ready', () => { + /** + * These tests verify the fix for the race condition where early events + * (like the 'created' event at seq 0) are lost because the Redis SUBSCRIBE + * command hasn't completed when events are published. + * + * Symptom: "[RedisEventTransport] Stream : timeout waiting for seq 0" + * followed by truncated responses in the UI. + * + * Root cause: RedisEventTransport.subscribe() fired Redis SUBSCRIBE as + * fire-and-forget. GenerationJobManager set hasSubscriber=true immediately, + * disabling the earlyEventBuffer before Redis was actually listening. + * + * Fix: subscribe() now returns a `ready` promise that resolves when the + * Redis subscription is confirmed. earlyEventBuffer stays active until then. + */ + + test('should buffer and replay events emitted before subscribe (in-memory)', async () => { + const manager = new GenerationJobManagerClass(); + manager.configure({ + jobStore: new InMemoryJobStore({ ttlAfterComplete: 60000 }), + eventTransport: new InMemoryEventTransport(), + isRedis: false, + }); + + manager.initialize(); + + const streamId = `early-buf-inmem-${Date.now()}`; + await manager.createJob(streamId, 'user-1'); + + await manager.emitChunk(streamId, { + created: true, + message: { text: 'hello' }, + streamId, + } as unknown as ServerSentEvent); + await manager.emitChunk(streamId, { + event: 'on_message_delta', + data: { delta: { content: { type: 'text', text: 'First chunk' } } }, + }); + + const receivedEvents: unknown[] = []; + const subscription = await manager.subscribe(streamId, (event: unknown) => + receivedEvents.push(event), + ); + + await new Promise((resolve) => setTimeout(resolve, 50)); + + expect(receivedEvents.length).toBe(2); + expect((receivedEvents[0] as Record).created).toBe(true); + + subscription?.unsubscribe(); + await manager.destroy(); + }); + + test('should buffer and replay events emitted before subscribe (Redis)', async () => { + if (!ioredisClient) { + console.warn('Redis not available, skipping test'); + return; + } + + const manager = new GenerationJobManagerClass(); + const services = createStreamServices({ + useRedis: true, + redisClient: ioredisClient, + }); + + manager.configure(services); + manager.initialize(); + + const streamId = `early-buf-redis-${Date.now()}`; + await manager.createJob(streamId, 'user-1'); + + await manager.emitChunk(streamId, { + created: true, + message: { text: 'hello' }, + streamId, + } as unknown as ServerSentEvent); + await manager.emitChunk(streamId, { + event: 'on_message_delta', + data: { delta: { content: { type: 'text', text: 'First' } } }, + }); + await manager.emitChunk(streamId, { + event: 'on_message_delta', + data: { delta: { content: { type: 'text', text: ' chunk' } } }, + }); + + const receivedEvents: unknown[] = []; + const subscription = await manager.subscribe(streamId, (event: unknown) => + receivedEvents.push(event), + ); + + await new Promise((resolve) => setTimeout(resolve, 100)); + + expect(receivedEvents.length).toBe(3); + expect((receivedEvents[0] as Record).created).toBe(true); + expect( + ((receivedEvents[1] as Record).data as Record).delta, + ).toBeDefined(); + + subscription?.unsubscribe(); + await manager.destroy(); + }); + + test('should not lose events when emitting before and after subscribe (Redis)', async () => { + if (!ioredisClient) { + console.warn('Redis not available, skipping test'); + return; + } + + const manager = new GenerationJobManagerClass(); + const services = createStreamServices({ + useRedis: true, + redisClient: ioredisClient, + }); + + manager.configure(services); + manager.initialize(); + + const streamId = `no-loss-${Date.now()}`; + await manager.createJob(streamId, 'user-1'); + + await manager.emitChunk(streamId, { + created: true, + message: { text: 'hello' }, + streamId, + } as unknown as ServerSentEvent); + await manager.emitChunk(streamId, { + event: 'on_run_step', + data: { id: 'step-1', type: 'message_creation', index: 0 }, + }); + + const receivedEvents: unknown[] = []; + const subscription = await manager.subscribe(streamId, (event: unknown) => + receivedEvents.push(event), + ); + + await new Promise((resolve) => setTimeout(resolve, 100)); + + for (let i = 0; i < 10; i++) { + await manager.emitChunk(streamId, { + event: 'on_message_delta', + data: { delta: { content: { type: 'text', text: `word${i} ` } }, index: i }, + }); + } + + await new Promise((resolve) => setTimeout(resolve, 300)); + + expect(receivedEvents.length).toBe(12); + expect((receivedEvents[0] as Record).created).toBe(true); + expect((receivedEvents[1] as Record).event).toBe('on_run_step'); + for (let i = 0; i < 10; i++) { + expect((receivedEvents[i + 2] as Record).event).toBe('on_message_delta'); + } + + subscription?.unsubscribe(); + await manager.destroy(); + }); + + test('RedisEventTransport.subscribe() should return a ready promise', async () => { + if (!ioredisClient) { + console.warn('Redis not available, skipping test'); + return; + } + + const subscriber = (ioredisClient as unknown as { duplicate: () => unknown }).duplicate(); + const transport = new RedisEventTransport(ioredisClient as never, subscriber as never); + + const streamId = `ready-promise-${Date.now()}`; + const result = transport.subscribe(streamId, { + onChunk: () => {}, + }); + + expect(result.ready).toBeDefined(); + expect(result.ready).toBeInstanceOf(Promise); + + await result.ready; + + result.unsubscribe(); + transport.destroy(); + (subscriber as { disconnect: () => void }).disconnect(); + }); + + test('InMemoryEventTransport.subscribe() should not have a ready promise', () => { + const transport = new InMemoryEventTransport(); + const streamId = `no-ready-${Date.now()}`; + const result = transport.subscribe(streamId, { + onChunk: () => {}, + }); + + expect(result.ready).toBeUndefined(); + + result.unsubscribe(); + transport.destroy(); + }); + }); + describe('Error Preservation for Late Subscribers', () => { /** * These tests verify the fix for the race condition where errors @@ -1067,10 +1221,6 @@ describe('GenerationJobManager Integration Tests', () => { */ test('should store error in emitError for late-connecting subscribers', async () => { - const { GenerationJobManager } = await import('../GenerationJobManager'); - const { InMemoryJobStore } = await import('../implementations/InMemoryJobStore'); - const { InMemoryEventTransport } = await import('../implementations/InMemoryEventTransport'); - GenerationJobManager.configure({ jobStore: new InMemoryJobStore({ ttlAfterComplete: 60000 }), eventTransport: new InMemoryEventTransport(), @@ -1078,7 +1228,7 @@ describe('GenerationJobManager Integration Tests', () => { cleanupOnComplete: false, }); - await GenerationJobManager.initialize(); + GenerationJobManager.initialize(); const streamId = `error-store-${Date.now()}`; await GenerationJobManager.createJob(streamId, 'user-1'); @@ -1099,10 +1249,6 @@ describe('GenerationJobManager Integration Tests', () => { }); test('should NOT delete job immediately when completeJob is called with error', async () => { - const { GenerationJobManager } = await import('../GenerationJobManager'); - const { InMemoryJobStore } = await import('../implementations/InMemoryJobStore'); - const { InMemoryEventTransport } = await import('../implementations/InMemoryEventTransport'); - GenerationJobManager.configure({ jobStore: new InMemoryJobStore({ ttlAfterComplete: 60000 }), eventTransport: new InMemoryEventTransport(), @@ -1110,7 +1256,7 @@ describe('GenerationJobManager Integration Tests', () => { cleanupOnComplete: true, // Default behavior }); - await GenerationJobManager.initialize(); + GenerationJobManager.initialize(); const streamId = `error-no-delete-${Date.now()}`; await GenerationJobManager.createJob(streamId, 'user-1'); @@ -1133,10 +1279,6 @@ describe('GenerationJobManager Integration Tests', () => { }); test('should send stored error to late-connecting subscriber', async () => { - const { GenerationJobManager } = await import('../GenerationJobManager'); - const { InMemoryJobStore } = await import('../implementations/InMemoryJobStore'); - const { InMemoryEventTransport } = await import('../implementations/InMemoryEventTransport'); - GenerationJobManager.configure({ jobStore: new InMemoryJobStore({ ttlAfterComplete: 60000 }), eventTransport: new InMemoryEventTransport(), @@ -1144,7 +1286,7 @@ describe('GenerationJobManager Integration Tests', () => { cleanupOnComplete: true, }); - await GenerationJobManager.initialize(); + GenerationJobManager.initialize(); const streamId = `error-late-sub-${Date.now()}`; await GenerationJobManager.createJob(streamId, 'user-1'); @@ -1182,10 +1324,6 @@ describe('GenerationJobManager Integration Tests', () => { }); test('should prioritize error status over finalEvent in subscribe', async () => { - const { GenerationJobManager } = await import('../GenerationJobManager'); - const { InMemoryJobStore } = await import('../implementations/InMemoryJobStore'); - const { InMemoryEventTransport } = await import('../implementations/InMemoryEventTransport'); - GenerationJobManager.configure({ jobStore: new InMemoryJobStore({ ttlAfterComplete: 60000 }), eventTransport: new InMemoryEventTransport(), @@ -1193,7 +1331,7 @@ describe('GenerationJobManager Integration Tests', () => { cleanupOnComplete: false, }); - await GenerationJobManager.initialize(); + GenerationJobManager.initialize(); const streamId = `error-priority-${Date.now()}`; await GenerationJobManager.createJob(streamId, 'user-1'); @@ -1237,9 +1375,6 @@ describe('GenerationJobManager Integration Tests', () => { return; } - const { createStreamServices } = await import('../createStreamServices'); - const { RedisJobStore } = await import('../implementations/RedisJobStore'); - // === Replica A: Creates job and emits error === const replicaAJobStore = new RedisJobStore(ioredisClient); await replicaAJobStore.initialize(); @@ -1256,7 +1391,6 @@ describe('GenerationJobManager Integration Tests', () => { // === Replica B: Fresh manager receives client connection === jest.resetModules(); - const { GenerationJobManager } = await import('../GenerationJobManager'); const services = createStreamServices({ useRedis: true, @@ -1267,7 +1401,7 @@ describe('GenerationJobManager Integration Tests', () => { ...services, cleanupOnComplete: false, }); - await GenerationJobManager.initialize(); + GenerationJobManager.initialize(); // Client connects to Replica B (job created on Replica A) let receivedError: string | undefined; @@ -1293,10 +1427,6 @@ describe('GenerationJobManager Integration Tests', () => { }); test('error jobs should be cleaned up by periodic cleanup after TTL', async () => { - const { GenerationJobManager } = await import('../GenerationJobManager'); - const { InMemoryJobStore } = await import('../implementations/InMemoryJobStore'); - const { InMemoryEventTransport } = await import('../implementations/InMemoryEventTransport'); - // Use a very short TTL for testing const jobStore = new InMemoryJobStore({ ttlAfterComplete: 100 }); @@ -1307,7 +1437,7 @@ describe('GenerationJobManager Integration Tests', () => { cleanupOnComplete: true, }); - await GenerationJobManager.initialize(); + GenerationJobManager.initialize(); const streamId = `error-cleanup-${Date.now()}`; await GenerationJobManager.createJob(streamId, 'user-1'); @@ -1333,36 +1463,457 @@ describe('GenerationJobManager Integration Tests', () => { }); }); - describe('createStreamServices Auto-Detection', () => { - test('should auto-detect Redis when USE_REDIS is true', async () => { + describe('Cross-Replica Live Streaming (Redis)', () => { + test('should publish events to Redis even when no local subscriber exists', async () => { if (!ioredisClient) { console.warn('Redis not available, skipping test'); return; } - // Force USE_REDIS to true - process.env.USE_REDIS = 'true'; - jest.resetModules(); + const replicaA = new GenerationJobManagerClass(); + const servicesA = createStreamServices({ + useRedis: true, + redisClient: ioredisClient, + }); + replicaA.configure(servicesA); + replicaA.initialize(); - const { createStreamServices } = await import('../createStreamServices'); - const services = createStreamServices(); + const replicaB = new GenerationJobManagerClass(); + const servicesB = createStreamServices({ + useRedis: true, + redisClient: ioredisClient, + }); + replicaB.configure(servicesB); + replicaB.initialize(); - // Should detect Redis - expect(services.isRedis).toBe(true); + const streamId = `cross-live-${Date.now()}`; + await replicaA.createJob(streamId, 'user-1'); + + const replicaBJobStore = new RedisJobStore(ioredisClient); + await replicaBJobStore.initialize(); + await replicaBJobStore.createJob(streamId, 'user-1'); + + const receivedOnB: unknown[] = []; + const subB = await replicaB.subscribe(streamId, (event: unknown) => receivedOnB.push(event)); + + await new Promise((resolve) => setTimeout(resolve, 100)); + + for (let i = 0; i < 5; i++) { + await replicaA.emitChunk(streamId, { + event: 'on_message_delta', + data: { delta: { content: { type: 'text', text: `token${i} ` } }, index: i }, + }); + } + + await new Promise((resolve) => setTimeout(resolve, 300)); + + expect(receivedOnB.length).toBe(5); + for (let i = 0; i < 5; i++) { + expect((receivedOnB[i] as Record).event).toBe('on_message_delta'); + } + + subB?.unsubscribe(); + replicaBJobStore.destroy(); + await replicaA.destroy(); + await replicaB.destroy(); }); - test('should fall back to in-memory when USE_REDIS is false', async () => { - process.env.USE_REDIS = 'false'; - jest.resetModules(); + test('should not cause data loss on cross-replica subscribers when local subscriber joins', async () => { + if (!ioredisClient) { + console.warn('Redis not available, skipping test'); + return; + } - const { createStreamServices } = await import('../createStreamServices'); - const services = createStreamServices(); + const replicaA = new GenerationJobManagerClass(); + const servicesA = createStreamServices({ + useRedis: true, + redisClient: ioredisClient, + }); + replicaA.configure(servicesA); + replicaA.initialize(); + + const replicaB = new GenerationJobManagerClass(); + const servicesB = createStreamServices({ + useRedis: true, + redisClient: ioredisClient, + }); + replicaB.configure(servicesB); + replicaB.initialize(); + + const streamId = `cross-seq-safe-${Date.now()}`; + + await replicaA.createJob(streamId, 'user-1'); + const replicaBJobStore = new RedisJobStore(ioredisClient); + await replicaBJobStore.initialize(); + await replicaBJobStore.createJob(streamId, 'user-1'); + + const receivedOnB: unknown[] = []; + const subB = await replicaB.subscribe(streamId, (event: unknown) => receivedOnB.push(event)); + await new Promise((resolve) => setTimeout(resolve, 100)); + + for (let i = 0; i < 3; i++) { + await replicaA.emitChunk(streamId, { + event: 'on_message_delta', + data: { delta: { content: { type: 'text', text: `pre-local-${i}` } }, index: i }, + }); + } + + await new Promise((resolve) => setTimeout(resolve, 300)); + expect(receivedOnB.length).toBe(3); + + const receivedOnA: unknown[] = []; + const subA = await replicaA.subscribe(streamId, (event: unknown) => receivedOnA.push(event)); + await new Promise((resolve) => setTimeout(resolve, 100)); + + expect(receivedOnA.length).toBe(3); + + for (let i = 0; i < 3; i++) { + await replicaA.emitChunk(streamId, { + event: 'on_message_delta', + data: { delta: { content: { type: 'text', text: `post-local-${i}` } }, index: i + 3 }, + }); + } + + await new Promise((resolve) => setTimeout(resolve, 300)); + + expect(receivedOnB.length).toBe(6); + expect(receivedOnA.length).toBe(6); + + for (let i = 0; i < 3; i++) { + const data = (receivedOnB[i] as Record).data as Record; + const delta = data.delta as Record; + const content = delta.content as Record; + expect(content.text).toBe(`pre-local-${i}`); + } + for (let i = 0; i < 3; i++) { + const data = (receivedOnB[i + 3] as Record).data as Record< + string, + unknown + >; + const delta = data.delta as Record; + const content = delta.content as Record; + expect(content.text).toBe(`post-local-${i}`); + } + + subA?.unsubscribe(); + subB?.unsubscribe(); + replicaBJobStore.destroy(); + await replicaA.destroy(); + await replicaB.destroy(); + }); + + test('should deliver buffered events locally AND publish live events cross-replica', async () => { + if (!ioredisClient) { + console.warn('Redis not available, skipping test'); + return; + } + + const replicaA = new GenerationJobManagerClass(); + const servicesA = createStreamServices({ + useRedis: true, + redisClient: ioredisClient, + }); + replicaA.configure(servicesA); + replicaA.initialize(); + + const streamId = `cross-buf-live-${Date.now()}`; + await replicaA.createJob(streamId, 'user-1'); + + await replicaA.emitChunk(streamId, { + created: true, + message: { text: 'hello' }, + streamId, + } as unknown as ServerSentEvent); + + const receivedOnA: unknown[] = []; + const subA = await replicaA.subscribe(streamId, (event: unknown) => receivedOnA.push(event)); + + await new Promise((resolve) => setTimeout(resolve, 100)); + + expect(receivedOnA.length).toBe(1); + expect((receivedOnA[0] as Record).created).toBe(true); + + const replicaB = new GenerationJobManagerClass(); + const servicesB = createStreamServices({ + useRedis: true, + redisClient: ioredisClient, + }); + replicaB.configure(servicesB); + replicaB.initialize(); + + const replicaBJobStore = new RedisJobStore(ioredisClient); + await replicaBJobStore.initialize(); + await replicaBJobStore.createJob(streamId, 'user-1'); + + const receivedOnB: unknown[] = []; + const subB = await replicaB.subscribe(streamId, (event: unknown) => receivedOnB.push(event)); + + await new Promise((resolve) => setTimeout(resolve, 100)); + + for (let i = 0; i < 3; i++) { + await replicaA.emitChunk(streamId, { + event: 'on_message_delta', + data: { delta: { content: { type: 'text', text: `word${i} ` } }, index: i }, + }); + } + + /** B joined after A published seq 0, so B's reorder buffer force-flushes after REORDER_TIMEOUT_MS (500ms) */ + await new Promise((resolve) => setTimeout(resolve, 700)); + + expect(receivedOnA.length).toBe(4); + expect(receivedOnB.length).toBe(3); + + subA?.unsubscribe(); + subB?.unsubscribe(); + replicaBJobStore.destroy(); + await replicaA.destroy(); + await replicaB.destroy(); + }); + }); + + describe('Concurrent Subscriber Readiness (Redis)', () => { + test('should return ready promise to all concurrent subscribers for same stream', async () => { + if (!ioredisClient) { + console.warn('Redis not available, skipping test'); + return; + } + + const subscriber = ( + ioredisClient as unknown as { duplicate: () => typeof ioredisClient } + ).duplicate()!; + const transport = new RedisEventTransport(ioredisClient as never, subscriber as never); + + const streamId = `concurrent-sub-${Date.now()}`; + + const sub1 = transport.subscribe(streamId, { + onChunk: () => {}, + onDone: () => {}, + }); + const sub2 = transport.subscribe(streamId, { + onChunk: () => {}, + onDone: () => {}, + }); + + expect(sub1.ready).toBeDefined(); + expect(sub2.ready).toBeDefined(); + + await Promise.all([sub1.ready, sub2.ready]); + + sub1.unsubscribe(); + sub2.unsubscribe(); + transport.destroy(); + subscriber.disconnect(); + }); + }); + + describe('Sequence Reset Safety (Redis)', () => { + test('should not receive stale pre-subscribe events via Redis after sequence reset', async () => { + if (!ioredisClient) { + console.warn('Redis not available, skipping test'); + return; + } + + const manager = new GenerationJobManagerClass(); + const services = createStreamServices({ + useRedis: true, + redisClient: ioredisClient, + }); + manager.configure(services); + manager.initialize(); + + const streamId = `seq-stale-${Date.now()}`; + await manager.createJob(streamId, 'user-1'); + + await manager.emitChunk(streamId, { + event: 'on_message_delta', + data: { delta: { content: { type: 'text', text: 'pre-sub-0' } }, index: 0 }, + }); + await manager.emitChunk(streamId, { + event: 'on_message_delta', + data: { delta: { content: { type: 'text', text: 'pre-sub-1' } }, index: 1 }, + }); + + const receivedEvents: unknown[] = []; + const sub = await manager.subscribe(streamId, (event: unknown) => receivedEvents.push(event)); + + await new Promise((resolve) => setTimeout(resolve, 100)); + + expect(receivedEvents.length).toBe(2); + expect( + ((receivedEvents[0] as Record).data as Record).delta, + ).toBeDefined(); + + for (let i = 0; i < 5; i++) { + await manager.emitChunk(streamId, { + event: 'on_message_delta', + data: { delta: { content: { type: 'text', text: `post-sub-${i}` } }, index: i + 2 }, + }); + } + + await new Promise((resolve) => setTimeout(resolve, 300)); + + expect(receivedEvents.length).toBe(7); + + const texts = receivedEvents.map( + (e) => + ( + ((e as Record).data as Record).delta as Record< + string, + unknown + > + ).content as Record, + ); + expect((texts[0] as Record).text).toBe('pre-sub-0'); + expect((texts[1] as Record).text).toBe('pre-sub-1'); + for (let i = 0; i < 5; i++) { + expect((texts[i + 2] as Record).text).toBe(`post-sub-${i}`); + } + + sub?.unsubscribe(); + await manager.destroy(); + }); + + test('should not reset sequence when second subscriber joins mid-stream', async () => { + if (!ioredisClient) { + console.warn('Redis not available, skipping test'); + return; + } + + const manager = new GenerationJobManagerClass(); + const services = createStreamServices({ + useRedis: true, + redisClient: ioredisClient, + }); + manager.configure({ ...services, cleanupOnComplete: false }); + manager.initialize(); + + const streamId = `seq-2nd-sub-${Date.now()}`; + await manager.createJob(streamId, 'user-1'); + + const eventsA: unknown[] = []; + const subA = await manager.subscribe(streamId, (event: unknown) => eventsA.push(event)); + + await new Promise((resolve) => setTimeout(resolve, 50)); + + for (let i = 0; i < 3; i++) { + await manager.emitChunk(streamId, { + event: 'on_message_delta', + data: { delta: { content: { type: 'text', text: `chunk-${i}` } }, index: i }, + }); + } + + await new Promise((resolve) => setTimeout(resolve, 100)); + + expect(eventsA.length).toBe(3); + + const eventsB: unknown[] = []; + const subB = await manager.subscribe(streamId, (event: unknown) => eventsB.push(event)); + + for (let i = 3; i < 6; i++) { + await manager.emitChunk(streamId, { + event: 'on_message_delta', + data: { delta: { content: { type: 'text', text: `chunk-${i}` } }, index: i }, + }); + } + + await new Promise((resolve) => setTimeout(resolve, 300)); + + expect(eventsA.length).toBe(6); + expect(eventsB.length).toBe(3); + + for (let i = 0; i < 6; i++) { + const text = ( + ( + ((eventsA[i] as Record).data as Record) + .delta as Record + ).content as Record + ).text; + expect(text).toBe(`chunk-${i}`); + } + + subA?.unsubscribe(); + subB?.unsubscribe(); + await manager.destroy(); + }); + }); + + describe('Subscribe Error Recovery (Redis)', () => { + test('should allow resubscription after Redis subscribe failure', async () => { + if (!ioredisClient) { + console.warn('Redis not available, skipping test'); + return; + } + + const subscriber = ( + ioredisClient as unknown as { duplicate: () => typeof ioredisClient } + ).duplicate()!; + + const realSubscribe = subscriber.subscribe.bind(subscriber); + let callCount = 0; + subscriber.subscribe = ((...args: Parameters) => { + callCount++; + if (callCount === 1) { + return Promise.reject(new Error('Simulated Redis SUBSCRIBE failure')); + } + return realSubscribe(...args); + }) as typeof subscriber.subscribe; + + const transport = new RedisEventTransport(ioredisClient as never, subscriber as never); + + const streamId = `err-retry-${Date.now()}`; + + const sub1 = transport.subscribe(streamId, { + onChunk: () => {}, + onDone: () => {}, + }); + + await sub1.ready; + + const receivedEvents: unknown[] = []; + sub1.unsubscribe(); + + const sub2 = transport.subscribe(streamId, { + onChunk: (event: unknown) => receivedEvents.push(event), + onDone: () => {}, + }); + + expect(sub2.ready).toBeDefined(); + await sub2.ready; + + await transport.emitChunk(streamId, { event: 'test', data: { value: 'hello' } }); + await new Promise((resolve) => setTimeout(resolve, 100)); + + expect(receivedEvents.length).toBe(1); + + sub2.unsubscribe(); + transport.destroy(); + subscriber.disconnect(); + }); + }); + + describe('createStreamServices Auto-Detection', () => { + test('should use Redis when useRedis is true and client is available', () => { + if (!ioredisClient) { + console.warn('Redis not available, skipping test'); + return; + } + + const services = createStreamServices({ + useRedis: true, + redisClient: ioredisClient, + }); + + expect(services.isRedis).toBe(true); + services.eventTransport.destroy(); + }); + + test('should fall back to in-memory when useRedis is false', () => { + const services = createStreamServices({ useRedis: false }); expect(services.isRedis).toBe(false); }); test('should allow forcing in-memory via config override', async () => { - const { createStreamServices } = await import('../createStreamServices'); const services = createStreamServices({ useRedis: false }); expect(services.isRedis).toBe(false); diff --git a/packages/api/src/stream/__tests__/RedisEventTransport.stream_integration.spec.ts b/packages/api/src/stream/__tests__/RedisEventTransport.stream_integration.spec.ts index d1f9467cd0..b5e53dfbff 100644 --- a/packages/api/src/stream/__tests__/RedisEventTransport.stream_integration.spec.ts +++ b/packages/api/src/stream/__tests__/RedisEventTransport.stream_integration.spec.ts @@ -19,8 +19,11 @@ describe('RedisEventTransport Integration Tests', () => { originalEnv = { ...process.env }; process.env.USE_REDIS = process.env.USE_REDIS ?? 'true'; + process.env.USE_REDIS_CLUSTER = process.env.USE_REDIS_CLUSTER ?? 'false'; process.env.REDIS_URI = process.env.REDIS_URI ?? 'redis://127.0.0.1:6379'; process.env.REDIS_KEY_PREFIX = testPrefix; + process.env.REDIS_PING_INTERVAL = '0'; + process.env.REDIS_RETRY_MAX_ATTEMPTS = '5'; jest.resetModules(); @@ -890,4 +893,121 @@ describe('RedisEventTransport Integration Tests', () => { subscriber.disconnect(); }); }); + + describe('Publish Error Propagation', () => { + test('should swallow emitChunk publish errors (callers fire-and-forget)', async () => { + const { RedisEventTransport } = await import('../implementations/RedisEventTransport'); + + const mockPublisher = { + publish: jest.fn().mockRejectedValue(new Error('Redis connection lost')), + }; + const mockSubscriber = { + on: jest.fn(), + subscribe: jest.fn().mockResolvedValue(undefined), + unsubscribe: jest.fn().mockResolvedValue(undefined), + }; + + const transport = new RedisEventTransport( + mockPublisher as unknown as Redis, + mockSubscriber as unknown as Redis, + ); + + const streamId = `error-prop-chunk-${Date.now()}`; + + // emitChunk swallows errors because callers often fire-and-forget (no await). + // Throwing would cause unhandled promise rejections. + await expect(transport.emitChunk(streamId, { data: 'test' })).resolves.toBeUndefined(); + + transport.destroy(); + }); + + test('should throw when emitDone publish fails', async () => { + const { RedisEventTransport } = await import('../implementations/RedisEventTransport'); + + const mockPublisher = { + publish: jest.fn().mockRejectedValue(new Error('Redis connection lost')), + }; + const mockSubscriber = { + on: jest.fn(), + subscribe: jest.fn().mockResolvedValue(undefined), + unsubscribe: jest.fn().mockResolvedValue(undefined), + }; + + const transport = new RedisEventTransport( + mockPublisher as unknown as Redis, + mockSubscriber as unknown as Redis, + ); + + const streamId = `error-prop-done-${Date.now()}`; + + await expect(transport.emitDone(streamId, { finished: true })).rejects.toThrow( + 'Redis connection lost', + ); + + transport.destroy(); + }); + + test('should throw when emitError publish fails', async () => { + const { RedisEventTransport } = await import('../implementations/RedisEventTransport'); + + const mockPublisher = { + publish: jest.fn().mockRejectedValue(new Error('Redis connection lost')), + }; + const mockSubscriber = { + on: jest.fn(), + subscribe: jest.fn().mockResolvedValue(undefined), + unsubscribe: jest.fn().mockResolvedValue(undefined), + }; + + const transport = new RedisEventTransport( + mockPublisher as unknown as Redis, + mockSubscriber as unknown as Redis, + ); + + const streamId = `error-prop-error-${Date.now()}`; + + await expect(transport.emitError(streamId, 'some error')).rejects.toThrow( + 'Redis connection lost', + ); + + transport.destroy(); + }); + + test('should still deliver events successfully when publish succeeds', async () => { + if (!ioredisClient) { + console.warn('Redis not available, skipping test'); + return; + } + + const { RedisEventTransport } = await import('../implementations/RedisEventTransport'); + + const subscriber = (ioredisClient as Redis).duplicate(); + const transport = new RedisEventTransport(ioredisClient, subscriber); + + const streamId = `error-prop-success-${Date.now()}`; + const receivedChunks: unknown[] = []; + let doneEvent: unknown = null; + + transport.subscribe(streamId, { + onChunk: (event) => receivedChunks.push(event), + onDone: (event) => { + doneEvent = event; + }, + }); + + await new Promise((resolve) => setTimeout(resolve, 200)); + + // These should NOT throw + await transport.emitChunk(streamId, { text: 'hello' }); + await transport.emitDone(streamId, { finished: true }); + + await new Promise((resolve) => setTimeout(resolve, 200)); + + expect(receivedChunks.length).toBe(1); + expect(doneEvent).toEqual({ finished: true }); + + transport.destroy(); + subscriber.disconnect(); + }); + }); }); diff --git a/packages/api/src/stream/__tests__/RedisJobStore.stream_integration.spec.ts b/packages/api/src/stream/__tests__/RedisJobStore.stream_integration.spec.ts index 89c6f9e92e..a64ba11f26 100644 --- a/packages/api/src/stream/__tests__/RedisJobStore.stream_integration.spec.ts +++ b/packages/api/src/stream/__tests__/RedisJobStore.stream_integration.spec.ts @@ -24,8 +24,11 @@ describe('RedisJobStore Integration Tests', () => { // Set up test environment process.env.USE_REDIS = process.env.USE_REDIS ?? 'true'; + process.env.USE_REDIS_CLUSTER = process.env.USE_REDIS_CLUSTER ?? 'false'; process.env.REDIS_URI = process.env.REDIS_URI ?? 'redis://127.0.0.1:6379'; process.env.REDIS_KEY_PREFIX = testPrefix; + process.env.REDIS_PING_INTERVAL = '0'; + process.env.REDIS_RETRY_MAX_ATTEMPTS = '5'; jest.resetModules(); @@ -880,6 +883,67 @@ describe('RedisJobStore Integration Tests', () => { }); }); + describe('Race Condition: updateJob after deleteJob', () => { + test('should not re-create job hash when updateJob runs after deleteJob', async () => { + if (!ioredisClient) { + return; + } + + const { RedisJobStore } = await import('../implementations/RedisJobStore'); + const store = new RedisJobStore(ioredisClient); + await store.initialize(); + + const streamId = `race-condition-${Date.now()}`; + await store.createJob(streamId, 'user-1', streamId); + + const jobKey = `stream:{${streamId}}:job`; + const ttlBefore = await ioredisClient.ttl(jobKey); + expect(ttlBefore).toBeGreaterThan(0); + + await store.deleteJob(streamId); + + const afterDelete = await ioredisClient.exists(jobKey); + expect(afterDelete).toBe(0); + + await store.updateJob(streamId, { finalEvent: JSON.stringify({ final: true }) }); + + const afterUpdate = await ioredisClient.exists(jobKey); + expect(afterUpdate).toBe(0); + + await store.destroy(); + }); + + test('should not leave orphan keys from concurrent emitDone and deleteJob', async () => { + if (!ioredisClient) { + return; + } + + const { RedisJobStore } = await import('../implementations/RedisJobStore'); + const store = new RedisJobStore(ioredisClient); + await store.initialize(); + + const streamId = `concurrent-race-${Date.now()}`; + await store.createJob(streamId, 'user-1', streamId); + + const jobKey = `stream:{${streamId}}:job`; + + await Promise.all([ + store.updateJob(streamId, { finalEvent: JSON.stringify({ final: true }) }), + store.deleteJob(streamId), + ]); + + await new Promise((resolve) => setTimeout(resolve, 100)); + + const exists = await ioredisClient.exists(jobKey); + const ttl = exists ? await ioredisClient.ttl(jobKey) : -2; + + expect(ttl === -2 || ttl > 0).toBe(true); + expect(ttl).not.toBe(-1); + + await store.destroy(); + }); + }); + describe('Local Graph Cache Optimization', () => { test('should use local cache when available', async () => { if (!ioredisClient) { @@ -972,4 +1036,196 @@ describe('RedisJobStore Integration Tests', () => { await instance2.destroy(); }); }); + + describe('Batched Cleanup', () => { + test('should clean up many stale jobs in parallel batches', async () => { + if (!ioredisClient) { + return; + } + + const { RedisJobStore } = await import('../implementations/RedisJobStore'); + // Very short TTL so jobs are immediately stale + const store = new RedisJobStore(ioredisClient, { runningTtl: 1 }); + await store.initialize(); + + const jobCount = 75; // More than one batch of 50 + const veryOldTimestamp = Date.now() - 10000; // 10 seconds ago + + // Create many stale jobs directly in Redis + for (let i = 0; i < jobCount; i++) { + const streamId = `batch-cleanup-${Date.now()}-${i}`; + const jobKey = `stream:{${streamId}}:job`; + await ioredisClient.hmset(jobKey, { + streamId, + userId: 'batch-user', + status: 'running', + createdAt: veryOldTimestamp.toString(), + syncSent: '0', + }); + await ioredisClient.sadd('stream:running', streamId); + } + + // Verify jobs are in the running set + const runningBefore = await ioredisClient.scard('stream:running'); + expect(runningBefore).toBeGreaterThanOrEqual(jobCount); + + // Run cleanup - should process in batches of 50 + const cleaned = await store.cleanup(); + expect(cleaned).toBeGreaterThanOrEqual(jobCount); + + await store.destroy(); + }); + + test('should not clean up valid running jobs during batch cleanup', async () => { + if (!ioredisClient) { + return; + } + + const { RedisJobStore } = await import('../implementations/RedisJobStore'); + const store = new RedisJobStore(ioredisClient, { runningTtl: 1200 }); + await store.initialize(); + + // Create a mix of valid and stale jobs + const validStreamId = `valid-job-${Date.now()}`; + await store.createJob(validStreamId, 'user-1', validStreamId); + + const staleStreamId = `stale-job-${Date.now()}`; + const jobKey = `stream:{${staleStreamId}}:job`; + await ioredisClient.hmset(jobKey, { + streamId: staleStreamId, + userId: 'user-1', + status: 'running', + createdAt: (Date.now() - 2000000).toString(), // Very old + syncSent: '0', + }); + await ioredisClient.sadd('stream:running', staleStreamId); + + const cleaned = await store.cleanup(); + expect(cleaned).toBeGreaterThanOrEqual(1); + + // Valid job should still exist + const validJob = await store.getJob(validStreamId); + expect(validJob).not.toBeNull(); + expect(validJob?.status).toBe('running'); + + await store.destroy(); + }); + }); + + describe('appendChunk TTL Refresh', () => { + test('should set TTL on the chunk stream', async () => { + if (!ioredisClient) { + return; + } + + const { RedisJobStore } = await import('../implementations/RedisJobStore'); + const store = new RedisJobStore(ioredisClient, { runningTtl: 120 }); + await store.initialize(); + + const streamId = `append-ttl-${Date.now()}`; + await store.createJob(streamId, 'user-1', streamId); + + await store.appendChunk(streamId, { + event: 'on_message_delta', + data: { id: 'step-1', type: 'text', text: 'first' }, + }); + + const chunkKey = `stream:{${streamId}}:chunks`; + const ttl = await ioredisClient.ttl(chunkKey); + expect(ttl).toBeGreaterThan(0); + expect(ttl).toBeLessThanOrEqual(120); + + await store.destroy(); + }); + + test('should refresh TTL on subsequent chunks (not just first)', async () => { + if (!ioredisClient) { + return; + } + + const { RedisJobStore } = await import('../implementations/RedisJobStore'); + const store = new RedisJobStore(ioredisClient, { runningTtl: 120 }); + await store.initialize(); + + const streamId = `append-refresh-${Date.now()}`; + await store.createJob(streamId, 'user-1', streamId); + + // Append first chunk + await store.appendChunk(streamId, { + event: 'on_message_delta', + data: { id: 'step-1', type: 'text', text: 'first' }, + }); + + const chunkKey = `stream:{${streamId}}:chunks`; + const ttl1 = await ioredisClient.ttl(chunkKey); + expect(ttl1).toBeGreaterThan(0); + + // Manually reduce TTL to simulate time passing + await ioredisClient.expire(chunkKey, 30); + const reducedTtl = await ioredisClient.ttl(chunkKey); + expect(reducedTtl).toBeLessThanOrEqual(30); + + // Append another chunk - TTL should be refreshed back to running TTL + await store.appendChunk(streamId, { + event: 'on_message_delta', + data: { id: 'step-1', type: 'text', text: 'second' }, + }); + + const ttl2 = await ioredisClient.ttl(chunkKey); + // Should be refreshed to ~120, not still ~30 + expect(ttl2).toBeGreaterThan(30); + expect(ttl2).toBeLessThanOrEqual(120); + + await store.destroy(); + }); + + test('should store chunks correctly via pipeline', async () => { + if (!ioredisClient) { + return; + } + + const { RedisJobStore } = await import('../implementations/RedisJobStore'); + const store = new RedisJobStore(ioredisClient); + await store.initialize(); + + const streamId = `append-pipeline-${Date.now()}`; + await store.createJob(streamId, 'user-1', streamId); + + const chunks = [ + { + event: 'on_run_step', + data: { + id: 'step-1', + runId: 'run-1', + index: 0, + stepDetails: { type: 'message_creation' }, + }, + }, + { + event: 'on_message_delta', + data: { id: 'step-1', delta: { content: { type: 'text', text: 'Hello ' } } }, + }, + { + event: 'on_message_delta', + data: { id: 'step-1', delta: { content: { type: 'text', text: 'world!' } } }, + }, + ]; + + for (const chunk of chunks) { + await store.appendChunk(streamId, chunk); + } + + // Verify all chunks were stored + const chunkKey = `stream:{${streamId}}:chunks`; + const len = await ioredisClient.xlen(chunkKey); + expect(len).toBe(3); + + // Verify content can be reconstructed + const content = await store.getContentParts(streamId); + expect(content).not.toBeNull(); + expect(content!.content.length).toBeGreaterThan(0); + + await store.destroy(); + }); + }); }); diff --git a/packages/api/src/stream/__tests__/collectedUsage.spec.ts b/packages/api/src/stream/__tests__/collectedUsage.spec.ts index 3e534b537a..d9a9ab95fe 100644 --- a/packages/api/src/stream/__tests__/collectedUsage.spec.ts +++ b/packages/api/src/stream/__tests__/collectedUsage.spec.ts @@ -146,7 +146,7 @@ describe('CollectedUsage - GenerationJobManager', () => { cleanupOnComplete: false, }); - await GenerationJobManager.initialize(); + GenerationJobManager.initialize(); const streamId = `manager-test-${Date.now()}`; await GenerationJobManager.createJob(streamId, 'user-1'); @@ -179,7 +179,7 @@ describe('CollectedUsage - GenerationJobManager', () => { cleanupOnComplete: false, }); - await GenerationJobManager.initialize(); + GenerationJobManager.initialize(); const streamId = `no-usage-test-${Date.now()}`; await GenerationJobManager.createJob(streamId, 'user-1'); @@ -202,7 +202,7 @@ describe('CollectedUsage - GenerationJobManager', () => { isRedis: false, }); - await GenerationJobManager.initialize(); + GenerationJobManager.initialize(); const collectedUsage: UsageMetadata[] = [ { input_tokens: 100, output_tokens: 50, model: 'gpt-4' }, @@ -235,7 +235,7 @@ describe('AbortJob - Text and CollectedUsage', () => { cleanupOnComplete: false, }); - await GenerationJobManager.initialize(); + GenerationJobManager.initialize(); const streamId = `text-extract-${Date.now()}`; await GenerationJobManager.createJob(streamId, 'user-1'); @@ -267,7 +267,7 @@ describe('AbortJob - Text and CollectedUsage', () => { cleanupOnComplete: false, }); - await GenerationJobManager.initialize(); + GenerationJobManager.initialize(); const streamId = `empty-text-${Date.now()}`; await GenerationJobManager.createJob(streamId, 'user-1'); @@ -291,7 +291,7 @@ describe('AbortJob - Text and CollectedUsage', () => { cleanupOnComplete: false, }); - await GenerationJobManager.initialize(); + GenerationJobManager.initialize(); const streamId = `full-abort-${Date.now()}`; await GenerationJobManager.createJob(streamId, 'user-1'); @@ -328,7 +328,7 @@ describe('AbortJob - Text and CollectedUsage', () => { isRedis: false, }); - await GenerationJobManager.initialize(); + GenerationJobManager.initialize(); const abortResult = await GenerationJobManager.abortJob('non-existent-job'); @@ -365,7 +365,7 @@ describe('Real-world Scenarios', () => { cleanupOnComplete: false, }); - await GenerationJobManager.initialize(); + GenerationJobManager.initialize(); const streamId = `parallel-abort-${Date.now()}`; await GenerationJobManager.createJob(streamId, 'user-1'); @@ -419,7 +419,7 @@ describe('Real-world Scenarios', () => { cleanupOnComplete: false, }); - await GenerationJobManager.initialize(); + GenerationJobManager.initialize(); const streamId = `cache-abort-${Date.now()}`; await GenerationJobManager.createJob(streamId, 'user-1'); @@ -459,7 +459,7 @@ describe('Real-world Scenarios', () => { cleanupOnComplete: false, }); - await GenerationJobManager.initialize(); + GenerationJobManager.initialize(); const streamId = `sequential-abort-${Date.now()}`; await GenerationJobManager.createJob(streamId, 'user-1'); diff --git a/packages/api/src/stream/__tests__/reconnect-reorder-desync.stream_integration.spec.ts b/packages/api/src/stream/__tests__/reconnect-reorder-desync.stream_integration.spec.ts new file mode 100644 index 0000000000..effb7c5c7d --- /dev/null +++ b/packages/api/src/stream/__tests__/reconnect-reorder-desync.stream_integration.spec.ts @@ -0,0 +1,450 @@ +import type { Redis, Cluster } from 'ioredis'; +import { RedisEventTransport } from '~/stream/implementations/RedisEventTransport'; +import { GenerationJobManagerClass } from '~/stream/GenerationJobManager'; +import { createStreamServices } from '~/stream/createStreamServices'; +import { + ioredisClient as staticRedisClient, + keyvRedisClient as staticKeyvClient, + keyvRedisClientReady, +} from '~/cache/redisClients'; + +/** + * Regression tests for the reconnect reorder buffer desync bug. + * + * Bug: When a user disconnects and reconnects to a stream multiple times, + * the second+ reconnect lost chunks because the transport deleted stream state + * on last unsubscribe, destroying the allSubscribersLeftCallbacks registered + * by createJob(). This prevented hasSubscriber from being reset, which in turn + * prevented syncReorderBuffer from being called on reconnect. + * + * Fix: Preserve stream state (callbacks, abort handlers) across reconnect cycles + * instead of deleting it. The state is fully cleaned up by cleanup() when the + * job completes. + * + * Run with: USE_REDIS=true npx jest reconnect-reorder-desync + */ +describe('Reconnect Reorder Buffer Desync (Regression)', () => { + describe('Callback preservation across reconnect cycles (Unit)', () => { + test('allSubscribersLeft callback fires on every disconnect, not just the first', () => { + const mockPublisher = { + publish: jest.fn().mockResolvedValue(1), + }; + const mockSubscriber = { + on: jest.fn(), + subscribe: jest.fn().mockResolvedValue(undefined), + unsubscribe: jest.fn().mockResolvedValue(undefined), + }; + + const transport = new RedisEventTransport( + mockPublisher as unknown as Redis, + mockSubscriber as unknown as Redis, + ); + + const streamId = 'callback-persist-test'; + let callbackFireCount = 0; + + // Register callback (simulates what createJob does) + transport.onAllSubscribersLeft(streamId, () => { + callbackFireCount++; + }); + + // First subscribe/unsubscribe cycle + const sub1 = transport.subscribe(streamId, { onChunk: () => {} }); + sub1.unsubscribe(); + + expect(callbackFireCount).toBe(1); + + // Second subscribe/unsubscribe cycle — callback must still fire + const sub2 = transport.subscribe(streamId, { onChunk: () => {} }); + sub2.unsubscribe(); + + expect(callbackFireCount).toBe(2); + + // Third cycle — continues to work + const sub3 = transport.subscribe(streamId, { onChunk: () => {} }); + sub3.unsubscribe(); + + expect(callbackFireCount).toBe(3); + + transport.destroy(); + }); + + test('abort callback survives across reconnect cycles', () => { + const mockPublisher = { + publish: jest.fn().mockResolvedValue(1), + }; + const mockSubscriber = { + on: jest.fn(), + subscribe: jest.fn().mockResolvedValue(undefined), + unsubscribe: jest.fn().mockResolvedValue(undefined), + }; + + const transport = new RedisEventTransport( + mockPublisher as unknown as Redis, + mockSubscriber as unknown as Redis, + ); + + const streamId = 'abort-callback-persist-test'; + let abortCallbackFired = false; + + // Register abort callback (simulates what createJob does) + transport.onAbort(streamId, () => { + abortCallbackFired = true; + }); + + // Subscribe/unsubscribe cycle + const sub1 = transport.subscribe(streamId, { onChunk: () => {} }); + sub1.unsubscribe(); + + // Re-subscribe and receive an abort signal + const sub2 = transport.subscribe(streamId, { onChunk: () => {} }); + + const messageHandler = mockSubscriber.on.mock.calls.find( + (call) => call[0] === 'message', + )?.[1] as (channel: string, message: string) => void; + + const channel = `stream:{${streamId}}:events`; + messageHandler(channel, JSON.stringify({ type: 'abort' })); + + // Abort callback should fire — it was preserved across the reconnect + expect(abortCallbackFired).toBe(true); + + sub2.unsubscribe(); + transport.destroy(); + }); + }); + + describe('Reorder buffer sync on reconnect (Unit)', () => { + /** + * After the fix, the allSubscribersLeft callback fires on every disconnect, + * which resets hasSubscriber. GenerationJobManager.subscribe() then enters + * the if (!runtime.hasSubscriber) block and calls syncReorderBuffer. + * + * This test verifies at the transport level that when syncReorderBuffer IS + * called (as it now will be on every reconnect), messages are delivered + * immediately regardless of how many reconnect cycles have occurred. + */ + test('syncReorderBuffer works correctly on third+ reconnect', async () => { + const mockPublisher = { + publish: jest.fn().mockResolvedValue(1), + }; + const mockSubscriber = { + on: jest.fn(), + subscribe: jest.fn().mockResolvedValue(undefined), + unsubscribe: jest.fn().mockResolvedValue(undefined), + }; + + const transport = new RedisEventTransport( + mockPublisher as unknown as Redis, + mockSubscriber as unknown as Redis, + ); + + const streamId = 'reorder-multi-reconnect-test'; + + transport.onAllSubscribersLeft(streamId, () => { + // Simulates the callback from createJob + }); + + const messageHandler = mockSubscriber.on.mock.calls.find( + (call) => call[0] === 'message', + )?.[1] as (channel: string, message: string) => void; + + const channel = `stream:{${streamId}}:events`; + + // Run 3 full subscribe/emit/unsubscribe cycles + for (let cycle = 0; cycle < 3; cycle++) { + const chunks: unknown[] = []; + const sub = transport.subscribe(streamId, { + onChunk: (event) => chunks.push(event), + }); + + // Sync reorder buffer (as GenerationJobManager.subscribe does) + transport.syncReorderBuffer(streamId); + + const baseSeq = cycle * 10; + + // Emit 10 chunks (advances publisher sequence) + for (let i = 0; i < 10; i++) { + await transport.emitChunk(streamId, { index: baseSeq + i }); + } + + // Deliver messages via pub/sub handler + for (let i = 0; i < 10; i++) { + messageHandler( + channel, + JSON.stringify({ type: 'chunk', seq: baseSeq + i, data: { index: baseSeq + i } }), + ); + } + + // Messages should be delivered immediately on every cycle + expect(chunks.length).toBe(10); + expect(chunks.map((c) => (c as { index: number }).index)).toEqual( + Array.from({ length: 10 }, (_, i) => baseSeq + i), + ); + + sub.unsubscribe(); + } + + transport.destroy(); + }); + + test('reorder buffer works correctly when syncReorderBuffer IS called', async () => { + const mockPublisher = { + publish: jest.fn().mockResolvedValue(1), + }; + const mockSubscriber = { + on: jest.fn(), + subscribe: jest.fn().mockResolvedValue(undefined), + unsubscribe: jest.fn().mockResolvedValue(undefined), + }; + + const transport = new RedisEventTransport( + mockPublisher as unknown as Redis, + mockSubscriber as unknown as Redis, + ); + + const streamId = 'reorder-sync-test'; + + // Emit 20 chunks to advance publisher sequence + for (let i = 0; i < 20; i++) { + await transport.emitChunk(streamId, { index: i }); + } + + // Subscribe and sync the reorder buffer + const chunks: unknown[] = []; + const sub = transport.subscribe(streamId, { + onChunk: (event) => chunks.push(event), + }); + + // This is the critical call - sync nextSeq to match publisher + transport.syncReorderBuffer(streamId); + + // Deliver messages starting at seq 20 + const messageHandler = mockSubscriber.on.mock.calls.find( + (call) => call[0] === 'message', + )?.[1] as (channel: string, message: string) => void; + + const channel = `stream:{${streamId}}:events`; + + for (let i = 20; i < 25; i++) { + messageHandler(channel, JSON.stringify({ type: 'chunk', seq: i, data: { index: i } })); + } + + // Messages should be delivered IMMEDIATELY (no 500ms wait) + // because nextSeq was synced to 20 + expect(chunks.length).toBe(5); + expect(chunks.map((c) => (c as { index: number }).index)).toEqual([20, 21, 22, 23, 24]); + + sub.unsubscribe(); + transport.destroy(); + }); + }); + + describe('End-to-end reconnect with GenerationJobManager (Integration)', () => { + let originalEnv: NodeJS.ProcessEnv; + let ioredisClient: Redis | Cluster | null = null; + let dynamicKeyvClient: unknown = null; + let dynamicKeyvReady: Promise | null = null; + const testPrefix = 'ReconnectDesync-Test'; + + beforeAll(async () => { + originalEnv = { ...process.env }; + + process.env.USE_REDIS = process.env.USE_REDIS ?? 'true'; + process.env.REDIS_URI = process.env.REDIS_URI ?? 'redis://127.0.0.1:6379'; + process.env.REDIS_KEY_PREFIX = testPrefix; + + jest.resetModules(); + + const redisModule = await import('~/cache/redisClients'); + ioredisClient = redisModule.ioredisClient; + dynamicKeyvClient = redisModule.keyvRedisClient; + dynamicKeyvReady = redisModule.keyvRedisClientReady; + }); + + afterEach(async () => { + jest.resetModules(); + + if (ioredisClient) { + try { + const keys = await ioredisClient.keys(`${testPrefix}*`); + const streamKeys = await ioredisClient.keys('stream:*'); + const allKeys = [...keys, ...streamKeys]; + await Promise.all(allKeys.map((key) => ioredisClient!.del(key))); + } catch { + // Ignore cleanup errors + } + } + }); + + afterAll(async () => { + for (const ready of [keyvRedisClientReady, dynamicKeyvReady]) { + if (ready) { + await ready.catch(() => {}); + } + } + + const clients = [ioredisClient, staticRedisClient, staticKeyvClient, dynamicKeyvClient]; + for (const client of clients) { + if (!client) { + continue; + } + try { + await (client as { disconnect: () => void | Promise }).disconnect(); + } catch { + /* ignore */ + } + } + + process.env = originalEnv; + }); + + /** + * Verifies that all reconnect cycles deliver chunks immediately — + * not just the first reconnect. + */ + test('chunks are delivered immediately on every reconnect cycle', async () => { + if (!ioredisClient) { + console.warn('Redis not available, skipping test'); + return; + } + + const manager = new GenerationJobManagerClass(); + const services = createStreamServices({ + useRedis: true, + redisClient: ioredisClient, + }); + + manager.configure(services); + manager.initialize(); + + const streamId = `reconnect-fixed-${Date.now()}`; + await manager.createJob(streamId, 'user-1'); + + // Run 3 subscribe/emit/unsubscribe cycles + for (let cycle = 0; cycle < 3; cycle++) { + const chunks: unknown[] = []; + const sub = await manager.subscribe(streamId, (event) => chunks.push(event)); + + await new Promise((resolve) => setTimeout(resolve, 100)); + + // Emit 10 chunks + for (let i = 0; i < 10; i++) { + await manager.emitChunk(streamId, { + event: 'on_message_delta', + data: { + delta: { content: { type: 'text', text: `c${cycle}-${i}` } }, + index: cycle * 10 + i, + }, + }); + } + + // Chunks should arrive within 200ms (well under the 500ms force-flush timeout) + await new Promise((resolve) => setTimeout(resolve, 200)); + + expect(chunks.length).toBe(10); + + sub!.unsubscribe(); + await new Promise((resolve) => setTimeout(resolve, 100)); + } + + await manager.destroy(); + }); + + /** + * Verifies that syncSent is correctly reset on every disconnect, + * proving the onAllSubscribersLeft callback survives reconnect cycles. + */ + test('onAllSubscribersLeft callback resets state on every disconnect', async () => { + if (!ioredisClient) { + console.warn('Redis not available, skipping test'); + return; + } + + const manager = new GenerationJobManagerClass(); + const services = createStreamServices({ + useRedis: true, + redisClient: ioredisClient, + }); + + manager.configure(services); + manager.initialize(); + + const streamId = `callback-persist-integ-${Date.now()}`; + await manager.createJob(streamId, 'user-1'); + + for (let cycle = 0; cycle < 3; cycle++) { + const sub = await manager.subscribe(streamId, () => {}); + await new Promise((resolve) => setTimeout(resolve, 50)); + + // Mark sync as sent + manager.markSyncSent(streamId); + await new Promise((resolve) => setTimeout(resolve, 50)); + + let syncSent = await manager.wasSyncSent(streamId); + expect(syncSent).toBe(true); + + // Disconnect + sub!.unsubscribe(); + await new Promise((resolve) => setTimeout(resolve, 100)); + + // Callback should reset syncSent on every disconnect + syncSent = await manager.wasSyncSent(streamId); + expect(syncSent).toBe(false); + } + + await manager.destroy(); + }); + + /** + * Verifies all reconnect cycles deliver chunks immediately with no + * increasing gap pattern. + */ + test('no increasing gap pattern across reconnect cycles', async () => { + if (!ioredisClient) { + console.warn('Redis not available, skipping test'); + return; + } + + const manager = new GenerationJobManagerClass(); + const services = createStreamServices({ + useRedis: true, + redisClient: ioredisClient, + }); + + manager.configure(services); + manager.initialize(); + + const streamId = `no-gaps-${Date.now()}`; + await manager.createJob(streamId, 'user-1'); + + const chunksPerCycle = 15; + + for (let cycle = 0; cycle < 4; cycle++) { + const chunks: unknown[] = []; + const sub = await manager.subscribe(streamId, (event) => chunks.push(event)); + await new Promise((resolve) => setTimeout(resolve, 100)); + + // Emit chunks + for (let i = 0; i < chunksPerCycle; i++) { + await manager.emitChunk(streamId, { + event: 'on_message_delta', + data: { + delta: { content: { type: 'text', text: `c${cycle}-${i}` } }, + index: cycle * chunksPerCycle + i, + }, + }); + } + + // All chunks should arrive within 200ms on every cycle + await new Promise((resolve) => setTimeout(resolve, 200)); + expect(chunks.length).toBe(chunksPerCycle); + + sub!.unsubscribe(); + await new Promise((resolve) => setTimeout(resolve, 100)); + } + + await manager.destroy(); + }); + }); +}); diff --git a/packages/api/src/stream/implementations/InMemoryEventTransport.ts b/packages/api/src/stream/implementations/InMemoryEventTransport.ts index 39b3d6029d..c2e7ba01fd 100644 --- a/packages/api/src/stream/implementations/InMemoryEventTransport.ts +++ b/packages/api/src/stream/implementations/InMemoryEventTransport.ts @@ -32,7 +32,7 @@ export class InMemoryEventTransport implements IEventTransport { onDone?: (event: unknown) => void; onError?: (error: string) => void; }, - ): { unsubscribe: () => void } { + ): { unsubscribe: () => void; ready?: Promise } { const state = this.getOrCreateStream(streamId); const chunkHandler = (event: unknown) => handlers.onChunk(event); @@ -58,9 +58,11 @@ export class InMemoryEventTransport implements IEventTransport { // Check if all subscribers left - cleanup and notify if (currentState.emitter.listenerCount('chunk') === 0) { currentState.allSubscribersLeftCallback?.(); - // Auto-cleanup the stream entry when no subscribers remain + /* Remove all EventEmitter listeners but preserve stream state + * (including allSubscribersLeftCallback) for reconnection. + * State is fully cleaned up by cleanup() when the job completes. + */ currentState.emitter.removeAllListeners(); - this.streams.delete(streamId); } } }, diff --git a/packages/api/src/stream/implementations/RedisEventTransport.ts b/packages/api/src/stream/implementations/RedisEventTransport.ts index 78f545c18e..3682a9a749 100644 --- a/packages/api/src/stream/implementations/RedisEventTransport.ts +++ b/packages/api/src/stream/implementations/RedisEventTransport.ts @@ -92,8 +92,8 @@ export class RedisEventTransport implements IEventTransport { private subscriber: Redis | Cluster; /** Track subscribers per stream */ private streams = new Map(); - /** Track which channels we're subscribed to */ - private subscribedChannels = new Set(); + /** Track channel subscription state: resolved promise = active, pending = in-flight */ + private channelSubscriptions = new Map>(); /** Counter for generating unique subscriber IDs */ private subscriberIdCounter = 0; /** Sequence counters per stream for publishing (ensures ordered delivery in cluster mode) */ @@ -122,9 +122,32 @@ export class RedisEventTransport implements IEventTransport { return current; } - /** Reset sequence counter for a stream */ - private resetSequence(streamId: string): void { + /** Reset publish sequence counter and subscriber reorder state for a stream (full cleanup only) */ + resetSequence(streamId: string): void { this.sequenceCounters.delete(streamId); + const state = this.streams.get(streamId); + if (state) { + if (state.reorderBuffer.flushTimeout) { + clearTimeout(state.reorderBuffer.flushTimeout); + state.reorderBuffer.flushTimeout = null; + } + state.reorderBuffer.nextSeq = 0; + state.reorderBuffer.pending.clear(); + } + } + + /** Advance subscriber reorder buffer to current publisher sequence without resetting publisher (cross-replica safe) */ + syncReorderBuffer(streamId: string): void { + const currentSeq = this.sequenceCounters.get(streamId) ?? 0; + const state = this.streams.get(streamId); + if (state) { + if (state.reorderBuffer.flushTimeout) { + clearTimeout(state.reorderBuffer.flushTimeout); + state.reorderBuffer.flushTimeout = null; + } + state.reorderBuffer.nextSeq = currentSeq; + state.reorderBuffer.pending.clear(); + } } /** @@ -331,7 +354,7 @@ export class RedisEventTransport implements IEventTransport { onDone?: (event: unknown) => void; onError?: (error: string) => void; }, - ): { unsubscribe: () => void } { + ): { unsubscribe: () => void; ready?: Promise } { const channel = CHANNELS.events(streamId); const subscriberId = `sub_${++this.subscriberIdCounter}`; @@ -354,16 +377,23 @@ export class RedisEventTransport implements IEventTransport { streamState.count++; streamState.handlers.set(subscriberId, handlers); - // Subscribe to Redis channel if this is first subscriber - if (!this.subscribedChannels.has(channel)) { - this.subscribedChannels.add(channel); - this.subscriber.subscribe(channel).catch((err) => { - logger.error(`[RedisEventTransport] Failed to subscribe to ${channel}:`, err); - }); + let readyPromise = this.channelSubscriptions.get(channel); + + if (!readyPromise) { + readyPromise = this.subscriber + .subscribe(channel) + .then(() => { + logger.debug(`[RedisEventTransport] Subscription active for channel ${channel}`); + }) + .catch((err) => { + this.channelSubscriptions.delete(channel); + logger.error(`[RedisEventTransport] Failed to subscribe to ${channel}:`, err); + }); + this.channelSubscriptions.set(channel, readyPromise); } - // Return unsubscribe function return { + ready: readyPromise, unsubscribe: () => { const state = this.streams.get(streamId); if (!state) { @@ -385,7 +415,7 @@ export class RedisEventTransport implements IEventTransport { this.subscriber.unsubscribe(channel).catch((err) => { logger.error(`[RedisEventTransport] Failed to unsubscribe from ${channel}:`, err); }); - this.subscribedChannels.delete(channel); + this.channelSubscriptions.delete(channel); // Call all-subscribers-left callbacks for (const callback of state.allSubscribersLeftCallbacks) { @@ -395,8 +425,15 @@ export class RedisEventTransport implements IEventTransport { logger.error(`[RedisEventTransport] Error in allSubscribersLeft callback:`, err); } } - - this.streams.delete(streamId); + /** + * Preserve stream state (callbacks, abort handlers) for reconnection. + * Previously this deleted the entire state, which lost the + * allSubscribersLeftCallbacks and abortCallbacks registered by + * GenerationJobManager.createJob(). On the next subscribe() call, + * fresh state was created without those callbacks, causing + * hasSubscriber to never reset and syncReorderBuffer to be skipped. + * State is fully cleaned up by cleanup() when the job completes. + */ } }, }; @@ -431,6 +468,7 @@ export class RedisEventTransport implements IEventTransport { await this.publisher.publish(channel, JSON.stringify(message)); } catch (err) { logger.error(`[RedisEventTransport] Failed to publish done:`, err); + throw err; } } @@ -447,6 +485,7 @@ export class RedisEventTransport implements IEventTransport { await this.publisher.publish(channel, JSON.stringify(message)); } catch (err) { logger.error(`[RedisEventTransport] Failed to publish error:`, err); + throw err; } } @@ -532,12 +571,15 @@ export class RedisEventTransport implements IEventTransport { state.abortCallbacks.push(callback); - // Subscribe to Redis channel if not already subscribed - if (!this.subscribedChannels.has(channel)) { - this.subscribedChannels.add(channel); - this.subscriber.subscribe(channel).catch((err) => { - logger.error(`[RedisEventTransport] Failed to subscribe to ${channel}:`, err); - }); + if (!this.channelSubscriptions.has(channel)) { + const ready = this.subscriber + .subscribe(channel) + .then(() => {}) + .catch((err) => { + this.channelSubscriptions.delete(channel); + logger.error(`[RedisEventTransport] Failed to subscribe to ${channel}:`, err); + }); + this.channelSubscriptions.set(channel, ready); } } @@ -571,12 +613,11 @@ export class RedisEventTransport implements IEventTransport { // Reset sequence counter for this stream this.resetSequence(streamId); - // Unsubscribe from Redis channel - if (this.subscribedChannels.has(channel)) { + if (this.channelSubscriptions.has(channel)) { this.subscriber.unsubscribe(channel).catch((err) => { logger.error(`[RedisEventTransport] Failed to cleanup ${channel}:`, err); }); - this.subscribedChannels.delete(channel); + this.channelSubscriptions.delete(channel); } this.streams.delete(streamId); @@ -595,18 +636,20 @@ export class RedisEventTransport implements IEventTransport { state.reorderBuffer.pending.clear(); } - // Unsubscribe from all channels - for (const channel of this.subscribedChannels) { - this.subscriber.unsubscribe(channel).catch(() => { - // Ignore errors during shutdown - }); + for (const channel of this.channelSubscriptions.keys()) { + this.subscriber.unsubscribe(channel).catch(() => {}); } - this.subscribedChannels.clear(); + this.channelSubscriptions.clear(); this.streams.clear(); this.sequenceCounters.clear(); - // Note: Don't close Redis connections - they may be shared + try { + this.subscriber.disconnect(); + } catch { + /* ignore */ + } + logger.info('[RedisEventTransport] Destroyed'); } } diff --git a/packages/api/src/stream/implementations/RedisJobStore.ts b/packages/api/src/stream/implementations/RedisJobStore.ts index cce636d5a1..727fe066eb 100644 --- a/packages/api/src/stream/implementations/RedisJobStore.ts +++ b/packages/api/src/stream/implementations/RedisJobStore.ts @@ -156,13 +156,13 @@ export class RedisJobStore implements IJobStore { // For cluster mode, we can't pipeline keys on different slots // The job key uses hash tag {streamId}, runningJobs and userJobs are on different slots if (this.isCluster) { - await this.redis.hmset(key, this.serializeJob(job)); + await this.redis.hset(key, this.serializeJob(job)); await this.redis.expire(key, this.ttl.running); await this.redis.sadd(KEYS.runningJobs, streamId); await this.redis.sadd(userJobsKey, streamId); } else { const pipeline = this.redis.pipeline(); - pipeline.hmset(key, this.serializeJob(job)); + pipeline.hset(key, this.serializeJob(job)); pipeline.expire(key, this.ttl.running); pipeline.sadd(KEYS.runningJobs, streamId); pipeline.sadd(userJobsKey, streamId); @@ -183,17 +183,23 @@ export class RedisJobStore implements IJobStore { async updateJob(streamId: string, updates: Partial): Promise { const key = KEYS.job(streamId); - const exists = await this.redis.exists(key); - if (!exists) { - return; - } const serialized = this.serializeJob(updates as SerializableJobData); if (Object.keys(serialized).length === 0) { return; } - await this.redis.hmset(key, serialized); + const fields = Object.entries(serialized).flat(); + const updated = await this.redis.eval( + 'if redis.call("EXISTS", KEYS[1]) == 1 then redis.call("HSET", KEYS[1], unpack(ARGV)) return 1 else return 0 end', + 1, + key, + ...fields, + ); + + if (updated === 0) { + return; + } // If status changed to complete/error/aborted, update TTL and remove from running set // Note: userJobs cleanup is handled lazily via self-healing in getActiveJobIdsByUser @@ -296,32 +302,46 @@ export class RedisJobStore implements IJobStore { } } - for (const streamId of streamIds) { - const job = await this.getJob(streamId); + // Process in batches of 50 to avoid sequential per-job round-trips + const BATCH_SIZE = 50; + for (let i = 0; i < streamIds.length; i += BATCH_SIZE) { + const batch = streamIds.slice(i, i + BATCH_SIZE); + const results = await Promise.allSettled( + batch.map(async (streamId) => { + const job = await this.getJob(streamId); - // Job no longer exists (TTL expired) - remove from set - if (!job) { - await this.redis.srem(KEYS.runningJobs, streamId); - this.localGraphCache.delete(streamId); - this.localCollectedUsageCache.delete(streamId); - cleaned++; - continue; - } + // Job no longer exists (TTL expired) - remove from set + if (!job) { + await this.redis.srem(KEYS.runningJobs, streamId); + this.localGraphCache.delete(streamId); + this.localCollectedUsageCache.delete(streamId); + return 1; + } - // Job completed but still in running set (shouldn't happen, but handle it) - if (job.status !== 'running') { - await this.redis.srem(KEYS.runningJobs, streamId); - this.localGraphCache.delete(streamId); - this.localCollectedUsageCache.delete(streamId); - cleaned++; - continue; - } + // Job completed but still in running set (shouldn't happen, but handle it) + if (job.status !== 'running') { + await this.redis.srem(KEYS.runningJobs, streamId); + this.localGraphCache.delete(streamId); + this.localCollectedUsageCache.delete(streamId); + return 1; + } - // Stale running job (failsafe - running for > configured TTL) - if (now - job.createdAt > this.ttl.running * 1000) { - logger.warn(`[RedisJobStore] Cleaning up stale job: ${streamId}`); - await this.deleteJob(streamId); - cleaned++; + // Stale running job (failsafe - running for > configured TTL) + if (now - job.createdAt > this.ttl.running * 1000) { + logger.warn(`[RedisJobStore] Cleaning up stale job: ${streamId}`); + await this.deleteJob(streamId); + return 1; + } + + return 0; + }), + ); + for (const result of results) { + if (result.status === 'fulfilled') { + cleaned += result.value; + } else { + logger.warn(`[RedisJobStore] Cleanup failed for a job:`, result.reason); + } } } @@ -586,16 +606,14 @@ export class RedisJobStore implements IJobStore { */ async appendChunk(streamId: string, event: unknown): Promise { const key = KEYS.chunks(streamId); - const added = await this.redis.xadd(key, '*', 'event', JSON.stringify(event)); - - // Set TTL on first chunk (when stream is created) - // Subsequent chunks inherit the stream's TTL - if (added) { - const len = await this.redis.xlen(key); - if (len === 1) { - await this.redis.expire(key, this.ttl.running); - } - } + // Pipeline XADD + EXPIRE in a single round-trip. + // EXPIRE is O(1) and idempotent — refreshing TTL on every chunk is better than + // only setting it once, since the original approach could let the TTL expire + // during long-running streams. + const pipeline = this.redis.pipeline(); + pipeline.xadd(key, '*', 'event', JSON.stringify(event)); + pipeline.expire(key, this.ttl.running); + await pipeline.exec(); } /** diff --git a/packages/api/src/stream/interfaces/IJobStore.ts b/packages/api/src/stream/interfaces/IJobStore.ts index d990283925..5486b941eb 100644 --- a/packages/api/src/stream/interfaces/IJobStore.ts +++ b/packages/api/src/stream/interfaces/IJobStore.ts @@ -286,7 +286,7 @@ export interface IJobStore { * Implementations can use EventEmitter, Redis Pub/Sub, etc. */ export interface IEventTransport { - /** Subscribe to events for a stream */ + /** Subscribe to events for a stream. `ready` resolves once the transport can receive messages. */ subscribe( streamId: string, handlers: { @@ -294,7 +294,7 @@ export interface IEventTransport { onDone?: (event: unknown) => void; onError?: (error: string) => void; }, - ): { unsubscribe: () => void }; + ): { unsubscribe: () => void; ready?: Promise }; /** Publish a chunk event - returns Promise in Redis mode for ordered delivery */ emitChunk(streamId: string, event: unknown): void | Promise; @@ -329,6 +329,12 @@ export interface IEventTransport { /** Listen for all subscribers leaving */ onAllSubscribersLeft(streamId: string, callback: () => void): void; + /** Reset publish sequence counter for a stream (used during full stream cleanup) */ + resetSequence?(streamId: string): void; + + /** Advance subscriber reorder buffer to match publisher sequence (cross-replica safe: doesn't reset publisher counter) */ + syncReorderBuffer?(streamId: string): void; + /** Cleanup transport resources for a specific stream */ cleanup(streamId: string): void; diff --git a/packages/api/src/tools/definitions.ts b/packages/api/src/tools/definitions.ts index 97312883f0..a5b35ac7d8 100644 --- a/packages/api/src/tools/definitions.ts +++ b/packages/api/src/tools/definitions.ts @@ -8,9 +8,10 @@ import { Constants, actionDelimiter } from 'librechat-data-provider'; import type { AgentToolOptions } from 'librechat-data-provider'; import type { LCToolRegistry, JsonSchemaType, LCTool, GenericTool } from '@librechat/agents'; -import { buildToolClassification, type ToolDefinition } from './classification'; +import type { ToolDefinition } from './classification'; +import { resolveJsonSchemaRefs, normalizeJsonSchema } from '~/mcp/zod'; +import { buildToolClassification } from './classification'; import { getToolDefinition } from './registry/definitions'; -import { resolveJsonSchemaRefs } from '~/mcp/zod'; export interface MCPServerTool { function?: { @@ -138,7 +139,7 @@ export async function loadToolDefinitions( name: actualToolName, description: toolDef.function.description, parameters: toolDef.function.parameters - ? resolveJsonSchemaRefs(toolDef.function.parameters) + ? normalizeJsonSchema(resolveJsonSchemaRefs(toolDef.function.parameters)) : undefined, serverName, }); @@ -153,7 +154,7 @@ export async function loadToolDefinitions( name: toolName, description: toolDef.function.description, parameters: toolDef.function.parameters - ? resolveJsonSchemaRefs(toolDef.function.parameters) + ? normalizeJsonSchema(resolveJsonSchemaRefs(toolDef.function.parameters)) : undefined, serverName, }); diff --git a/packages/api/src/types/http.ts b/packages/api/src/types/http.ts index 6544447310..c304e9089e 100644 --- a/packages/api/src/types/http.ts +++ b/packages/api/src/types/http.ts @@ -1,5 +1,6 @@ -import type { Request } from 'express'; import type { IUser, AppConfig } from '@librechat/data-schemas'; +import type { TEndpointOption } from 'librechat-data-provider'; +import type { Request } from 'express'; /** * LibreChat-specific request body type that extends Express Request body @@ -11,8 +12,10 @@ export type RequestBody = { conversationId?: string; parentMessageId?: string; endpoint?: string; + endpointType?: string; model?: string; key?: string; + endpointOption?: Partial; }; export type ServerRequest = Request & { diff --git a/packages/api/src/utils/oidc.spec.ts b/packages/api/src/utils/oidc.spec.ts index a5312e9c69..0d7216304b 100644 --- a/packages/api/src/utils/oidc.spec.ts +++ b/packages/api/src/utils/oidc.spec.ts @@ -427,6 +427,35 @@ describe('OpenID Token Utilities', () => { expect(result).toContain('User:'); }); + it('should resolve LIBRECHAT_OPENID_ID_TOKEN and LIBRECHAT_OPENID_ACCESS_TOKEN to different values', () => { + const user: Partial = { + id: 'user-123', + provider: 'openid', + openidId: 'oidc-sub-456', + email: 'test@example.com', + name: 'Test User', + federatedTokens: { + access_token: 'my-access-token', + id_token: 'my-id-token', + refresh_token: 'my-refresh-token', + expires_at: Math.floor(Date.now() / 1000) + 3600, + }, + }; + + const tokenInfo = extractOpenIDTokenInfo(user); + expect(tokenInfo).not.toBeNull(); + expect(tokenInfo!.accessToken).toBe('my-access-token'); + expect(tokenInfo!.idToken).toBe('my-id-token'); + expect(tokenInfo!.accessToken).not.toBe(tokenInfo!.idToken); + + const input = 'ACCESS={{LIBRECHAT_OPENID_ACCESS_TOKEN}}, ID={{LIBRECHAT_OPENID_ID_TOKEN}}'; + const result = processOpenIDPlaceholders(input, tokenInfo!); + + expect(result).toBe('ACCESS=my-access-token, ID=my-id-token'); + // Verify they are not the same value (the reported bug) + expect(result).not.toBe('ACCESS=my-access-token, ID=my-access-token'); + }); + it('should handle expired tokens correctly', () => { const user: Partial = { id: 'user-123', diff --git a/packages/api/src/utils/tokens.ts b/packages/api/src/utils/tokens.ts index 49f1640a7a..a824afa489 100644 --- a/packages/api/src/utils/tokens.ts +++ b/packages/api/src/utils/tokens.ts @@ -148,6 +148,7 @@ const anthropicModels = { 'claude-3.5-sonnet-latest': 200000, 'claude-haiku-4-5': 200000, 'claude-sonnet-4': 1000000, + 'claude-sonnet-4-6': 1000000, 'claude-4': 200000, 'claude-opus-4': 200000, 'claude-opus-4-5': 200000, @@ -197,6 +198,8 @@ const moonshotModels = { 'moonshot.kimi-k2.5': 262144, 'moonshot.kimi-k2-thinking': 262144, 'moonshot.kimi-k2-0711': 131072, + 'moonshotai.kimi': 262144, + 'moonshotai.kimi-k2.5': 262144, }; const metaModels = { @@ -308,6 +311,11 @@ const amazonModels = { 'nova-premier': 995000, // -5000 from max }; +const openAIBedrockModels = { + 'openai.gpt-oss-20b': 128000, + 'openai.gpt-oss-120b': 128000, +}; + const bedrockModels = { ...anthropicModels, ...mistralModels, @@ -317,6 +325,7 @@ const bedrockModels = { ...metaModels, ...ai21Models, ...amazonModels, + ...openAIBedrockModels, }; const xAIModels = { @@ -393,6 +402,7 @@ const anthropicMaxOutputs = { 'claude-3-opus': 4096, 'claude-haiku-4-5': 64000, 'claude-sonnet-4': 64000, + 'claude-sonnet-4-6': 64000, 'claude-opus-4': 32000, 'claude-opus-4-5': 64000, 'claude-opus-4-6': 128000, diff --git a/packages/client/src/components/OGDialogTemplate.tsx b/packages/client/src/components/OGDialogTemplate.tsx index 8bf2cea090..300ae5b194 100644 --- a/packages/client/src/components/OGDialogTemplate.tsx +++ b/packages/client/src/components/OGDialogTemplate.tsx @@ -1,4 +1,4 @@ -import { forwardRef, ReactNode, Ref } from 'react'; +import { forwardRef, isValidElement, ReactNode, Ref } from 'react'; import { OGDialogTitle, OGDialogClose, @@ -19,13 +19,39 @@ type SelectionProps = { isLoading?: boolean; }; +/** + * Type guard to check if selection is a legacy SelectionProps object + */ +function isSelectionProps(selection: unknown): selection is SelectionProps { + return ( + typeof selection === 'object' && + selection !== null && + !isValidElement(selection) && + ('selectHandler' in selection || + 'selectClasses' in selection || + 'selectText' in selection || + 'isLoading' in selection) + ); +} + type DialogTemplateProps = { title: string; description?: string; main?: ReactNode; buttons?: ReactNode; leftButtons?: ReactNode; - selection?: SelectionProps; + /** + * Selection button configuration. Can be either: + * - An object with selectHandler, selectClasses, selectText, isLoading (legacy) + * - A ReactNode for custom selection component + * @example + * // Legacy usage + * selection={{ selectHandler: () => {}, selectText: 'Confirm' }} + * @example + * // Custom component + * selection={} + */ + selection?: SelectionProps | ReactNode; className?: string; overlayClassName?: string; headerClassName?: string; @@ -49,14 +75,39 @@ const OGDialogTemplate = forwardRef((props: DialogTemplateProps, ref: Ref + {isLoading === true ? ( + + ) : ( + (selectText as React.JSX.Element) + )} + + ); + } else if (selection) { + selectionContent = selection; + } + return (
{main != null ? main : null}
-
- {leftButtons != null ? ( -
- {leftButtons} -
- ) : null} -
-
- {showCancelButton && ( - - - - )} - {buttons != null ? buttons : null} - {selection ? ( - - {isLoading === true ? ( - - ) : ( - (selectText as React.JSX.Element) - )} - - ) : null} -
+ {leftButtons != null ? ( +
{leftButtons}
+ ) : null} + {showCancelButton && ( + + + + )} + {buttons != null ? buttons : null} + {selectionContent}
); diff --git a/packages/client/src/components/Radio.tsx b/packages/client/src/components/Radio.tsx index b4c9c21259..2f52387981 100644 --- a/packages/client/src/components/Radio.tsx +++ b/packages/client/src/components/Radio.tsx @@ -14,6 +14,7 @@ interface RadioProps { disabled?: boolean; className?: string; fullWidth?: boolean; + 'aria-labelledby'?: string; } const Radio = memo(function Radio({ @@ -23,6 +24,7 @@ const Radio = memo(function Radio({ disabled = false, className = '', fullWidth = false, + 'aria-labelledby': ariaLabelledBy, }: RadioProps) { const localize = useLocalize(); const buttonRefs = useRef<(HTMLButtonElement | null)[]>([]); @@ -79,6 +81,7 @@ const Radio = memo(function Radio({
{localize('com_ui_no_options')} @@ -93,6 +96,7 @@ const Radio = memo(function Radio({
{selectedIndex >= 0 && isMounted && (
= { system: