From 3af2666890bbf291cb7b9f3e03592d54714f0ff5 Mon Sep 17 00:00:00 2001 From: Danny Avila Date: Sat, 14 Jun 2025 11:24:30 -0400 Subject: [PATCH 01/16] =?UTF-8?q?=F0=9F=AA=90=20refactor:=20Migrate=20Shar?= =?UTF-8?q?e=20Functionality=20to=20Type-Safe=20Methods=20(#7903)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update import for isEnabled utility in convoAccess middleware * refactor: Migrate Share functionality to new methods structure in `@librechat/data-schemas` - Deleted the old Share.js model and moved its functionality to a new share.ts file within the data-schemas package. - Updated imports across the codebase to reflect the new structure. - Enhanced error handling and logging in shared link operations. - Introduced TypeScript types for shared links and related operations to improve type safety and maintainability. * chore: Update promptGroupSchema validation with typing * fix: error handling and logging in createSharedLink * fix: don't allow empty shared link or shared link without messages * ci: add tests for shared link methods * chore: Bump version of @librechat/data-schemas to 0.0.9 in package.json and package-lock.json * chore: Add nanoid as peer dependency - Introduced `nanoid` as a dependency in `package.json` and `package-lock.json`. - Replaced UUID generation with `nanoid` for creating unique conversation and message IDs in share methods tests. --- api/models/Share.js | 346 ------ api/server/controllers/UserController.js | 2 +- api/server/middleware/validate/convoAccess.js | 2 +- api/server/routes/share.js | 23 +- package-lock.json | 154 ++- packages/data-schemas/package.json | 10 +- packages/data-schemas/src/methods/index.ts | 11 +- .../data-schemas/src/methods/share.test.ts | 1043 +++++++++++++++++ packages/data-schemas/src/methods/share.ts | 442 +++++++ .../src/models/plugins/mongoMeili.ts | 25 +- .../data-schemas/src/schema/promptGroup.ts | 6 +- packages/data-schemas/src/types/index.ts | 1 + packages/data-schemas/src/types/share.ts | 66 ++ 13 files changed, 1720 insertions(+), 411 deletions(-) delete mode 100644 api/models/Share.js create mode 100644 packages/data-schemas/src/methods/share.test.ts create mode 100644 packages/data-schemas/src/methods/share.ts create mode 100644 packages/data-schemas/src/types/share.ts diff --git a/api/models/Share.js b/api/models/Share.js deleted file mode 100644 index f8712c36ac..0000000000 --- a/api/models/Share.js +++ /dev/null @@ -1,346 +0,0 @@ -const { nanoid } = require('nanoid'); -const { logger } = require('@librechat/data-schemas'); -const { Constants } = require('librechat-data-provider'); -const { Conversation, SharedLink } = require('~/db/models'); -const { getMessages } = require('./Message'); - -class ShareServiceError extends Error { - constructor(message, code) { - super(message); - this.name = 'ShareServiceError'; - this.code = code; - } -} - -const memoizedAnonymizeId = (prefix) => { - const memo = new Map(); - return (id) => { - if (!memo.has(id)) { - memo.set(id, `${prefix}_${nanoid()}`); - } - return memo.get(id); - }; -}; - -const anonymizeConvoId = memoizedAnonymizeId('convo'); -const anonymizeAssistantId = memoizedAnonymizeId('a'); -const anonymizeMessageId = (id) => - id === Constants.NO_PARENT ? 
id : memoizedAnonymizeId('msg')(id); - -function anonymizeConvo(conversation) { - if (!conversation) { - return null; - } - - const newConvo = { ...conversation }; - if (newConvo.assistant_id) { - newConvo.assistant_id = anonymizeAssistantId(newConvo.assistant_id); - } - return newConvo; -} - -function anonymizeMessages(messages, newConvoId) { - if (!Array.isArray(messages)) { - return []; - } - - const idMap = new Map(); - return messages.map((message) => { - const newMessageId = anonymizeMessageId(message.messageId); - idMap.set(message.messageId, newMessageId); - - const anonymizedAttachments = message.attachments?.map((attachment) => { - return { - ...attachment, - messageId: newMessageId, - conversationId: newConvoId, - }; - }); - - return { - ...message, - messageId: newMessageId, - parentMessageId: - idMap.get(message.parentMessageId) || anonymizeMessageId(message.parentMessageId), - conversationId: newConvoId, - model: message.model?.startsWith('asst_') - ? anonymizeAssistantId(message.model) - : message.model, - attachments: anonymizedAttachments, - }; - }); -} - -async function getSharedMessages(shareId) { - try { - const share = await SharedLink.findOne({ shareId, isPublic: true }) - .populate({ - path: 'messages', - select: '-_id -__v -user', - }) - .select('-_id -__v -user') - .lean(); - - if (!share?.conversationId || !share.isPublic) { - return null; - } - - const newConvoId = anonymizeConvoId(share.conversationId); - const result = { - ...share, - conversationId: newConvoId, - messages: anonymizeMessages(share.messages, newConvoId), - }; - - return result; - } catch (error) { - logger.error('[getShare] Error getting share link', { - error: error.message, - shareId, - }); - throw new ShareServiceError('Error getting share link', 'SHARE_FETCH_ERROR'); - } -} - -async function getSharedLinks(user, pageParam, pageSize, isPublic, sortBy, sortDirection, search) { - try { - const query = { user, isPublic }; - - if (pageParam) { - if (sortDirection === 'desc') { - query[sortBy] = { $lt: pageParam }; - } else { - query[sortBy] = { $gt: pageParam }; - } - } - - if (search && search.trim()) { - try { - const searchResults = await Conversation.meiliSearch(search); - - if (!searchResults?.hits?.length) { - return { - links: [], - nextCursor: undefined, - hasNextPage: false, - }; - } - - const conversationIds = searchResults.hits.map((hit) => hit.conversationId); - query['conversationId'] = { $in: conversationIds }; - } catch (searchError) { - logger.error('[getSharedLinks] Meilisearch error', { - error: searchError.message, - user, - }); - return { - links: [], - nextCursor: undefined, - hasNextPage: false, - }; - } - } - - const sort = {}; - sort[sortBy] = sortDirection === 'desc' ? -1 : 1; - - if (Array.isArray(query.conversationId)) { - query.conversationId = { $in: query.conversationId }; - } - - const sharedLinks = await SharedLink.find(query) - .sort(sort) - .limit(pageSize + 1) - .select('-__v -user') - .lean(); - - const hasNextPage = sharedLinks.length > pageSize; - const links = sharedLinks.slice(0, pageSize); - - const nextCursor = hasNextPage ? 
links[links.length - 1][sortBy] : undefined; - - return { - links: links.map((link) => ({ - shareId: link.shareId, - title: link?.title || 'Untitled', - isPublic: link.isPublic, - createdAt: link.createdAt, - conversationId: link.conversationId, - })), - nextCursor, - hasNextPage, - }; - } catch (error) { - logger.error('[getSharedLinks] Error getting shares', { - error: error.message, - user, - }); - throw new ShareServiceError('Error getting shares', 'SHARES_FETCH_ERROR'); - } -} - -async function deleteAllSharedLinks(user) { - try { - const result = await SharedLink.deleteMany({ user }); - return { - message: 'All shared links deleted successfully', - deletedCount: result.deletedCount, - }; - } catch (error) { - logger.error('[deleteAllSharedLinks] Error deleting shared links', { - error: error.message, - user, - }); - throw new ShareServiceError('Error deleting shared links', 'BULK_DELETE_ERROR'); - } -} - -async function createSharedLink(user, conversationId) { - if (!user || !conversationId) { - throw new ShareServiceError('Missing required parameters', 'INVALID_PARAMS'); - } - try { - const [existingShare, conversationMessages] = await Promise.all([ - SharedLink.findOne({ conversationId, isPublic: true }).select('-_id -__v -user').lean(), - getMessages({ conversationId }), - ]); - - if (existingShare && existingShare.isPublic) { - throw new ShareServiceError('Share already exists', 'SHARE_EXISTS'); - } else if (existingShare) { - await SharedLink.deleteOne({ conversationId }); - } - - const conversation = await Conversation.findOne({ conversationId }).lean(); - const title = conversation?.title || 'Untitled'; - - const shareId = nanoid(); - await SharedLink.create({ - shareId, - conversationId, - messages: conversationMessages, - title, - user, - }); - - return { shareId, conversationId }; - } catch (error) { - logger.error('[createSharedLink] Error creating shared link', { - error: error.message, - user, - conversationId, - }); - throw new ShareServiceError('Error creating shared link', 'SHARE_CREATE_ERROR'); - } -} - -async function getSharedLink(user, conversationId) { - if (!user || !conversationId) { - throw new ShareServiceError('Missing required parameters', 'INVALID_PARAMS'); - } - - try { - const share = await SharedLink.findOne({ conversationId, user, isPublic: true }) - .select('shareId -_id') - .lean(); - - if (!share) { - return { shareId: null, success: false }; - } - - return { shareId: share.shareId, success: true }; - } catch (error) { - logger.error('[getSharedLink] Error getting shared link', { - error: error.message, - user, - conversationId, - }); - throw new ShareServiceError('Error getting shared link', 'SHARE_FETCH_ERROR'); - } -} - -async function updateSharedLink(user, shareId) { - if (!user || !shareId) { - throw new ShareServiceError('Missing required parameters', 'INVALID_PARAMS'); - } - - try { - const share = await SharedLink.findOne({ shareId }).select('-_id -__v -user').lean(); - - if (!share) { - throw new ShareServiceError('Share not found', 'SHARE_NOT_FOUND'); - } - - const [updatedMessages] = await Promise.all([ - getMessages({ conversationId: share.conversationId }), - ]); - - const newShareId = nanoid(); - const update = { - messages: updatedMessages, - user, - shareId: newShareId, - }; - - const updatedShare = await SharedLink.findOneAndUpdate({ shareId, user }, update, { - new: true, - upsert: false, - runValidators: true, - }).lean(); - - if (!updatedShare) { - throw new ShareServiceError('Share update failed', 'SHARE_UPDATE_ERROR'); - } - - 
anonymizeConvo(updatedShare); - - return { shareId: newShareId, conversationId: updatedShare.conversationId }; - } catch (error) { - logger.error('[updateSharedLink] Error updating shared link', { - error: error.message, - user, - shareId, - }); - throw new ShareServiceError( - error.code === 'SHARE_UPDATE_ERROR' ? error.message : 'Error updating shared link', - error.code || 'SHARE_UPDATE_ERROR', - ); - } -} - -async function deleteSharedLink(user, shareId) { - if (!user || !shareId) { - throw new ShareServiceError('Missing required parameters', 'INVALID_PARAMS'); - } - - try { - const result = await SharedLink.findOneAndDelete({ shareId, user }).lean(); - - if (!result) { - return null; - } - - return { - success: true, - shareId, - message: 'Share deleted successfully', - }; - } catch (error) { - logger.error('[deleteSharedLink] Error deleting shared link', { - error: error.message, - user, - shareId, - }); - throw new ShareServiceError('Error deleting shared link', 'SHARE_DELETE_ERROR'); - } -} - -module.exports = { - getSharedLink, - getSharedLinks, - createSharedLink, - updateSharedLink, - deleteSharedLink, - getSharedMessages, - deleteAllSharedLinks, -}; diff --git a/api/server/controllers/UserController.js b/api/server/controllers/UserController.js index 4577d20159..bcffb2189c 100644 --- a/api/server/controllers/UserController.js +++ b/api/server/controllers/UserController.js @@ -21,8 +21,8 @@ const { verifyEmail, resendVerificationEmail } = require('~/server/services/Auth const { needsRefresh, getNewS3URL } = require('~/server/services/Files/S3/crud'); const { processDeleteRequest } = require('~/server/services/Files/process'); const { Transaction, Balance, User } = require('~/db/models'); -const { deleteAllSharedLinks } = require('~/models/Share'); const { deleteToolCalls } = require('~/models/ToolCall'); +const { deleteAllSharedLinks } = require('~/models'); const getUserController = async (req, res) => { /** @type {MongoUser} */ diff --git a/api/server/middleware/validate/convoAccess.js b/api/server/middleware/validate/convoAccess.js index 43cca0097d..afd2aeacef 100644 --- a/api/server/middleware/validate/convoAccess.js +++ b/api/server/middleware/validate/convoAccess.js @@ -1,8 +1,8 @@ +const { isEnabled } = require('@librechat/api'); const { Constants, ViolationTypes, Time } = require('librechat-data-provider'); const { searchConversation } = require('~/models/Conversation'); const denyRequest = require('~/server/middleware/denyRequest'); const { logViolation, getLogStores } = require('~/cache'); -const { isEnabled } = require('~/server/utils'); const { USE_REDIS, CONVO_ACCESS_VIOLATION_SCORE: score = 0 } = process.env ?? 
{}; diff --git a/api/server/routes/share.js b/api/server/routes/share.js index e551f4a354..14c25271fc 100644 --- a/api/server/routes/share.js +++ b/api/server/routes/share.js @@ -1,15 +1,15 @@ const express = require('express'); - +const { isEnabled } = require('@librechat/api'); +const { logger } = require('@librechat/data-schemas'); const { - getSharedLink, getSharedMessages, createSharedLink, updateSharedLink, - getSharedLinks, deleteSharedLink, -} = require('~/models/Share'); + getSharedLinks, + getSharedLink, +} = require('~/models'); const requireJwtAuth = require('~/server/middleware/requireJwtAuth'); -const { isEnabled } = require('~/server/utils'); const router = express.Router(); /** @@ -35,6 +35,7 @@ if (allowSharedLinks) { res.status(404).end(); } } catch (error) { + logger.error('Error getting shared messages:', error); res.status(500).json({ message: 'Error getting shared messages' }); } }, @@ -54,9 +55,7 @@ router.get('/', requireJwtAuth, async (req, res) => { sortDirection: ['asc', 'desc'].includes(req.query.sortDirection) ? req.query.sortDirection : 'desc', - search: req.query.search - ? decodeURIComponent(req.query.search.trim()) - : undefined, + search: req.query.search ? decodeURIComponent(req.query.search.trim()) : undefined, }; const result = await getSharedLinks( @@ -75,7 +74,7 @@ router.get('/', requireJwtAuth, async (req, res) => { hasNextPage: result.hasNextPage, }); } catch (error) { - console.error('Error getting shared links:', error); + logger.error('Error getting shared links:', error); res.status(500).json({ message: 'Error getting shared links', error: error.message, @@ -93,6 +92,7 @@ router.get('/link/:conversationId', requireJwtAuth, async (req, res) => { conversationId: req.params.conversationId, }); } catch (error) { + logger.error('Error getting shared link:', error); res.status(500).json({ message: 'Error getting shared link' }); } }); @@ -106,6 +106,7 @@ router.post('/:conversationId', requireJwtAuth, async (req, res) => { res.status(404).end(); } } catch (error) { + logger.error('Error creating shared link:', error); res.status(500).json({ message: 'Error creating shared link' }); } }); @@ -119,6 +120,7 @@ router.patch('/:shareId', requireJwtAuth, async (req, res) => { res.status(404).end(); } } catch (error) { + logger.error('Error updating shared link:', error); res.status(500).json({ message: 'Error updating shared link' }); } }); @@ -133,7 +135,8 @@ router.delete('/:shareId', requireJwtAuth, async (req, res) => { return res.status(200).json(result); } catch (error) { - return res.status(400).json({ message: error.message }); + logger.error('Error deleting shared link:', error); + return res.status(400).json({ message: 'Error deleting shared link' }); } }); diff --git a/package-lock.json b/package-lock.json index 62ff311767..50dacbb5c0 100644 --- a/package-lock.json +++ b/package-lock.json @@ -2142,27 +2142,6 @@ "node": ">= 0.6" } }, - "api/node_modules/follow-redirects": { - "version": "1.15.9", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.9.tgz", - "integrity": "sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==", - "dev": true, - "funding": [ - { - "type": "individual", - "url": "https://github.com/sponsors/RubenVerborgh" - } - ], - "license": "MIT", - "engines": { - "node": ">=4.0" - }, - "peerDependenciesMeta": { - "debug": { - "optional": true - } - } - }, "api/node_modules/https-proxy-agent": { "version": "7.0.6", "resolved": 
"https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz", @@ -27220,9 +27199,10 @@ } }, "node_modules/bson": { - "version": "6.10.3", - "resolved": "https://registry.npmjs.org/bson/-/bson-6.10.3.tgz", - "integrity": "sha512-MTxGsqgYTwfshYWTRdmZRC+M7FnG1b4y7RO7p2k3X24Wq0yv1m77Wsj0BzlPzd/IowgESfsruQCUToa7vbOpPQ==", + "version": "6.10.4", + "resolved": "https://registry.npmjs.org/bson/-/bson-6.10.4.tgz", + "integrity": "sha512-WIsKqkSC0ABoBJuT1LEX+2HEvNmNKKgnTAyd0fL8qzK4SH2i9NXg+t08YtdZp/V9IZ33cxe3iV4yM0qg8lMQng==", + "license": "Apache-2.0", "engines": { "node": ">=16.20.1" } @@ -31002,15 +30982,16 @@ "integrity": "sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw==" }, "node_modules/follow-redirects": { - "version": "1.15.6", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz", - "integrity": "sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==", + "version": "1.15.9", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.9.tgz", + "integrity": "sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==", "funding": [ { "type": "individual", "url": "https://github.com/sponsors/RubenVerborgh" } ], + "license": "MIT", "engines": { "node": ">=4.0" }, @@ -36978,11 +36959,57 @@ "node": "*" } }, + "node_modules/mongodb": { + "version": "6.17.0", + "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-6.17.0.tgz", + "integrity": "sha512-neerUzg/8U26cgruLysKEjJvoNSXhyID3RvzvdcpsIi2COYM3FS3o9nlH7fxFtefTb942dX3W9i37oPfCVj4wA==", + "devOptional": true, + "license": "Apache-2.0", + "dependencies": { + "@mongodb-js/saslprep": "^1.1.9", + "bson": "^6.10.4", + "mongodb-connection-string-url": "^3.0.0" + }, + "engines": { + "node": ">=16.20.1" + }, + "peerDependencies": { + "@aws-sdk/credential-providers": "^3.188.0", + "@mongodb-js/zstd": "^1.1.0 || ^2.0.0", + "gcp-metadata": "^5.2.0", + "kerberos": "^2.0.1", + "mongodb-client-encryption": ">=6.0.0 <7", + "snappy": "^7.2.2", + "socks": "^2.7.1" + }, + "peerDependenciesMeta": { + "@aws-sdk/credential-providers": { + "optional": true + }, + "@mongodb-js/zstd": { + "optional": true + }, + "gcp-metadata": { + "optional": true + }, + "kerberos": { + "optional": true + }, + "mongodb-client-encryption": { + "optional": true + }, + "snappy": { + "optional": true + }, + "socks": { + "optional": true + } + } + }, "node_modules/mongodb-connection-string-url": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/mongodb-connection-string-url/-/mongodb-connection-string-url-3.0.2.tgz", "integrity": "sha512-rMO7CGo/9BFwyZABcKAWL8UJwH/Kc2x0g72uhDWzG48URRax5TCIcJ7Rc3RZqffZzO/Gwff/jyKwCU9TN8gehA==", - "peer": true, "dependencies": { "@types/whatwg-url": "^11.0.2", "whatwg-url": "^14.1.0 || ^13.0.0" @@ -36992,7 +37019,6 @@ "version": "5.1.1", "resolved": "https://registry.npmjs.org/tr46/-/tr46-5.1.1.tgz", "integrity": "sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw==", - "peer": true, "dependencies": { "punycode": "^2.3.1" }, @@ -37004,7 +37030,6 @@ "version": "14.2.0", "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-14.2.0.tgz", "integrity": "sha512-De72GdQZzNTUBBChsXueQUnPKDkg/5A5zp7pFDuQAj5UFoENpiACU0wlCvzpAGnTkj++ihpKwKyYewn/XNUbKw==", - "peer": true, "dependencies": { "tr46": "^5.1.0", "webidl-conversions": "^7.0.0" @@ -37013,6 +37038,69 @@ "node": ">=18" } }, + 
"node_modules/mongodb-memory-server": { + "version": "10.1.4", + "resolved": "https://registry.npmjs.org/mongodb-memory-server/-/mongodb-memory-server-10.1.4.tgz", + "integrity": "sha512-+oKQ/kc3CX+816oPFRtaF0CN4vNcGKNjpOQe4bHo/21A3pMD+lC7Xz1EX5HP7siCX4iCpVchDMmCOFXVQSGkUg==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "dependencies": { + "mongodb-memory-server-core": "10.1.4", + "tslib": "^2.7.0" + }, + "engines": { + "node": ">=16.20.1" + } + }, + "node_modules/mongodb-memory-server-core": { + "version": "10.1.4", + "resolved": "https://registry.npmjs.org/mongodb-memory-server-core/-/mongodb-memory-server-core-10.1.4.tgz", + "integrity": "sha512-o8fgY7ZalEd8pGps43fFPr/hkQu1L8i6HFEGbsTfA2zDOW0TopgpswaBCqDr0qD7ptibyPfB5DmC+UlIxbThzA==", + "dev": true, + "license": "MIT", + "dependencies": { + "async-mutex": "^0.5.0", + "camelcase": "^6.3.0", + "debug": "^4.3.7", + "find-cache-dir": "^3.3.2", + "follow-redirects": "^1.15.9", + "https-proxy-agent": "^7.0.5", + "mongodb": "^6.9.0", + "new-find-package-json": "^2.0.0", + "semver": "^7.6.3", + "tar-stream": "^3.1.7", + "tslib": "^2.7.0", + "yauzl": "^3.1.3" + }, + "engines": { + "node": ">=16.20.1" + } + }, + "node_modules/mongodb-memory-server-core/node_modules/agent-base": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.3.tgz", + "integrity": "sha512-jRR5wdylq8CkOe6hei19GGZnxM6rBGwFl3Bg0YItGDimvjGtAvdZk4Pu6Cl4u4Igsws4a1fd1Vq3ezrhn4KmFw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 14" + } + }, + "node_modules/mongodb-memory-server-core/node_modules/https-proxy-agent": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz", + "integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==", + "dev": true, + "license": "MIT", + "dependencies": { + "agent-base": "^7.1.2", + "debug": "4" + }, + "engines": { + "node": ">= 14" + } + }, "node_modules/moo-color": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/moo-color/-/moo-color-1.0.3.tgz", @@ -46196,7 +46284,7 @@ "passport-facebook": "^3.0.0" }, "devDependencies": { - "@librechat/data-schemas": "^0.0.8", + "@librechat/data-schemas": "^0.0.9", "@rollup/plugin-alias": "^5.1.0", "@rollup/plugin-commonjs": "^25.0.2", "@rollup/plugin-json": "^6.1.0", @@ -46364,7 +46452,7 @@ }, "packages/data-schemas": { "name": "@librechat/data-schemas", - "version": "0.0.8", + "version": "0.0.9", "license": "MIT", "devDependencies": { "@rollup/plugin-alias": "^5.1.0", @@ -46381,6 +46469,7 @@ "@types/traverse": "^0.6.37", "jest": "^29.5.0", "jest-junit": "^16.0.0", + "mongodb-memory-server": "^10.1.4", "rimraf": "^5.0.1", "rollup": "^4.22.4", "rollup-plugin-generate-package-json": "^3.2.0", @@ -46397,6 +46486,7 @@ "lodash": "^4.17.21", "meilisearch": "^0.38.0", "mongoose": "^8.12.1", + "nanoid": "^3.3.7", "traverse": "^0.6.11", "winston": "^3.17.0", "winston-daily-rotate-file": "^5.0.0" diff --git a/packages/data-schemas/package.json b/packages/data-schemas/package.json index 8d625fa835..9234db4032 100644 --- a/packages/data-schemas/package.json +++ b/packages/data-schemas/package.json @@ -1,6 +1,6 @@ { "name": "@librechat/data-schemas", - "version": "0.0.8", + "version": "0.0.9", "description": "Mongoose schemas and models for LibreChat", "type": "module", "main": "dist/index.cjs", @@ -51,6 +51,7 @@ "@types/traverse": "^0.6.37", "jest": "^29.5.0", "jest-junit": "^16.0.0", + "mongodb-memory-server": "^10.1.4", 
"rimraf": "^5.0.1", "rollup": "^4.22.4", "rollup-plugin-generate-package-json": "^3.2.0", @@ -60,13 +61,14 @@ "typescript": "^5.0.4" }, "peerDependencies": { - "keyv": "^5.3.2", - "mongoose": "^8.12.1", - "librechat-data-provider": "*", "jsonwebtoken": "^9.0.2", + "keyv": "^5.3.2", "klona": "^2.0.6", + "librechat-data-provider": "*", "lodash": "^4.17.21", "meilisearch": "^0.38.0", + "mongoose": "^8.12.1", + "nanoid": "^3.3.7", "traverse": "^0.6.11", "winston": "^3.17.0", "winston-daily-rotate-file": "^5.0.0" diff --git a/packages/data-schemas/src/methods/index.ts b/packages/data-schemas/src/methods/index.ts index 33dcdc918d..67f8255c8d 100644 --- a/packages/data-schemas/src/methods/index.ts +++ b/packages/data-schemas/src/methods/index.ts @@ -4,6 +4,7 @@ import { createTokenMethods, type TokenMethods } from './token'; import { createRoleMethods, type RoleMethods } from './role'; /* Memories */ import { createMemoryMethods, type MemoryMethods } from './memory'; +import { createShareMethods, type ShareMethods } from './share'; /** * Creates all database methods for all collections @@ -15,8 +16,14 @@ export function createMethods(mongoose: typeof import('mongoose')) { ...createTokenMethods(mongoose), ...createRoleMethods(mongoose), ...createMemoryMethods(mongoose), + ...createShareMethods(mongoose), }; } -export type { MemoryMethods }; -export type AllMethods = UserMethods & SessionMethods & TokenMethods & RoleMethods & MemoryMethods; +export type { MemoryMethods, ShareMethods }; +export type AllMethods = UserMethods & + SessionMethods & + TokenMethods & + RoleMethods & + MemoryMethods & + ShareMethods; diff --git a/packages/data-schemas/src/methods/share.test.ts b/packages/data-schemas/src/methods/share.test.ts new file mode 100644 index 0000000000..45b7faeb1a --- /dev/null +++ b/packages/data-schemas/src/methods/share.test.ts @@ -0,0 +1,1043 @@ +import { nanoid } from 'nanoid'; +import mongoose from 'mongoose'; +import { Constants } from 'librechat-data-provider'; +import { MongoMemoryServer } from 'mongodb-memory-server'; +import { createShareMethods, type ShareMethods } from './share'; +import type { SchemaWithMeiliMethods } from '~/models/plugins/mongoMeili'; +import type * as t from '~/types'; + +describe('Share Methods', () => { + let mongoServer: MongoMemoryServer; + let shareMethods: ShareMethods; + let SharedLink: mongoose.Model; + let Message: mongoose.Model; + let Conversation: SchemaWithMeiliMethods; + + beforeAll(async () => { + mongoServer = await MongoMemoryServer.create(); + const mongoUri = mongoServer.getUri(); + await mongoose.connect(mongoUri); + + // Create schemas + const sharedLinkSchema = new mongoose.Schema( + { + conversationId: { type: String, required: true }, + title: { type: String, index: true }, + user: { type: String, index: true }, + messages: [{ type: mongoose.Schema.Types.ObjectId, ref: 'Message' }], + shareId: { type: String, index: true }, + isPublic: { type: Boolean, default: true }, + }, + { timestamps: true }, + ); + + const messageSchema = new mongoose.Schema( + { + messageId: { type: String, required: true }, + conversationId: { type: String, required: true }, + user: { type: String, required: true }, + text: String, + isCreatedByUser: Boolean, + model: String, + parentMessageId: String, + attachments: [mongoose.Schema.Types.Mixed], + content: [mongoose.Schema.Types.Mixed], + }, + { timestamps: true }, + ); + + const conversationSchema = new mongoose.Schema( + { + conversationId: { type: String, required: true }, + title: String, + user: String, + 
}, + { timestamps: true }, + ); + + // Register models + SharedLink = + mongoose.models.SharedLink || mongoose.model('SharedLink', sharedLinkSchema); + Message = mongoose.models.Message || mongoose.model('Message', messageSchema); + Conversation = (mongoose.models.Conversation || + mongoose.model( + 'Conversation', + conversationSchema, + )) as SchemaWithMeiliMethods; + + // Create share methods + shareMethods = createShareMethods(mongoose); + }); + + afterAll(async () => { + await mongoose.disconnect(); + await mongoServer.stop(); + }); + + beforeEach(async () => { + await SharedLink.deleteMany({}); + await Message.deleteMany({}); + await Conversation.deleteMany({}); + }); + + describe('createSharedLink', () => { + test('should create a new shared link', async () => { + const userId = new mongoose.Types.ObjectId().toString(); + const conversationId = `conv_${nanoid()}`; + + // Create test conversation + await Conversation.create({ + conversationId, + title: 'Test Conversation', + user: userId, + }); + + // Create test messages + await Message.create([ + { + messageId: `msg_${nanoid()}`, + conversationId, + user: userId, + text: 'Hello', + isCreatedByUser: true, + }, + { + messageId: `msg_${nanoid()}`, + conversationId, + user: userId, + text: 'World', + isCreatedByUser: false, + model: 'gpt-4', + }, + ]); + + const result = await shareMethods.createSharedLink(userId, conversationId); + + expect(result).toBeDefined(); + expect(result.shareId).toBeDefined(); + expect(result.conversationId).toBe(conversationId); + + // Verify the share was created in the database + const savedShare = await SharedLink.findOne({ shareId: result.shareId }); + expect(savedShare).toBeDefined(); + expect(savedShare?.user).toBe(userId); + expect(savedShare?.title).toBe('Test Conversation'); + expect(savedShare?.messages).toHaveLength(2); + }); + + test('should throw error if share already exists', async () => { + const userId = new mongoose.Types.ObjectId().toString(); + const conversationId = `conv_${nanoid()}`; + + await Conversation.create({ + conversationId, + title: 'Test Conversation', + user: userId, + }); + + // Create messages so we can create a share + await Message.create({ + messageId: `msg_${nanoid()}`, + conversationId, + user: userId, + text: 'Test message', + isCreatedByUser: true, + }); + + // Create first share + await shareMethods.createSharedLink(userId, conversationId); + + // Try to create duplicate + await expect(shareMethods.createSharedLink(userId, conversationId)).rejects.toThrow( + 'Share already exists', + ); + }); + + test('should throw error with missing parameters', async () => { + await expect(shareMethods.createSharedLink('', 'conv123')).rejects.toThrow( + 'Missing required parameters', + ); + + await expect(shareMethods.createSharedLink('user123', '')).rejects.toThrow( + 'Missing required parameters', + ); + }); + + test('should only include messages from the same user', async () => { + const userId1 = new mongoose.Types.ObjectId().toString(); + const userId2 = new mongoose.Types.ObjectId().toString(); + const conversationId = `conv_${nanoid()}`; + + await Conversation.create({ + conversationId, + title: 'Test Conversation', + user: userId1, + }); + + // Create messages from different users + await Message.create([ + { + messageId: `msg_${nanoid()}`, + conversationId, + user: userId1, + text: 'User 1 message', + isCreatedByUser: true, + }, + { + messageId: `msg_${nanoid()}`, + conversationId, + user: userId2, + text: 'User 2 message', + isCreatedByUser: true, + }, + ]); + + const 
result = await shareMethods.createSharedLink(userId1, conversationId); + + const savedShare = await SharedLink.findOne({ shareId: result.shareId }).populate('messages'); + expect(savedShare?.messages).toHaveLength(1); + expect((savedShare?.messages?.[0] as unknown as t.IMessage | undefined)?.text).toBe( + 'User 1 message', + ); + }); + + test('should not allow user to create shared link for conversation they do not own', async () => { + const ownerUserId = new mongoose.Types.ObjectId().toString(); + const otherUserId = new mongoose.Types.ObjectId().toString(); + const conversationId = `conv_${nanoid()}`; + + // Create conversation owned by ownerUserId + await Conversation.create({ + conversationId, + title: 'Owner Conversation', + user: ownerUserId, + }); + + // Create messages for the conversation + await Message.create([ + { + messageId: `msg_${nanoid()}`, + conversationId, + user: ownerUserId, + text: 'Owner message', + isCreatedByUser: true, + }, + ]); + + // Try to create a shared link as a different user + await expect(shareMethods.createSharedLink(otherUserId, conversationId)).rejects.toThrow( + 'Conversation not found or access denied', + ); + + // Verify no share was created + const shares = await SharedLink.find({ conversationId }); + expect(shares).toHaveLength(0); + }); + + test('should not allow creating share for conversation with no messages', async () => { + const userId = new mongoose.Types.ObjectId().toString(); + const conversationId = `conv_${nanoid()}`; + + // Create conversation without any messages + await Conversation.create({ + conversationId, + title: 'Empty Conversation', + user: userId, + }); + + // Try to create a shared link for conversation with no messages + await expect(shareMethods.createSharedLink(userId, conversationId)).rejects.toThrow( + 'No messages to share', + ); + + // Verify no share was created + const shares = await SharedLink.find({ conversationId }); + expect(shares).toHaveLength(0); + }); + }); + + describe('getSharedMessages', () => { + test('should retrieve and anonymize shared messages', async () => { + const userId = new mongoose.Types.ObjectId().toString(); + const conversationId = `conv_${nanoid()}`; + const shareId = `share_${nanoid()}`; + + // Create messages + const messages = await Message.create([ + { + messageId: `msg_${nanoid()}`, + conversationId, + user: userId, + text: 'Hello', + isCreatedByUser: true, + parentMessageId: Constants.NO_PARENT, + }, + { + messageId: `msg_${nanoid()}`, + conversationId, + user: userId, + text: 'World', + isCreatedByUser: false, + model: 'gpt-4', + parentMessageId: Constants.NO_PARENT, + }, + ]); + + // Create shared link + await SharedLink.create({ + shareId, + conversationId, + user: userId, + title: 'Test Share', + messages: messages.map((m) => m._id), + isPublic: true, + }); + + const result = await shareMethods.getSharedMessages(shareId); + + expect(result).toBeDefined(); + expect(result?.shareId).toBe(shareId); + expect(result?.conversationId).not.toBe(conversationId); // Should be anonymized + expect(result?.messages).toHaveLength(2); + + // Check anonymization + result?.messages.forEach((msg) => { + expect(msg.messageId).toMatch(/^msg_/); // Should be anonymized with msg_ prefix + expect(msg.messageId).not.toBe(messages[0].messageId); // Should be different from original + expect(msg.conversationId).toBe(result.conversationId); + expect(msg.user).toBeUndefined(); // User should be removed + }); + }); + + test('should return null for non-public share', async () => { + const shareId = 
`share_${nanoid()}`; + + await SharedLink.create({ + shareId, + conversationId: 'conv123', + user: 'user123', + isPublic: false, + }); + + const result = await shareMethods.getSharedMessages(shareId); + expect(result).toBeNull(); + }); + + test('should return null for non-existent share', async () => { + const result = await shareMethods.getSharedMessages('non_existent_share'); + expect(result).toBeNull(); + }); + + test('should handle messages with attachments', async () => { + const userId = new mongoose.Types.ObjectId().toString(); + const conversationId = `conv_${nanoid()}`; + const shareId = `share_${nanoid()}`; + + const message = await Message.create({ + messageId: `msg_${nanoid()}`, + conversationId, + user: userId, + text: 'Message with attachment', + isCreatedByUser: true, + attachments: [ + { + file_id: 'file123', + filename: 'test.pdf', + type: 'application/pdf', + }, + ], + }); + + await SharedLink.create({ + shareId, + conversationId, + user: userId, + messages: [message._id], + isPublic: true, + }); + + const result = await shareMethods.getSharedMessages(shareId); + + expect(result?.messages[0].attachments).toHaveLength(1); + expect( + (result?.messages[0].attachments?.[0] as unknown as t.IMessage | undefined)?.messageId, + ).toBe(result?.messages[0].messageId); + expect( + (result?.messages[0].attachments?.[0] as unknown as t.IMessage | undefined)?.conversationId, + ).toBe(result?.conversationId); + }); + }); + + describe('getSharedLinks', () => { + test('should retrieve paginated shared links for a user', async () => { + const userId = new mongoose.Types.ObjectId().toString(); + + // Create multiple shared links + const sharePromises = Array.from({ length: 15 }, (_, i) => + SharedLink.create({ + shareId: `share_${i}`, + conversationId: `conv_${i}`, + user: userId, + title: `Share ${i}`, + isPublic: true, + createdAt: new Date(Date.now() - i * 1000 * 60), // Different timestamps + }), + ); + + await Promise.all(sharePromises); + + const result = await shareMethods.getSharedLinks(userId, undefined, 10); + + expect(result.links).toHaveLength(10); + expect(result.hasNextPage).toBe(true); + expect(result.nextCursor).toBeDefined(); + + // Check ordering (newest first by default) + expect(result.links[0].title).toBe('Share 0'); + expect(result.links[9].title).toBe('Share 9'); + }); + + test('should filter by isPublic parameter', async () => { + const userId = new mongoose.Types.ObjectId().toString(); + + await SharedLink.create([ + { + shareId: 'public_share', + conversationId: 'conv1', + user: userId, + title: 'Public Share', + isPublic: true, + }, + { + shareId: 'private_share', + conversationId: 'conv2', + user: userId, + title: 'Private Share', + isPublic: false, + }, + ]); + + const publicResults = await shareMethods.getSharedLinks(userId, undefined, 10, true); + const privateResults = await shareMethods.getSharedLinks(userId, undefined, 10, false); + + expect(publicResults.links).toHaveLength(1); + expect(publicResults.links[0].title).toBe('Public Share'); + + expect(privateResults.links).toHaveLength(1); + expect(privateResults.links[0].title).toBe('Private Share'); + }); + + test('should handle search with mocked meiliSearch', async () => { + const userId = new mongoose.Types.ObjectId().toString(); + + // Mock meiliSearch method + Conversation.meiliSearch = jest.fn().mockResolvedValue({ + hits: [{ conversationId: 'conv1' }], + }); + + await SharedLink.create([ + { + shareId: 'share1', + conversationId: 'conv1', + user: userId, + title: 'Matching Share', + isPublic: true, 
+ }, + { + shareId: 'share2', + conversationId: 'conv2', + user: userId, + title: 'Non-matching Share', + isPublic: true, + }, + ]); + + const result = await shareMethods.getSharedLinks( + userId, + undefined, + 10, + true, + 'createdAt', + 'desc', + 'search term', + ); + + expect(result.links).toHaveLength(1); + expect(result.links[0].title).toBe('Matching Share'); + }); + + test('should handle empty results', async () => { + const userId = new mongoose.Types.ObjectId().toString(); + const result = await shareMethods.getSharedLinks(userId); + + expect(result.links).toHaveLength(0); + expect(result.hasNextPage).toBe(false); + expect(result.nextCursor).toBeUndefined(); + }); + + test('should only return shares for the specified user', async () => { + const userId1 = new mongoose.Types.ObjectId().toString(); + const userId2 = new mongoose.Types.ObjectId().toString(); + + // Create shares for different users + await SharedLink.create([ + { + shareId: 'share1', + conversationId: 'conv1', + user: userId1, + title: 'User 1 Share', + isPublic: true, + }, + { + shareId: 'share2', + conversationId: 'conv2', + user: userId2, + title: 'User 2 Share', + isPublic: true, + }, + { + shareId: 'share3', + conversationId: 'conv3', + user: userId1, + title: 'Another User 1 Share', + isPublic: true, + }, + ]); + + const result1 = await shareMethods.getSharedLinks(userId1); + const result2 = await shareMethods.getSharedLinks(userId2); + + expect(result1.links).toHaveLength(2); + expect(result1.links.every((link) => link.title.includes('User 1'))).toBe(true); + + expect(result2.links).toHaveLength(1); + expect(result2.links[0].title).toBe('User 2 Share'); + }); + }); + + describe('updateSharedLink', () => { + test('should update shared link with new messages', async () => { + const userId = new mongoose.Types.ObjectId().toString(); + const conversationId = `conv_${nanoid()}`; + const oldShareId = `share_${nanoid()}`; + + // Create initial messages + const initialMessages = await Message.create([ + { + messageId: `msg_1`, + conversationId, + user: userId, + text: 'Initial message', + isCreatedByUser: true, + }, + ]); + + // Create shared link + await SharedLink.create({ + shareId: oldShareId, + conversationId, + user: userId, + messages: initialMessages.map((m) => m._id), + isPublic: true, + }); + + // Add new message + await Message.create({ + messageId: `msg_2`, + conversationId, + user: userId, + text: 'New message', + isCreatedByUser: false, + }); + + const result = await shareMethods.updateSharedLink(userId, oldShareId); + + expect(result.shareId).not.toBe(oldShareId); // Should generate new shareId + expect(result.conversationId).toBe(conversationId); + + // Verify updated share + const updatedShare = await SharedLink.findOne({ shareId: result.shareId }).populate( + 'messages', + ); + expect(updatedShare?.messages).toHaveLength(2); + }); + + test('should throw error if share not found', async () => { + await expect(shareMethods.updateSharedLink('user123', 'non_existent')).rejects.toThrow( + 'Share not found', + ); + }); + + test('should throw error with missing parameters', async () => { + await expect(shareMethods.updateSharedLink('', 'share123')).rejects.toThrow( + 'Missing required parameters', + ); + }); + + test('should only update with messages from the same user', async () => { + const userId = new mongoose.Types.ObjectId().toString(); + const otherUserId = new mongoose.Types.ObjectId().toString(); + const conversationId = `conv_${nanoid()}`; + const shareId = `share_${nanoid()}`; + + // Create 
initial share + await SharedLink.create({ + shareId, + conversationId, + user: userId, + messages: [], + isPublic: true, + }); + + // Add messages from different users + await Message.create([ + { + messageId: `msg_1`, + conversationId, + user: userId, + text: 'User message', + isCreatedByUser: true, + }, + { + messageId: `msg_2`, + conversationId, + user: otherUserId, + text: 'Other user message', + isCreatedByUser: true, + }, + ]); + + const result = await shareMethods.updateSharedLink(userId, shareId); + + const updatedShare = await SharedLink.findOne({ shareId: result.shareId }).populate( + 'messages', + ); + expect(updatedShare?.messages).toHaveLength(1); + expect((updatedShare?.messages?.[0] as unknown as t.IMessage | undefined)?.text).toBe( + 'User message', + ); + }); + + test('should not allow user to update shared link they do not own', async () => { + const ownerUserId = new mongoose.Types.ObjectId().toString(); + const otherUserId = new mongoose.Types.ObjectId().toString(); + const conversationId = `conv_${nanoid()}`; + const shareId = `share_${nanoid()}`; + + // Create shared link owned by ownerUserId + await SharedLink.create({ + shareId, + conversationId, + user: ownerUserId, + messages: [], + isPublic: true, + }); + + // Try to update as a different user + await expect(shareMethods.updateSharedLink(otherUserId, shareId)).rejects.toThrow( + 'Share not found', + ); + + // Verify the original share still exists and is unchanged + const originalShare = await SharedLink.findOne({ shareId }); + expect(originalShare).toBeDefined(); + expect(originalShare?.user).toBe(ownerUserId); + }); + }); + + describe('deleteSharedLink', () => { + test('should delete shared link', async () => { + const userId = new mongoose.Types.ObjectId().toString(); + const shareId = `share_${nanoid()}`; + + await SharedLink.create({ + shareId, + conversationId: 'conv123', + user: userId, + isPublic: true, + }); + + const result = await shareMethods.deleteSharedLink(userId, shareId); + + expect(result).toBeDefined(); + expect(result?.success).toBe(true); + expect(result?.shareId).toBe(shareId); + + // Verify deletion + const deletedShare = await SharedLink.findOne({ shareId }); + expect(deletedShare).toBeNull(); + }); + + test('should return null if share not found', async () => { + const result = await shareMethods.deleteSharedLink('user123', 'non_existent'); + expect(result).toBeNull(); + }); + + test('should not delete share from different user', async () => { + const userId1 = new mongoose.Types.ObjectId().toString(); + const userId2 = new mongoose.Types.ObjectId().toString(); + const shareId = `share_${nanoid()}`; + + await SharedLink.create({ + shareId, + conversationId: 'conv123', + user: userId1, + isPublic: true, + }); + + const result = await shareMethods.deleteSharedLink(userId2, shareId); + expect(result).toBeNull(); + + // Verify share still exists + const share = await SharedLink.findOne({ shareId }); + expect(share).toBeDefined(); + }); + + test('should handle missing parameters for deleteSharedLink', async () => { + await expect(shareMethods.deleteSharedLink('', 'share123')).rejects.toThrow( + 'Missing required parameters', + ); + + await expect(shareMethods.deleteSharedLink('user123', '')).rejects.toThrow( + 'Missing required parameters', + ); + }); + }); + + describe('getSharedLink', () => { + test('should retrieve existing shared link', async () => { + const userId = new mongoose.Types.ObjectId().toString(); + const conversationId = `conv_${nanoid()}`; + const shareId = 
`share_${nanoid()}`; + + await SharedLink.create({ + shareId, + conversationId, + user: userId, + isPublic: true, + }); + + const result = await shareMethods.getSharedLink(userId, conversationId); + + expect(result.success).toBe(true); + expect(result.shareId).toBe(shareId); + }); + + test('should return null shareId if not found', async () => { + const result = await shareMethods.getSharedLink('user123', 'conv123'); + + expect(result.success).toBe(false); + expect(result.shareId).toBeNull(); + }); + + test('should not return share from different user', async () => { + const userId1 = new mongoose.Types.ObjectId().toString(); + const userId2 = new mongoose.Types.ObjectId().toString(); + const conversationId = `conv_${nanoid()}`; + + await SharedLink.create({ + shareId: 'share123', + conversationId, + user: userId1, + isPublic: true, + }); + + const result = await shareMethods.getSharedLink(userId2, conversationId); + + expect(result.success).toBe(false); + expect(result.shareId).toBeNull(); + }); + + test('should handle missing parameters for getSharedLink', async () => { + await expect(shareMethods.getSharedLink('', 'conv123')).rejects.toThrow( + 'Missing required parameters', + ); + + await expect(shareMethods.getSharedLink('user123', '')).rejects.toThrow( + 'Missing required parameters', + ); + }); + + test('should only return public shares', async () => { + const userId = new mongoose.Types.ObjectId().toString(); + const conversationId = `conv_${nanoid()}`; + const shareId = `share_${nanoid()}`; + + // Create a non-public share + await SharedLink.create({ + shareId, + conversationId, + user: userId, + isPublic: false, + }); + + const result = await shareMethods.getSharedLink(userId, conversationId); + + expect(result.success).toBe(false); + expect(result.shareId).toBeNull(); + }); + }); + + describe('deleteAllSharedLinks', () => { + test('should delete all shared links for a user', async () => { + const userId = new mongoose.Types.ObjectId().toString(); + const otherUserId = new mongoose.Types.ObjectId().toString(); + + // Create shares for different users + await SharedLink.create([ + { shareId: 'share1', conversationId: 'conv1', user: userId }, + { shareId: 'share2', conversationId: 'conv2', user: userId }, + { shareId: 'share3', conversationId: 'conv3', user: otherUserId }, + ]); + + const result = await shareMethods.deleteAllSharedLinks(userId); + + expect(result.deletedCount).toBe(2); + expect(result.message).toContain('successfully'); + + // Verify only user's shares were deleted + const remainingShares = await SharedLink.find({}); + expect(remainingShares).toHaveLength(1); + expect(remainingShares[0].user).toBe(otherUserId); + }); + + test('should handle when no shares exist', async () => { + const result = await shareMethods.deleteAllSharedLinks('user123'); + + expect(result.deletedCount).toBe(0); + expect(result.message).toContain('successfully'); + }); + + test('should only delete shares belonging to the specified user', async () => { + const userId1 = new mongoose.Types.ObjectId().toString(); + const userId2 = new mongoose.Types.ObjectId().toString(); + const userId3 = new mongoose.Types.ObjectId().toString(); + + // Create multiple shares for different users + await SharedLink.create([ + { shareId: 'share1', conversationId: 'conv1', user: userId1, isPublic: true }, + { shareId: 'share2', conversationId: 'conv2', user: userId1, isPublic: false }, + { shareId: 'share3', conversationId: 'conv3', user: userId2, isPublic: true }, + { shareId: 'share4', conversationId: 'conv4', 
user: userId2, isPublic: true }, + { shareId: 'share5', conversationId: 'conv5', user: userId3, isPublic: true }, + ]); + + // Delete all shares for userId1 + const result = await shareMethods.deleteAllSharedLinks(userId1); + expect(result.deletedCount).toBe(2); + + // Verify shares for other users still exist + const remainingShares = await SharedLink.find({}); + expect(remainingShares).toHaveLength(3); + expect(remainingShares.every((share) => share.user !== userId1)).toBe(true); + + // Verify specific users' shares remain + const user2Shares = await SharedLink.find({ user: userId2 }); + expect(user2Shares).toHaveLength(2); + + const user3Shares = await SharedLink.find({ user: userId3 }); + expect(user3Shares).toHaveLength(1); + }); + }); + + describe('Edge Cases and Error Handling', () => { + test('should handle conversation with special characters in ID', async () => { + const userId = new mongoose.Types.ObjectId().toString(); + const conversationId = 'conv|with|pipes'; + + await Conversation.create({ + conversationId, + title: 'Special Conversation', + user: userId, + }); + + // Create a message so we can create a share + await Message.create({ + messageId: `msg_${nanoid()}`, + conversationId, + user: userId, + text: 'Test message', + isCreatedByUser: true, + }); + + const result = await shareMethods.createSharedLink(userId, conversationId); + + expect(result).toBeDefined(); + expect(result.conversationId).toBe(conversationId); + }); + + test('should handle messages with assistant_id', async () => { + const userId = new mongoose.Types.ObjectId().toString(); + const conversationId = `conv_${nanoid()}`; + const shareId = `share_${nanoid()}`; + + const message = await Message.create({ + messageId: `msg_${nanoid()}`, + conversationId, + user: userId, + text: 'Assistant message', + isCreatedByUser: false, + model: 'asst_123456', + }); + + await SharedLink.create({ + shareId, + conversationId, + user: userId, + messages: [message._id], + isPublic: true, + }); + + const result = await shareMethods.getSharedMessages(shareId); + + expect(result?.messages[0].model).toMatch(/^a_/); // Should be anonymized + }); + + test('should handle concurrent operations', async () => { + const userId = new mongoose.Types.ObjectId().toString(); + const conversationIds = Array.from({ length: 5 }, () => `conv_${nanoid()}`); + + // Create conversations and messages + await Promise.all( + conversationIds.map(async (id) => { + await Conversation.create({ + conversationId: id, + title: `Conversation ${id}`, + user: userId, + }); + // Create a message for each conversation + await Message.create({ + messageId: `msg_${nanoid()}`, + conversationId: id, + user: userId, + text: `Message for ${id}`, + isCreatedByUser: true, + }); + }), + ); + + // Concurrent share creation + const createPromises = conversationIds.map((id) => shareMethods.createSharedLink(userId, id)); + + const results = await Promise.all(createPromises); + + expect(results).toHaveLength(5); + results.forEach((result, index) => { + expect(result.shareId).toBeDefined(); + expect(result.conversationId).toBe(conversationIds[index]); + }); + }); + + test('should handle database errors gracefully', async () => { + const userId = new mongoose.Types.ObjectId().toString(); + const conversationId = `conv_${nanoid()}`; + + // Create conversation and message first + await Conversation.create({ + conversationId, + title: 'Test Conversation', + user: userId, + }); + + await Message.create({ + messageId: `msg_${nanoid()}`, + conversationId, + user: userId, + text: 
'Test message', + isCreatedByUser: true, + }); + + // Mock a database error + const originalCreate = SharedLink.create; + SharedLink.create = jest.fn().mockRejectedValue(new Error('Database error')); + + await expect(shareMethods.createSharedLink(userId, conversationId)).rejects.toThrow( + 'Error creating shared link', + ); + + SharedLink.create = originalCreate; + }); + }); + + describe('Anonymization', () => { + beforeEach(() => { + // Ensure any mocks are restored before each test + jest.restoreAllMocks(); + }); + + test('should consistently anonymize IDs', async () => { + const userId = new mongoose.Types.ObjectId().toString(); + const conversationId = `conv_${nanoid()}`; + const shareId = `share_${nanoid()}`; + const messageId1 = `msg_${nanoid()}`; + const messageId2 = `msg_${nanoid()}`; + + const messages = await Message.create([ + { + messageId: messageId1, + conversationId, + user: userId, + text: 'First message', + isCreatedByUser: true, + parentMessageId: Constants.NO_PARENT, + }, + { + messageId: messageId2, + conversationId, + user: userId, + text: 'Second message', + isCreatedByUser: false, + parentMessageId: messageId1, // Reference to first message + }, + ]); + + await SharedLink.create({ + shareId, + conversationId, + user: userId, + messages: messages.map((m) => m._id), + isPublic: true, + }); + + const result = await shareMethods.getSharedMessages(shareId); + + // Check that anonymization is consistent within the same result + expect(result?.messages).toHaveLength(2); + + // The second message's parentMessageId should match the first message's anonymized ID + expect(result?.messages[1].parentMessageId).toBe(result?.messages[0].messageId); + + // Both messages should have the same anonymized conversationId + expect(result?.messages[0].conversationId).toBe(result?.conversationId); + expect(result?.messages[1].conversationId).toBe(result?.conversationId); + }); + + test('should handle NO_PARENT constant correctly', async () => { + const { Constants } = await import('librechat-data-provider'); + const userId = new mongoose.Types.ObjectId().toString(); + const conversationId = `conv_${nanoid()}`; + const shareId = `share_${nanoid()}`; + + const message = await Message.create({ + messageId: `msg_${nanoid()}`, + conversationId, + user: userId, + text: 'Root message', + isCreatedByUser: true, + parentMessageId: Constants.NO_PARENT, + }); + + await SharedLink.create({ + shareId, + conversationId, + user: userId, + messages: [message._id], + isPublic: true, + }); + + const result = await shareMethods.getSharedMessages(shareId); + + expect(result?.messages[0].parentMessageId).toBe(Constants.NO_PARENT); + }); + }); +}); diff --git a/packages/data-schemas/src/methods/share.ts b/packages/data-schemas/src/methods/share.ts new file mode 100644 index 0000000000..7c16ead209 --- /dev/null +++ b/packages/data-schemas/src/methods/share.ts @@ -0,0 +1,442 @@ +import { nanoid } from 'nanoid'; +import { Constants } from 'librechat-data-provider'; +import type { FilterQuery, Model } from 'mongoose'; +import type { SchemaWithMeiliMethods } from '~/models/plugins/mongoMeili'; +import type * as t from '~/types'; +import logger from '~/config/winston'; + +class ShareServiceError extends Error { + code: string; + constructor(message: string, code: string) { + super(message); + this.name = 'ShareServiceError'; + this.code = code; + } +} + +function memoizedAnonymizeId(prefix: string) { + const memo = new Map(); + return (id: string) => { + if (!memo.has(id)) { + memo.set(id, `${prefix}_${nanoid()}`); + 
} + return memo.get(id) as string; + }; +} + +const anonymizeConvoId = memoizedAnonymizeId('convo'); +const anonymizeAssistantId = memoizedAnonymizeId('a'); +const anonymizeMessageId = (id: string) => + id === Constants.NO_PARENT ? id : memoizedAnonymizeId('msg')(id); + +function anonymizeConvo(conversation: Partial & Partial) { + if (!conversation) { + return null; + } + + const newConvo = { ...conversation }; + if (newConvo.assistant_id) { + newConvo.assistant_id = anonymizeAssistantId(newConvo.assistant_id); + } + return newConvo; +} + +function anonymizeMessages(messages: t.IMessage[], newConvoId: string): t.IMessage[] { + if (!Array.isArray(messages)) { + return []; + } + + const idMap = new Map(); + return messages.map((message) => { + const newMessageId = anonymizeMessageId(message.messageId); + idMap.set(message.messageId, newMessageId); + + type MessageAttachment = { + messageId?: string; + conversationId?: string; + [key: string]: unknown; + }; + + const anonymizedAttachments = (message.attachments as MessageAttachment[])?.map( + (attachment) => { + return { + ...attachment, + messageId: newMessageId, + conversationId: newConvoId, + }; + }, + ); + + return { + ...message, + messageId: newMessageId, + parentMessageId: + idMap.get(message.parentMessageId || '') || + anonymizeMessageId(message.parentMessageId || ''), + conversationId: newConvoId, + model: message.model?.startsWith('asst_') + ? anonymizeAssistantId(message.model) + : message.model, + attachments: anonymizedAttachments, + } as t.IMessage; + }); +} + +/** Factory function that takes mongoose instance and returns the methods */ +export function createShareMethods(mongoose: typeof import('mongoose')) { + /** + * Get shared messages for a public share link + */ + async function getSharedMessages(shareId: string): Promise { + try { + const SharedLink = mongoose.models.SharedLink as Model; + const share = (await SharedLink.findOne({ shareId, isPublic: true }) + .populate({ + path: 'messages', + select: '-_id -__v -user', + }) + .select('-_id -__v -user') + .lean()) as (t.ISharedLink & { messages: t.IMessage[] }) | null; + + if (!share?.conversationId || !share.isPublic) { + return null; + } + + const newConvoId = anonymizeConvoId(share.conversationId); + const result: t.SharedMessagesResult = { + shareId: share.shareId || shareId, + title: share.title, + isPublic: share.isPublic, + createdAt: share.createdAt, + updatedAt: share.updatedAt, + conversationId: newConvoId, + messages: anonymizeMessages(share.messages, newConvoId), + }; + + return result; + } catch (error) { + logger.error('[getSharedMessages] Error getting share link', { + error: error instanceof Error ? 
error.message : 'Unknown error', + shareId, + }); + throw new ShareServiceError('Error getting share link', 'SHARE_FETCH_ERROR'); + } + } + + /** + * Get shared links for a specific user with pagination and search + */ + async function getSharedLinks( + user: string, + pageParam?: Date, + pageSize: number = 10, + isPublic: boolean = true, + sortBy: string = 'createdAt', + sortDirection: string = 'desc', + search?: string, + ): Promise { + try { + const SharedLink = mongoose.models.SharedLink as Model; + const Conversation = mongoose.models.Conversation as SchemaWithMeiliMethods; + const query: FilterQuery = { user, isPublic }; + + if (pageParam) { + if (sortDirection === 'desc') { + query[sortBy] = { $lt: pageParam }; + } else { + query[sortBy] = { $gt: pageParam }; + } + } + + if (search && search.trim()) { + try { + const searchResults = await Conversation.meiliSearch(search); + + if (!searchResults?.hits?.length) { + return { + links: [], + nextCursor: undefined, + hasNextPage: false, + }; + } + + const conversationIds = searchResults.hits.map((hit) => hit.conversationId); + query['conversationId'] = { $in: conversationIds }; + } catch (searchError) { + logger.error('[getSharedLinks] Meilisearch error', { + error: searchError instanceof Error ? searchError.message : 'Unknown error', + user, + }); + return { + links: [], + nextCursor: undefined, + hasNextPage: false, + }; + } + } + + const sort: Record = {}; + sort[sortBy] = sortDirection === 'desc' ? -1 : 1; + + const sharedLinks = await SharedLink.find(query) + .sort(sort) + .limit(pageSize + 1) + .select('-__v -user') + .lean(); + + const hasNextPage = sharedLinks.length > pageSize; + const links = sharedLinks.slice(0, pageSize); + + const nextCursor = hasNextPage + ? (links[links.length - 1][sortBy as keyof t.ISharedLink] as Date) + : undefined; + + return { + links: links.map((link) => ({ + shareId: link.shareId || '', + title: link?.title || 'Untitled', + isPublic: link.isPublic, + createdAt: link.createdAt || new Date(), + conversationId: link.conversationId, + })), + nextCursor, + hasNextPage, + }; + } catch (error) { + logger.error('[getSharedLinks] Error getting shares', { + error: error instanceof Error ? error.message : 'Unknown error', + user, + }); + throw new ShareServiceError('Error getting shares', 'SHARES_FETCH_ERROR'); + } + } + + /** + * Delete all shared links for a user + */ + async function deleteAllSharedLinks(user: string): Promise { + try { + const SharedLink = mongoose.models.SharedLink as Model; + const result = await SharedLink.deleteMany({ user }); + return { + message: 'All shared links deleted successfully', + deletedCount: result.deletedCount, + }; + } catch (error) { + logger.error('[deleteAllSharedLinks] Error deleting shared links', { + error: error instanceof Error ? 
error.message : 'Unknown error', + user, + }); + throw new ShareServiceError('Error deleting shared links', 'BULK_DELETE_ERROR'); + } + } + + /** + * Create a new shared link for a conversation + */ + async function createSharedLink( + user: string, + conversationId: string, + ): Promise { + if (!user || !conversationId) { + throw new ShareServiceError('Missing required parameters', 'INVALID_PARAMS'); + } + try { + const Message = mongoose.models.Message as SchemaWithMeiliMethods; + const SharedLink = mongoose.models.SharedLink as Model; + const Conversation = mongoose.models.Conversation as SchemaWithMeiliMethods; + + const [existingShare, conversationMessages] = await Promise.all([ + SharedLink.findOne({ conversationId, user, isPublic: true }) + .select('-_id -__v -user') + .lean() as Promise, + Message.find({ conversationId, user }).sort({ createdAt: 1 }).lean(), + ]); + + if (existingShare && existingShare.isPublic) { + logger.error('[createSharedLink] Share already exists', { + user, + conversationId, + }); + throw new ShareServiceError('Share already exists', 'SHARE_EXISTS'); + } else if (existingShare) { + await SharedLink.deleteOne({ conversationId, user }); + } + + const conversation = (await Conversation.findOne({ conversationId, user }).lean()) as { + title?: string; + } | null; + + // Check if user owns the conversation + if (!conversation) { + throw new ShareServiceError( + 'Conversation not found or access denied', + 'CONVERSATION_NOT_FOUND', + ); + } + + // Check if there are any messages to share + if (!conversationMessages || conversationMessages.length === 0) { + throw new ShareServiceError('No messages to share', 'NO_MESSAGES'); + } + + const title = conversation.title || 'Untitled'; + + const shareId = nanoid(); + await SharedLink.create({ + shareId, + conversationId, + messages: conversationMessages, + title, + user, + }); + + return { shareId, conversationId }; + } catch (error) { + if (error instanceof ShareServiceError) { + throw error; + } + logger.error('[createSharedLink] Error creating shared link', { + error: error instanceof Error ? error.message : 'Unknown error', + user, + conversationId, + }); + throw new ShareServiceError('Error creating shared link', 'SHARE_CREATE_ERROR'); + } + } + + /** + * Get a shared link for a conversation + */ + async function getSharedLink( + user: string, + conversationId: string, + ): Promise { + if (!user || !conversationId) { + throw new ShareServiceError('Missing required parameters', 'INVALID_PARAMS'); + } + + try { + const SharedLink = mongoose.models.SharedLink as Model; + const share = (await SharedLink.findOne({ conversationId, user, isPublic: true }) + .select('shareId -_id') + .lean()) as { shareId?: string } | null; + + if (!share) { + return { shareId: null, success: false }; + } + + return { shareId: share.shareId || null, success: true }; + } catch (error) { + logger.error('[getSharedLink] Error getting shared link', { + error: error instanceof Error ? 
error.message : 'Unknown error', + user, + conversationId, + }); + throw new ShareServiceError('Error getting shared link', 'SHARE_FETCH_ERROR'); + } + } + + /** + * Update a shared link with new messages + */ + async function updateSharedLink(user: string, shareId: string): Promise { + if (!user || !shareId) { + throw new ShareServiceError('Missing required parameters', 'INVALID_PARAMS'); + } + + try { + const SharedLink = mongoose.models.SharedLink as Model; + const Message = mongoose.models.Message as SchemaWithMeiliMethods; + const share = (await SharedLink.findOne({ shareId, user }) + .select('-_id -__v -user') + .lean()) as t.ISharedLink | null; + + if (!share) { + throw new ShareServiceError('Share not found', 'SHARE_NOT_FOUND'); + } + + const updatedMessages = await Message.find({ conversationId: share.conversationId, user }) + .sort({ createdAt: 1 }) + .lean(); + + const newShareId = nanoid(); + const update = { + messages: updatedMessages, + user, + shareId: newShareId, + }; + + const updatedShare = (await SharedLink.findOneAndUpdate({ shareId, user }, update, { + new: true, + upsert: false, + runValidators: true, + }).lean()) as t.ISharedLink | null; + + if (!updatedShare) { + throw new ShareServiceError('Share update failed', 'SHARE_UPDATE_ERROR'); + } + + anonymizeConvo(updatedShare); + + return { shareId: newShareId, conversationId: updatedShare.conversationId }; + } catch (error) { + logger.error('[updateSharedLink] Error updating shared link', { + error: error instanceof Error ? error.message : 'Unknown error', + user, + shareId, + }); + throw new ShareServiceError( + error instanceof ShareServiceError ? error.message : 'Error updating shared link', + error instanceof ShareServiceError ? error.code : 'SHARE_UPDATE_ERROR', + ); + } + } + + /** + * Delete a shared link + */ + async function deleteSharedLink( + user: string, + shareId: string, + ): Promise { + if (!user || !shareId) { + throw new ShareServiceError('Missing required parameters', 'INVALID_PARAMS'); + } + + try { + const SharedLink = mongoose.models.SharedLink as Model; + const result = await SharedLink.findOneAndDelete({ shareId, user }).lean(); + + if (!result) { + return null; + } + + return { + success: true, + shareId, + message: 'Share deleted successfully', + }; + } catch (error) { + logger.error('[deleteSharedLink] Error deleting shared link', { + error: error instanceof Error ? 
error.message : 'Unknown error', + user, + shareId, + }); + throw new ShareServiceError('Error deleting shared link', 'SHARE_DELETE_ERROR'); + } + } + + // Return all methods + return { + getSharedLink, + getSharedLinks, + createSharedLink, + updateSharedLink, + deleteSharedLink, + getSharedMessages, + deleteAllSharedLinks, + }; +} + +export type ShareMethods = ReturnType; diff --git a/packages/data-schemas/src/models/plugins/mongoMeili.ts b/packages/data-schemas/src/models/plugins/mongoMeili.ts index 77597cdf63..d44dfd806b 100644 --- a/packages/data-schemas/src/models/plugins/mongoMeili.ts +++ b/packages/data-schemas/src/models/plugins/mongoMeili.ts @@ -1,5 +1,6 @@ import _ from 'lodash'; -import { MeiliSearch, Index } from 'meilisearch'; +import { MeiliSearch } from 'meilisearch'; +import type { SearchResponse, Index } from 'meilisearch'; import type { CallbackWithoutResultAndOptionalError, FilterQuery, @@ -9,6 +10,7 @@ import type { Types, Model, } from 'mongoose'; +import type { IConversation, IMessage } from '~/types'; import logger from '~/config/meiliLogger'; interface MongoMeiliOptions { @@ -29,7 +31,7 @@ interface ContentItem { text?: string; } -interface DocumentWithMeiliIndex extends Document { +interface _DocumentWithMeiliIndex extends Document { _meiliIndex?: boolean; preprocessObjectForIndex?: () => Record; addObjectToMeili?: (next: CallbackWithoutResultAndOptionalError) => Promise; @@ -38,19 +40,18 @@ interface DocumentWithMeiliIndex extends Document { postSaveHook?: (next: CallbackWithoutResultAndOptionalError) => void; postUpdateHook?: (next: CallbackWithoutResultAndOptionalError) => void; postRemoveHook?: (next: CallbackWithoutResultAndOptionalError) => void; - conversationId?: string; - content?: ContentItem[]; - messageId?: string; - unfinished?: boolean; - messages?: unknown[]; - title?: string; - toJSON(): Record; } -interface SchemaWithMeiliMethods extends Model { +export type DocumentWithMeiliIndex = _DocumentWithMeiliIndex & IConversation & Partial; + +export interface SchemaWithMeiliMethods extends Model { syncWithMeili(): Promise; setMeiliIndexSettings(settings: Record): Promise; - meiliSearch(q: string, params: Record, populate: boolean): Promise; + meiliSearch( + q: string, + params?: Record, + populate?: boolean, + ): Promise>>; } // Environment flags @@ -247,7 +248,7 @@ const createMeiliMongooseModel = ({ q: string, params: Record, populate: boolean, - ): Promise { + ): Promise>> { const data = await index.search(q, params); if (populate) { diff --git a/packages/data-schemas/src/schema/promptGroup.ts b/packages/data-schemas/src/schema/promptGroup.ts index b13a83724f..ed5f88fe0e 100644 --- a/packages/data-schemas/src/schema/promptGroup.ts +++ b/packages/data-schemas/src/schema/promptGroup.ts @@ -63,11 +63,11 @@ const promptGroupSchema = new Schema( type: String, index: true, validate: { - validator: function (v: unknown): boolean { + validator: function (v: string | undefined | null): boolean { return v === undefined || v === null || v === '' || /^[a-z0-9-]+$/.test(v); }, - message: (props: unknown) => - `${props.value} is not a valid command. Only lowercase alphanumeric characters and hyphens are allowed.`, + message: (props: { value?: string } | undefined) => + `${props?.value ?? 'Value'} is not a valid command. 
Only lowercase alphanumeric characters and hyphens are allowed.`, }, maxlength: [ Constants.COMMANDS_MAX_LENGTH as number, diff --git a/packages/data-schemas/src/types/index.ts b/packages/data-schemas/src/types/index.ts index 7b7037c8ed..3dfe1334e5 100644 --- a/packages/data-schemas/src/types/index.ts +++ b/packages/data-schemas/src/types/index.ts @@ -13,5 +13,6 @@ export * from './role'; export * from './action'; export * from './assistant'; export * from './file'; +export * from './share'; /* Memories */ export * from './memory'; diff --git a/packages/data-schemas/src/types/share.ts b/packages/data-schemas/src/types/share.ts new file mode 100644 index 0000000000..3db1a360c6 --- /dev/null +++ b/packages/data-schemas/src/types/share.ts @@ -0,0 +1,66 @@ +import type { Types } from 'mongoose'; +import type { IMessage } from './message'; + +export interface ISharedLink { + _id?: Types.ObjectId; + conversationId: string; + title?: string; + user?: string; + messages?: Types.ObjectId[]; + shareId?: string; + isPublic: boolean; + createdAt?: Date; + updatedAt?: Date; +} + +export interface ShareServiceError extends Error { + code: string; +} + +export interface SharedLinksResult { + links: Array<{ + shareId: string; + title: string; + isPublic: boolean; + createdAt: Date; + conversationId: string; + }>; + nextCursor?: Date; + hasNextPage: boolean; +} + +export interface SharedMessagesResult { + conversationId: string; + messages: Array; + shareId: string; + title?: string; + isPublic: boolean; + createdAt?: Date; + updatedAt?: Date; +} + +export interface CreateShareResult { + shareId: string; + conversationId: string; +} + +export interface UpdateShareResult { + shareId: string; + conversationId: string; +} + +export interface DeleteShareResult { + success: boolean; + shareId: string; + message: string; +} + +export interface GetShareLinkResult { + shareId: string | null; + success: boolean; +} + +export interface DeleteAllSharesResult { + message: string; + deletedCount: number; +} From b412455e9d8e5c8cb500c1d96d735a6bfc2d2d50 Mon Sep 17 00:00:00 2001 From: Ruben Talstra Date: Sun, 15 Jun 2025 21:08:31 +0200 Subject: [PATCH 02/16] =?UTF-8?q?=F0=9F=9A=AE=20feat:=20Enhance=20"Delete?= =?UTF-8?q?=20User"=20Script=20(#7899)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * 🔧 fix: Enhance user deletion script to allow deep deletion of related data * 🔧 fix: Update user deletion script to confirm deep deletion of transaction history * 🔧 fix: Refactor user deletion script to use graceful exit and ensure deep deletion of related data * Update config/delete-user.js is a good idea Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --------- Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- config/delete-user.js | 124 ++++++++++++++++++++++++++++++++---------- 1 file changed, 95 insertions(+), 29 deletions(-) diff --git a/config/delete-user.js b/config/delete-user.js index 36f34fb21b..51caa7ce3b 100644 --- a/config/delete-user.js +++ b/config/delete-user.js @@ -1,50 +1,116 @@ +#!/usr/bin/env node const path = require('path'); const mongoose = require(path.resolve(__dirname, '..', 'api', 'node_modules', 'mongoose')); -const { User } = require('@librechat/data-schemas').createModels(mongoose); +const { + User, + Agent, + Assistant, + Balance, + Transaction, + ConversationTag, + Conversation, + Message, + File, + Key, + MemoryEntry, + PluginAuth, + Prompt, + PromptGroup, + Preset, + Session, + SharedLink, + ToolCall, + 
Token, +} = require('@librechat/data-schemas').createModels(mongoose); require('module-alias')({ base: path.resolve(__dirname, '..', 'api') }); const { askQuestion, silentExit } = require('./helpers'); const connect = require('./connect'); +async function gracefulExit(code = 0) { + try { + await mongoose.disconnect(); + } catch (err) { + console.error('Error disconnecting from MongoDB:', err); + } + silentExit(code); +} + (async () => { await connect(); - /** - * Show the welcome / help menu - */ console.purple('---------------'); - console.purple('Deleting a user'); + console.purple('Deleting a user and all related data'); console.purple('---------------'); - let email = ''; - if (process.argv.length >= 3) { - email = process.argv[2]; - } else { - email = await askQuestion('Email:'); - } - let user = await User.findOne({ email: email }); - if (user !== null) { - if ((await askQuestion(`Delete user ${user}?`)) === 'y') { - user = await User.findOneAndDelete({ _id: user._id }); - if (user !== null) { - console.yellow(`Deleted user ${user}`); - } else { - console.yellow(`Couldn't delete user with email ${email}`); - } - } - } else { - console.yellow(`Didn't find user with email ${email}`); + // 1) Get email + let email = process.argv[2]?.trim(); + if (!email) { + email = (await askQuestion('Email:')).trim(); } - silentExit(0); -})(); + // 2) Find user + const user = await User.findOne({ email: email.toLowerCase() }); + if (!user) { + console.yellow(`No user found with email "${email}"`); + return gracefulExit(0); + } -process.on('uncaughtException', (err) => { + // 3) Confirm full deletion + const confirmAll = await askQuestion( + `Really delete user ${user.email} (${user._id}) and ALL their data? (y/N)`, + ); + if (confirmAll.toLowerCase() !== 'y') { + console.yellow('Aborted.'); + return gracefulExit(0); + } + + // 4) Ask specifically about transactions + const confirmTx = await askQuestion('Also delete all transaction history for this user? 
(y/N)'); + const deleteTx = confirmTx.toLowerCase() === 'y'; + + const uid = user._id.toString(); + + // 5) Build and run deletion tasks + const tasks = [ + Agent.deleteMany({ author: uid }), + Assistant.deleteMany({ user: uid }), + Balance.deleteMany({ user: uid }), + ConversationTag.deleteMany({ user: uid }), + Conversation.deleteMany({ user: uid }), + Message.deleteMany({ user: uid }), + File.deleteMany({ user: uid }), + Key.deleteMany({ userId: uid }), + MemoryEntry.deleteMany({ userId: uid }), + PluginAuth.deleteMany({ userId: uid }), + Prompt.deleteMany({ author: uid }), + PromptGroup.deleteMany({ author: uid }), + Preset.deleteMany({ user: uid }), + Session.deleteMany({ user: uid }), + SharedLink.deleteMany({ user: uid }), + ToolCall.deleteMany({ user: uid }), + Token.deleteMany({ userId: uid }), + ]; + + if (deleteTx) { + tasks.push(Transaction.deleteMany({ user: uid })); + } + + await Promise.all(tasks); + + // 6) Finally delete the user document itself + await User.deleteOne({ _id: uid }); + + console.green(`✔ Successfully deleted user ${email} and all associated data.`); + if (!deleteTx) { + console.yellow('⚠️ Transaction history was retained.'); + } + + return gracefulExit(0); +})().catch(async (err) => { if (!err.message.includes('fetch failed')) { console.error('There was an uncaught error:'); console.error(err); - } - - if (!err.message.includes('fetch failed')) { + await mongoose.disconnect(); process.exit(1); } }); From ec7370dfe9a9e6a739f9de36de635e7e2d0433bf Mon Sep 17 00:00:00 2001 From: Danny Avila Date: Tue, 17 Jun 2025 13:50:33 -0400 Subject: [PATCH 03/16] =?UTF-8?q?=F0=9F=AA=90=20feat:=20MCP=20OAuth=202.0?= =?UTF-8?q?=20Discovery=20Support=20(#7924)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update @modelcontextprotocol/sdk to version 1.12.3 in package.json and package-lock.json - Bump version of @modelcontextprotocol/sdk to 1.12.3 to incorporate recent updates. - Update dependencies for ajv and cross-spawn to their latest versions. - Add ajv as a new dependency in the sdk module. - Include json-schema-traverse as a new dependency in the sdk module. * feat: @librechat/auth * feat: Add crypto module exports to auth package - Introduced a new crypto module by creating index.ts in the crypto directory. - Updated the main index.ts of the auth package to export from the new crypto module. * feat: Update package dependencies and build scripts for auth package - Added @librechat/auth as a dependency in package.json and package-lock.json. - Updated build scripts to include the auth package in both frontend and bun build processes. - Removed unused mongoose and openid-client dependencies from package-lock.json for cleaner dependency management. * refactor: Migrate crypto utility functions to @librechat/auth - Replaced local crypto utility imports with the new @librechat/auth package across multiple files. - Removed the obsolete crypto.js file and its exports. - Updated relevant services and models to utilize the new encryption and decryption methods from @librechat/auth. * feat: Enhance OAuth token handling and update dependencies in auth package * chore: Remove Token model and TokenService due to restructuring of OAuth handling - Deleted the Token.js model and TokenService.js, which were responsible for managing OAuth tokens. - This change is part of a broader refactor to streamline OAuth token management and improve code organization. 
* refactor: imports from '@librechat/auth' to '@librechat/api' and add OAuth token handling functionality * refactor: Simplify logger usage in MCP and FlowStateManager classes * chore: fix imports * feat: Add OAuth configuration schema to MCP with token exchange method support * feat: FIRST PASS Implement MCP OAuth flow with token management and error handling - Added a new route for handling OAuth callbacks and token retrieval. - Integrated OAuth token storage and retrieval mechanisms. - Enhanced MCP connection to support automatic OAuth flow initiation on 401 errors. - Implemented dynamic client registration and metadata discovery for OAuth. - Updated MCPManager to manage OAuth tokens and handle authentication requirements. - Introduced comprehensive logging for OAuth processes and error handling. * refactor: Update MCPConnection and MCPManager to utilize new URL handling - Added a `url` property to MCPConnection for better URL management. - Refactored MCPManager to use the new `url` property instead of a deprecated method for OAuth handling. - Changed logging from info to debug level for flow manager and token methods initialization. - Improved comments for clarity on existing tokens and OAuth event listener setup. * refactor: Improve connection timeout error messages in MCPConnection and MCPManager and use initTimeout for connection - Updated the connection timeout error messages to include the duration of the timeout. - Introduced a configurable `connectTimeout` variable in both MCPConnection and MCPManager for better flexibility. * chore: cleanup MCP OAuth Token exchange handling; fix: erroneous use of flowsCache and remove verbose logs * refactor: Update MCPManager and MCPTokenStorage to use TokenMethods for token management - Removed direct token storage handling in MCPManager and replaced it with TokenMethods for better abstraction. - Refactored MCPTokenStorage methods to accept parameters for token operations, enhancing flexibility and readability. - Improved logging messages related to token persistence and retrieval processes. * refactor: Update MCP OAuth handling to use static methods and improve flow management - Refactored MCPOAuthHandler to utilize static methods for initiating and completing OAuth flows, enhancing clarity and reducing instance dependencies. - Updated MCPManager to pass flowManager explicitly to OAuth handling methods, improving flexibility in flow state management. - Enhanced comments and logging for better understanding of OAuth processes and flow state retrieval. * refactor: Integrate token methods into createMCPTool for enhanced token management * refactor: Change logging from info to debug level in MCPOAuthHandler for improved log management * chore: clean up logging * feat: first pass, auth URL from MCP OAuth flow * chore: Improve logging format for OAuth authentication URL display * chore: cleanup mcp manager comments * feat: add connection reconnection logic in MCPManager * refactor: reorganize token storage handling in MCP - Moved token storage logic from MCPManager to a new MCPTokenStorage class for better separation of concerns. - Updated imports to reflect the new token storage structure. - Enhanced methods for storing, retrieving, updating, and deleting OAuth tokens, improving overall token management. * chore: update comment for SYSTEM_USER_ID in MCPManager for clarity * feat: implement refresh token functionality in MCP - Added refresh token handling in MCPManager to support token renewal for both app-level and user-specific connections. 
- Introduced a refreshTokens function to facilitate token refresh logic. - Enhanced MCPTokenStorage to manage client information and refresh token processes. - Updated logging for better traceability during token operations. * chore: cleanup @librechat/auth * feat: implement MCP server initialization in a separate service - Added a new service to handle the initialization of MCP servers, improving code organization and readability. - Refactored the server startup logic to utilize the new initializeMCP function. - Removed redundant MCP initialization code from the main server file. * fix: don't log auth url for user connections * feat: enhance OAuth flow with success and error handling components - Updated OAuth callback routes to redirect to new success and error pages instead of sending status messages. - Introduced `OAuthSuccess` and `OAuthError` components to provide user feedback during authentication. - Added localization support for success and error messages in the translation files. - Implemented countdown functionality in the success component for a better user experience. * fix: refresh token handling for user connections, add missing URL and methods - add standard enum for system user id and helper for determining app-lvel vs. user-level connections * refactor: update token handling in MCPManager and MCPTokenStorage * fix: improve error logging in OAuth authentication handler * fix: concurrency issues for both login url emission and concurrency of oauth flows for shared flows (same user, same server, multiple calls for same server) * fix: properly fail shared flows for concurrent server calls and prevent duplication of tokens * chore: remove unused auth package directory from update configuration * ci: fix mocks in samlStrategy tests * ci: add mcpConfig to AppService test setup * chore: remove obsolete MCP OAuth implementation documentation * fix: update build script for API to use correct command * chore: bump version of @librechat/api to 1.2.4 * fix: update abort signal handling in createMCPTool function * fix: add optional clientInfo parameter to refreshTokensFunction metadata * refactor: replace app.locals.availableTools with getCachedTools in multiple services and controllers for improved tool management * fix: concurrent refresh token handling issue * refactor: add signal parameter to getUserConnection method for improved abort handling * chore: JSDoc typing for `loadEphemeralAgent` * refactor: update isConnectionActive method to use destructured parameters for improved readability * feat: implement caching for MCP tools to handle app-level disconnects for loading list of tools * ci: fix agent test --- api/app/clients/tools/util/handleTools.js | 6 +- api/cache/getLogStores.js | 5 + api/config/index.js | 3 +- api/models/Agent.js | 9 +- api/models/Agent.spec.js | 67 +- api/models/Token.js | 42 -- api/models/inviteUser.js | 2 +- api/server/controllers/PluginController.js | 57 +- api/server/controllers/TwoFactorController.js | 2 +- api/server/controllers/agents/v1.js | 13 +- api/server/controllers/assistants/v1.js | 30 +- api/server/controllers/assistants/v2.js | 20 +- api/server/index.js | 19 +- api/server/routes/actions.js | 118 +-- api/server/routes/index.js | 2 + api/server/routes/mcp.js | 205 ++++++ api/server/routes/oauth.js | 4 +- api/server/services/ActionService.js | 37 +- api/server/services/AppService.js | 14 +- api/server/services/AppService.spec.js | 67 +- api/server/services/AuthService.js | 5 +- api/server/services/Config/getCachedTools.js | 258 +++++++ 
api/server/services/Config/index.js | 2 + api/server/services/MCP.js | 152 +++- api/server/services/PluginService.js | 4 +- api/server/services/TokenService.js | 195 ----- api/server/services/ToolService.js | 9 +- api/server/services/UserService.js | 3 +- api/server/services/initializeMCP.js | 54 ++ api/server/services/twoFactorService.js | 2 +- api/server/utils/index.js | 2 - api/strategies/openidStrategy.spec.js | 11 +- api/strategies/samlStrategy.spec.js | 42 +- api/typedefs.js | 7 + client/src/components/OAuth/OAuthError.tsx | 72 ++ client/src/components/OAuth/OAuthSuccess.tsx | 47 ++ client/src/components/OAuth/index.ts | 2 + client/src/locales/en/translation.json | 15 +- client/src/routes/index.tsx | 21 +- package-lock.json | 87 +-- package.json | 1 + packages/api/package.json | 4 +- packages/api/rollup.config.js | 3 +- .../api/src/crypto/encryption.ts | 45 +- packages/api/src/crypto/index.ts | 1 + packages/api/src/flow/manager.spec.ts | 4 +- packages/api/src/flow/manager.ts | 43 +- packages/api/src/index.ts | 5 + packages/api/src/mcp/connection.ts | 222 ++++-- packages/api/src/mcp/enum.ts | 6 + packages/api/src/mcp/manager.ts | 676 +++++++++++++++--- packages/api/src/mcp/oauth/handler.ts | 603 ++++++++++++++++ packages/api/src/mcp/oauth/index.ts | 3 + packages/api/src/mcp/oauth/tokens.ts | 382 ++++++++++ packages/api/src/mcp/oauth/types.ts | 98 +++ packages/api/src/oauth/index.ts | 1 + packages/api/src/oauth/tokens.ts | 324 +++++++++ packages/data-provider/src/config.ts | 4 + packages/data-provider/src/mcp.ts | 24 + packages/data-schemas/src/methods/index.ts | 2 +- 60 files changed, 3399 insertions(+), 764 deletions(-) delete mode 100644 api/models/Token.js create mode 100644 api/server/routes/mcp.js create mode 100644 api/server/services/Config/getCachedTools.js delete mode 100644 api/server/services/TokenService.js create mode 100644 api/server/services/initializeMCP.js create mode 100644 client/src/components/OAuth/OAuthError.tsx create mode 100644 client/src/components/OAuth/OAuthSuccess.tsx create mode 100644 client/src/components/OAuth/index.ts rename api/server/utils/crypto.js => packages/api/src/crypto/encryption.ts (81%) create mode 100644 packages/api/src/crypto/index.ts create mode 100644 packages/api/src/mcp/oauth/handler.ts create mode 100644 packages/api/src/mcp/oauth/index.ts create mode 100644 packages/api/src/mcp/oauth/tokens.ts create mode 100644 packages/api/src/mcp/oauth/types.ts create mode 100644 packages/api/src/oauth/index.ts create mode 100644 packages/api/src/oauth/tokens.ts diff --git a/api/app/clients/tools/util/handleTools.js b/api/app/clients/tools/util/handleTools.js index 51f0c87ef9..b5a40fc4a3 100644 --- a/api/app/clients/tools/util/handleTools.js +++ b/api/app/clients/tools/util/handleTools.js @@ -1,3 +1,4 @@ +const { logger } = require('@librechat/data-schemas'); const { SerpAPI } = require('@langchain/community/tools/serpapi'); const { Calculator } = require('@langchain/community/tools/calculator'); const { EnvVar, createCodeExecutionTool, createSearchTool } = require('@librechat/agents'); @@ -29,8 +30,8 @@ const { const { primeFiles: primeCodeFiles } = require('~/server/services/Files/Code/process'); const { createFileSearchTool, primeFiles: primeSearchFiles } = require('./fileSearch'); const { loadAuthValues } = require('~/server/services/Tools/credentials'); +const { getCachedTools } = require('~/server/services/Config'); const { createMCPTool } = require('~/server/services/MCP'); -const { logger } = require('~/config'); const mcpToolPattern = 
new RegExp(`^.+${Constants.mcp_delimiter}.+$`); @@ -236,7 +237,7 @@ const loadTools = async ({ /** @type {Record} */ const toolContextMap = {}; - const appTools = options.req?.app?.locals?.availableTools ?? {}; + const appTools = (await getCachedTools({ includeGlobal: true })) ?? {}; for (const tool of tools) { if (tool === Tools.execute_code) { @@ -299,6 +300,7 @@ Current Date & Time: ${replaceSpecialVars({ text: '{{iso_datetime}}' })} requestedTools[tool] = async () => createMCPTool({ req: options.req, + res: options.res, toolKey: tool, model: agent?.model ?? model, provider: agent?.provider ?? endpoint, diff --git a/api/cache/getLogStores.js b/api/cache/getLogStores.js index 2478bf40d9..06cadf9f64 100644 --- a/api/cache/getLogStores.js +++ b/api/cache/getLogStores.js @@ -29,6 +29,10 @@ const roles = isRedisEnabled ? new Keyv({ store: keyvRedis }) : new Keyv({ namespace: CacheKeys.ROLES }); +const mcpTools = isRedisEnabled + ? new Keyv({ store: keyvRedis }) + : new Keyv({ namespace: CacheKeys.MCP_TOOLS }); + const audioRuns = isRedisEnabled ? new Keyv({ store: keyvRedis, ttl: Time.TEN_MINUTES }) : new Keyv({ namespace: CacheKeys.AUDIO_RUNS, ttl: Time.TEN_MINUTES }); @@ -67,6 +71,7 @@ const openIdExchangedTokensCache = isRedisEnabled const namespaces = { [CacheKeys.ROLES]: roles, + [CacheKeys.MCP_TOOLS]: mcpTools, [CacheKeys.CONFIG_STORE]: config, [CacheKeys.PENDING_REQ]: pending_req, [ViolationTypes.BAN]: new Keyv({ store: keyvMongo, namespace: CacheKeys.BANS, ttl: duration }), diff --git a/api/config/index.js b/api/config/index.js index a02c75887e..2e69e87118 100644 --- a/api/config/index.js +++ b/api/config/index.js @@ -15,7 +15,7 @@ let flowManager = null; */ function getMCPManager(userId) { if (!mcpManager) { - mcpManager = MCPManager.getInstance(logger); + mcpManager = MCPManager.getInstance(); } else { mcpManager.checkIdleConnections(userId); } @@ -30,7 +30,6 @@ function getFlowStateManager(flowsCache) { if (!flowManager) { flowManager = new FlowStateManager(flowsCache, { ttl: Time.ONE_MINUTE * 3, - logger, }); } return flowManager; diff --git a/api/models/Agent.js b/api/models/Agent.js index 297604c444..d33ca8a8bf 100644 --- a/api/models/Agent.js +++ b/api/models/Agent.js @@ -11,6 +11,7 @@ const { removeAgentIdsFromProject, removeAgentFromAllProjects, } = require('./Project'); +const { getCachedTools } = require('~/server/services/Config'); const getLogStores = require('~/cache/getLogStores'); const { getActions } = require('./Action'); const { Agent } = require('~/db/models'); @@ -55,12 +56,12 @@ const getAgent = async (searchParameter) => await Agent.findOne(searchParameter) * @param {string} params.agent_id * @param {string} params.endpoint * @param {import('@librechat/agents').ClientOptions} [params.model_parameters] - * @returns {Agent|null} The agent document as a plain object, or null if not found. + * @returns {Promise} The agent document as a plain object, or null if not found. 
*/ -const loadEphemeralAgent = ({ req, agent_id, endpoint, model_parameters: _m }) => { +const loadEphemeralAgent = async ({ req, agent_id, endpoint, model_parameters: _m }) => { const { model, ...model_parameters } = _m; /** @type {Record} */ - const availableTools = req.app.locals.availableTools; + const availableTools = await getCachedTools({ includeGlobal: true }); /** @type {TEphemeralAgent | null} */ const ephemeralAgent = req.body.ephemeralAgent; const mcpServers = new Set(ephemeralAgent?.mcp); @@ -111,7 +112,7 @@ const loadAgent = async ({ req, agent_id, endpoint, model_parameters }) => { return null; } if (agent_id === EPHEMERAL_AGENT_ID) { - return loadEphemeralAgent({ req, agent_id, endpoint, model_parameters }); + return await loadEphemeralAgent({ req, agent_id, endpoint, model_parameters }); } const agent = await getAgent({ id: agent_id, diff --git a/api/models/Agent.spec.js b/api/models/Agent.spec.js index 1e18168147..0b0646f524 100644 --- a/api/models/Agent.spec.js +++ b/api/models/Agent.spec.js @@ -6,6 +6,10 @@ const originalEnv = { process.env.CREDS_KEY = '0123456789abcdef0123456789abcdef'; process.env.CREDS_IV = '0123456789abcdef'; +jest.mock('~/server/services/Config', () => ({ + getCachedTools: jest.fn(), +})); + const mongoose = require('mongoose'); const { v4: uuidv4 } = require('uuid'); const { agentSchema } = require('@librechat/data-schemas'); @@ -23,6 +27,7 @@ const { generateActionMetadataHash, revertAgentVersion, } = require('./Agent'); +const { getCachedTools } = require('~/server/services/Config'); /** * @type {import('mongoose').Model} @@ -406,6 +411,7 @@ describe('models/Agent', () => { beforeAll(async () => { mongoServer = await MongoMemoryServer.create(); const mongoUri = mongoServer.getUri(); + Agent = mongoose.models.Agent || mongoose.model('Agent', agentSchema); await mongoose.connect(mongoUri); }); @@ -1546,6 +1552,12 @@ describe('models/Agent', () => { test('should test ephemeral agent loading logic', async () => { const { EPHEMERAL_AGENT_ID } = require('librechat-data-provider').Constants; + getCachedTools.mockResolvedValue({ + tool1_mcp_server1: {}, + tool2_mcp_server2: {}, + another_tool: {}, + }); + const mockReq = { user: { id: 'user123' }, body: { @@ -1556,15 +1568,6 @@ describe('models/Agent', () => { mcp: ['server1', 'server2'], }, }, - app: { - locals: { - availableTools: { - tool1_mcp_server1: {}, - tool2_mcp_server2: {}, - another_tool: {}, - }, - }, - }, }; const result = await loadAgent({ @@ -1657,6 +1660,8 @@ describe('models/Agent', () => { test('should handle ephemeral agent with no MCP servers', async () => { const { EPHEMERAL_AGENT_ID } = require('librechat-data-provider').Constants; + getCachedTools.mockResolvedValue({}); + const mockReq = { user: { id: 'user123' }, body: { @@ -1667,11 +1672,6 @@ describe('models/Agent', () => { mcp: [], }, }, - app: { - locals: { - availableTools: {}, - }, - }, }; const result = await loadAgent({ @@ -1692,16 +1692,13 @@ describe('models/Agent', () => { test('should handle ephemeral agent with undefined ephemeralAgent in body', async () => { const { EPHEMERAL_AGENT_ID } = require('librechat-data-provider').Constants; + getCachedTools.mockResolvedValue({}); + const mockReq = { user: { id: 'user123' }, body: { promptPrefix: 'Basic instructions', }, - app: { - locals: { - availableTools: {}, - }, - }, }; const result = await loadAgent({ @@ -1734,6 +1731,13 @@ describe('models/Agent', () => { const { EPHEMERAL_AGENT_ID } = require('librechat-data-provider').Constants; const largeToolList = Array.from({ 
length: 100 }, (_, i) => `tool_${i}_mcp_server1`); + const availableTools = largeToolList.reduce((acc, tool) => { + acc[tool] = {}; + return acc; + }, {}); + + getCachedTools.mockResolvedValue(availableTools); + const mockReq = { user: { id: 'user123' }, body: { @@ -1744,14 +1748,6 @@ describe('models/Agent', () => { mcp: ['server1'], }, }, - app: { - locals: { - availableTools: largeToolList.reduce((acc, tool) => { - acc[tool] = {}; - return acc; - }, {}), - }, - }, }; const result = await loadAgent({ @@ -2272,6 +2268,13 @@ describe('models/Agent', () => { test('should handle loadEphemeralAgent with malformed MCP tool names', async () => { const { EPHEMERAL_AGENT_ID } = require('librechat-data-provider').Constants; + getCachedTools.mockResolvedValue({ + malformed_tool_name: {}, // No mcp delimiter + tool__server1: {}, // Wrong delimiter + tool_mcp_server1: {}, // Correct format + tool_mcp_server2: {}, // Different server + }); + const mockReq = { user: { id: 'user123' }, body: { @@ -2282,16 +2285,6 @@ describe('models/Agent', () => { mcp: ['server1'], }, }, - app: { - locals: { - availableTools: { - malformed_tool_name: {}, // No mcp delimiter - tool__server1: {}, // Wrong delimiter - tool_mcp_server1: {}, // Correct format - tool_mcp_server2: {}, // Different server - }, - }, - }, }; const result = await loadAgent({ diff --git a/api/models/Token.js b/api/models/Token.js deleted file mode 100644 index 6f130eb2c4..0000000000 --- a/api/models/Token.js +++ /dev/null @@ -1,42 +0,0 @@ -const { findToken, updateToken, createToken } = require('~/models'); -const { encryptV2 } = require('~/server/utils/crypto'); - -/** - * Handles the OAuth token by creating or updating the token. - * @param {object} fields - * @param {string} fields.userId - The user's ID. - * @param {string} fields.token - The full token to store. - * @param {string} fields.identifier - Unique, alternative identifier for the token. - * @param {number} fields.expiresIn - The number of seconds until the token expires. - * @param {object} fields.metadata - Additional metadata to store with the token. - * @param {string} [fields.type="oauth"] - The type of token. Default is 'oauth'. 
- */ -async function handleOAuthToken({ - token, - userId, - identifier, - expiresIn, - metadata, - type = 'oauth', -}) { - const encrypedToken = await encryptV2(token); - const tokenData = { - type, - userId, - metadata, - identifier, - token: encrypedToken, - expiresIn: parseInt(expiresIn, 10) || 3600, - }; - - const existingToken = await findToken({ userId, identifier }); - if (existingToken) { - return await updateToken({ identifier }, tokenData); - } else { - return await createToken(tokenData); - } -} - -module.exports = { - handleOAuthToken, -}; diff --git a/api/models/inviteUser.js b/api/models/inviteUser.js index 9f35b3f02b..eeb42841bf 100644 --- a/api/models/inviteUser.js +++ b/api/models/inviteUser.js @@ -1,6 +1,6 @@ const mongoose = require('mongoose'); +const { getRandomValues } = require('@librechat/api'); const { logger, hashToken } = require('@librechat/data-schemas'); -const { getRandomValues } = require('~/server/utils/crypto'); const { createToken, findToken } = require('~/models'); /** diff --git a/api/server/controllers/PluginController.js b/api/server/controllers/PluginController.js index 674e36002a..98e9cbfc45 100644 --- a/api/server/controllers/PluginController.js +++ b/api/server/controllers/PluginController.js @@ -1,8 +1,9 @@ +const { logger } = require('@librechat/data-schemas'); const { CacheKeys, AuthType } = require('librechat-data-provider'); +const { getCustomConfig, getCachedTools } = require('~/server/services/Config'); const { getToolkitKey } = require('~/server/services/ToolService'); -const { getCustomConfig } = require('~/server/services/Config'); +const { getMCPManager, getFlowStateManager } = require('~/config'); const { availableTools } = require('~/app/clients/tools'); -const { getMCPManager } = require('~/config'); const { getLogStores } = require('~/cache'); /** @@ -84,6 +85,45 @@ const getAvailablePluginsController = async (req, res) => { } }; +function createServerToolsCallback() { + /** + * @param {string} serverName + * @param {TPlugin[] | null} serverTools + */ + return async function (serverName, serverTools) { + try { + const mcpToolsCache = getLogStores(CacheKeys.MCP_TOOLS); + if (!serverName || !mcpToolsCache) { + return; + } + await mcpToolsCache.set(serverName, serverTools); + logger.debug(`MCP tools for ${serverName} added to cache.`); + } catch (error) { + logger.error('Error retrieving MCP tools from cache:', error); + } + }; +} + +function createGetServerTools() { + /** + * Retrieves cached server tools + * @param {string} serverName + * @returns {Promise} + */ + return async function (serverName) { + try { + const mcpToolsCache = getLogStores(CacheKeys.MCP_TOOLS); + if (!mcpToolsCache) { + return null; + } + return await mcpToolsCache.get(serverName); + } catch (error) { + logger.error('Error retrieving MCP tools from cache:', error); + return null; + } + }; +} + /** * Retrieves and returns a list of available tools, either from a cache or by reading a plugin manifest file. * @@ -109,7 +149,16 @@ const getAvailableTools = async (req, res) => { const customConfig = await getCustomConfig(); if (customConfig?.mcpServers != null) { const mcpManager = getMCPManager(); - pluginManifest = await mcpManager.loadManifestTools(pluginManifest); + const flowsCache = getLogStores(CacheKeys.FLOWS); + const flowManager = flowsCache ? 
getFlowStateManager(flowsCache) : null; + const serverToolsCallback = createServerToolsCallback(); + const getServerTools = createGetServerTools(); + const mcpTools = await mcpManager.loadManifestTools({ + flowManager, + serverToolsCallback, + getServerTools, + }); + pluginManifest = [...mcpTools, ...pluginManifest]; } /** @type {TPlugin[]} */ @@ -123,7 +172,7 @@ const getAvailableTools = async (req, res) => { } }); - const toolDefinitions = req.app.locals.availableTools; + const toolDefinitions = await getCachedTools({ includeGlobal: true }); const tools = authenticatedPlugins.filter( (plugin) => toolDefinitions[plugin.pluginKey] !== undefined || diff --git a/api/server/controllers/TwoFactorController.js b/api/server/controllers/TwoFactorController.js index 6e22db2e5c..44baf92ee7 100644 --- a/api/server/controllers/TwoFactorController.js +++ b/api/server/controllers/TwoFactorController.js @@ -1,3 +1,4 @@ +const { encryptV3 } = require('@librechat/api'); const { logger } = require('@librechat/data-schemas'); const { verifyTOTP, @@ -7,7 +8,6 @@ const { generateBackupCodes, } = require('~/server/services/twoFactorService'); const { getUserById, updateUser } = require('~/models'); -const { encryptV3 } = require('~/server/utils/crypto'); const safeAppTitle = (process.env.APP_TITLE || 'LibreChat').replace(/\s+/g, ''); diff --git a/api/server/controllers/agents/v1.js b/api/server/controllers/agents/v1.js index 38a058b540..18bd7190f0 100644 --- a/api/server/controllers/agents/v1.js +++ b/api/server/controllers/agents/v1.js @@ -1,9 +1,9 @@ const fs = require('fs').promises; const { nanoid } = require('nanoid'); +const { logger } = require('@librechat/data-schemas'); const { Tools, Constants, - FileContext, FileSources, SystemRoles, EToolResources, @@ -16,16 +16,16 @@ const { deleteAgent, getListAgents, } = require('~/models/Agent'); -const { uploadImageBuffer, filterFile } = require('~/server/services/Files/process'); const { getStrategyFunctions } = require('~/server/services/Files/strategies'); const { resizeAvatar } = require('~/server/services/Files/images/avatar'); const { refreshS3Url } = require('~/server/services/Files/S3/crud'); +const { filterFile } = require('~/server/services/Files/process'); const { updateAction, getActions } = require('~/models/Action'); +const { getCachedTools } = require('~/server/services/Config'); const { updateAgentProjects } = require('~/models/Agent'); const { getProjectByName } = require('~/models/Project'); -const { deleteFileByFilter } = require('~/models/File'); const { revertAgentVersion } = require('~/models/Agent'); -const { logger } = require('~/config'); +const { deleteFileByFilter } = require('~/models/File'); const systemTools = { [Tools.execute_code]: true, @@ -47,8 +47,9 @@ const createAgentHandler = async (req, res) => { agentData.tools = []; + const availableTools = await getCachedTools({ includeGlobal: true }); for (const tool of tools) { - if (req.app.locals.availableTools[tool]) { + if (availableTools[tool]) { agentData.tools.push(tool); } @@ -445,7 +446,7 @@ const uploadAgentAvatarHandler = async (req, res) => { try { await fs.unlink(req.file.path); logger.debug('[/:agent_id/avatar] Temp. image upload file deleted'); - } catch (error) { + } catch { logger.debug('[/:agent_id/avatar] Temp. 
image upload file already deleted'); } } diff --git a/api/server/controllers/assistants/v1.js b/api/server/controllers/assistants/v1.js index 8fb73167c1..e723cda4fc 100644 --- a/api/server/controllers/assistants/v1.js +++ b/api/server/controllers/assistants/v1.js @@ -1,4 +1,5 @@ const fs = require('fs').promises; +const { logger } = require('@librechat/data-schemas'); const { FileContext } = require('librechat-data-provider'); const { uploadImageBuffer, filterFile } = require('~/server/services/Files/process'); const validateAuthor = require('~/server/middleware/assistants/validateAuthor'); @@ -6,9 +7,9 @@ const { getStrategyFunctions } = require('~/server/services/Files/strategies'); const { deleteAssistantActions } = require('~/server/services/ActionService'); const { updateAssistantDoc, getAssistants } = require('~/models/Assistant'); const { getOpenAIClient, fetchAssistants } = require('./helpers'); +const { getCachedTools } = require('~/server/services/Config'); const { manifestToolMap } = require('~/app/clients/tools'); const { deleteFileByFilter } = require('~/models/File'); -const { logger } = require('~/config'); /** * Create an assistant. @@ -30,21 +31,20 @@ const createAssistant = async (req, res) => { delete assistantData.conversation_starters; delete assistantData.append_current_datetime; + const toolDefinitions = await getCachedTools({ includeGlobal: true }); + assistantData.tools = tools .map((tool) => { if (typeof tool !== 'string') { return tool; } - const toolDefinitions = req.app.locals.availableTools; const toolDef = toolDefinitions[tool]; if (!toolDef && manifestToolMap[tool] && manifestToolMap[tool].toolkit === true) { - return ( - Object.entries(toolDefinitions) - .filter(([key]) => key.startsWith(`${tool}_`)) - // eslint-disable-next-line no-unused-vars - .map(([_, val]) => val) - ); + return Object.entries(toolDefinitions) + .filter(([key]) => key.startsWith(`${tool}_`)) + + .map(([_, val]) => val); } return toolDef; @@ -135,21 +135,21 @@ const patchAssistant = async (req, res) => { append_current_datetime, ...updateData } = req.body; + + const toolDefinitions = await getCachedTools({ includeGlobal: true }); + updateData.tools = (updateData.tools ?? 
[]) .map((tool) => { if (typeof tool !== 'string') { return tool; } - const toolDefinitions = req.app.locals.availableTools; const toolDef = toolDefinitions[tool]; if (!toolDef && manifestToolMap[tool] && manifestToolMap[tool].toolkit === true) { - return ( - Object.entries(toolDefinitions) - .filter(([key]) => key.startsWith(`${tool}_`)) - // eslint-disable-next-line no-unused-vars - .map(([_, val]) => val) - ); + return Object.entries(toolDefinitions) + .filter(([key]) => key.startsWith(`${tool}_`)) + + .map(([_, val]) => val); } return toolDef; diff --git a/api/server/controllers/assistants/v2.js b/api/server/controllers/assistants/v2.js index 3bf83a626f..98441ba70a 100644 --- a/api/server/controllers/assistants/v2.js +++ b/api/server/controllers/assistants/v2.js @@ -1,10 +1,11 @@ +const { logger } = require('@librechat/data-schemas'); const { ToolCallTypes } = require('librechat-data-provider'); const validateAuthor = require('~/server/middleware/assistants/validateAuthor'); const { validateAndUpdateTool } = require('~/server/services/ActionService'); +const { getCachedTools } = require('~/server/services/Config'); const { updateAssistantDoc } = require('~/models/Assistant'); const { manifestToolMap } = require('~/app/clients/tools'); const { getOpenAIClient } = require('./helpers'); -const { logger } = require('~/config'); /** * Create an assistant. @@ -27,21 +28,20 @@ const createAssistant = async (req, res) => { delete assistantData.conversation_starters; delete assistantData.append_current_datetime; + const toolDefinitions = await getCachedTools({ includeGlobal: true }); + assistantData.tools = tools .map((tool) => { if (typeof tool !== 'string') { return tool; } - const toolDefinitions = req.app.locals.availableTools; const toolDef = toolDefinitions[tool]; if (!toolDef && manifestToolMap[tool] && manifestToolMap[tool].toolkit === true) { - return ( - Object.entries(toolDefinitions) - .filter(([key]) => key.startsWith(`${tool}_`)) - // eslint-disable-next-line no-unused-vars - .map(([_, val]) => val) - ); + return Object.entries(toolDefinitions) + .filter(([key]) => key.startsWith(`${tool}_`)) + + .map(([_, val]) => val); } return toolDef; @@ -125,13 +125,13 @@ const updateAssistant = async ({ req, openai, assistant_id, updateData }) => { let hasFileSearch = false; for (const tool of updateData.tools ?? []) { - const toolDefinitions = req.app.locals.availableTools; + const toolDefinitions = await getCachedTools({ includeGlobal: true }); let actualTool = typeof tool === 'string' ? 
toolDefinitions[tool] : tool; if (!actualTool && manifestToolMap[tool] && manifestToolMap[tool].toolkit === true) { actualTool = Object.entries(toolDefinitions) .filter(([key]) => key.startsWith(`${tool}_`)) - // eslint-disable-next-line no-unused-vars + .map(([_, val]) => val); } else if (!actualTool) { continue; diff --git a/api/server/index.js b/api/server/index.js index a04c339b0f..b1132873c7 100644 --- a/api/server/index.js +++ b/api/server/index.js @@ -1,22 +1,22 @@ require('dotenv').config(); +const fs = require('fs'); const path = require('path'); require('module-alias')({ base: path.resolve(__dirname, '..') }); const cors = require('cors'); const axios = require('axios'); const express = require('express'); -const compression = require('compression'); const passport = require('passport'); -const mongoSanitize = require('express-mongo-sanitize'); -const fs = require('fs'); +const compression = require('compression'); const cookieParser = require('cookie-parser'); +const { isEnabled } = require('@librechat/api'); +const { logger } = require('@librechat/data-schemas'); +const mongoSanitize = require('express-mongo-sanitize'); const { connectDb, indexSync } = require('~/db'); -const { jwtLogin, passportLogin } = require('~/strategies'); -const { isEnabled } = require('~/server/utils'); -const { ldapLogin } = require('~/strategies'); -const { logger } = require('~/config'); const validateImageRequest = require('./middleware/validateImageRequest'); +const { jwtLogin, ldapLogin, passportLogin } = require('~/strategies'); const errorController = require('./controllers/ErrorController'); +const initializeMCP = require('./services/initializeMCP'); const configureSocialLogins = require('./socialLogins'); const AppService = require('./services/AppService'); const staticCache = require('./utils/staticCache'); @@ -119,6 +119,7 @@ const startServer = async () => { app.use('/api/bedrock', routes.bedrock); app.use('/api/memories', routes.memories); app.use('/api/tags', routes.tags); + app.use('/api/mcp', routes.mcp); app.use((req, res) => { res.set({ @@ -142,6 +143,8 @@ const startServer = async () => { } else { logger.info(`Server listening at http://${host == '0.0.0.0' ? 
'localhost' : host}:${port}`); } + + initializeMCP(app); }); }; @@ -184,5 +187,5 @@ process.on('uncaughtException', (err) => { process.exit(1); }); -// export app for easier testing purposes +/** Export app for easier testing purposes */ module.exports = app; diff --git a/api/server/routes/actions.js b/api/server/routes/actions.js index 242e52e4ae..9f94f617ce 100644 --- a/api/server/routes/actions.js +++ b/api/server/routes/actions.js @@ -1,8 +1,10 @@ const express = require('express'); const jwt = require('jsonwebtoken'); +const { getAccessToken } = require('@librechat/api'); +const { logger } = require('@librechat/data-schemas'); const { CacheKeys } = require('librechat-data-provider'); -const { getAccessToken } = require('~/server/services/TokenService'); -const { logger, getFlowStateManager } = require('~/config'); +const { findToken, updateToken, createToken } = require('~/models'); +const { getFlowStateManager } = require('~/config'); const { getLogStores } = require('~/cache'); const router = express.Router(); @@ -28,18 +30,19 @@ router.get('/:action_id/oauth/callback', async (req, res) => { try { decodedState = jwt.verify(state, JWT_SECRET); } catch (err) { + logger.error('Error verifying state parameter:', err); await flowManager.failFlow(identifier, 'oauth', 'Invalid or expired state parameter'); - return res.status(400).send('Invalid or expired state parameter'); + return res.redirect('/oauth/error?error=invalid_state'); } if (decodedState.action_id !== action_id) { await flowManager.failFlow(identifier, 'oauth', 'Mismatched action ID in state parameter'); - return res.status(400).send('Mismatched action ID in state parameter'); + return res.redirect('/oauth/error?error=invalid_state'); } if (!decodedState.user) { await flowManager.failFlow(identifier, 'oauth', 'Invalid user ID in state parameter'); - return res.status(400).send('Invalid user ID in state parameter'); + return res.redirect('/oauth/error?error=invalid_state'); } identifier = `${decodedState.user}:${action_id}`; const flowState = await flowManager.getFlowState(identifier, 'oauth'); @@ -47,91 +50,34 @@ router.get('/:action_id/oauth/callback', async (req, res) => { throw new Error('OAuth flow not found'); } - const tokenData = await getAccessToken({ - code, - userId: decodedState.user, - identifier, - client_url: flowState.metadata.client_url, - redirect_uri: flowState.metadata.redirect_uri, - token_exchange_method: flowState.metadata.token_exchange_method, - /** Encrypted values */ - encrypted_oauth_client_id: flowState.metadata.encrypted_oauth_client_id, - encrypted_oauth_client_secret: flowState.metadata.encrypted_oauth_client_secret, - }); + const tokenData = await getAccessToken( + { + code, + userId: decodedState.user, + identifier, + client_url: flowState.metadata.client_url, + redirect_uri: flowState.metadata.redirect_uri, + token_exchange_method: flowState.metadata.token_exchange_method, + /** Encrypted values */ + encrypted_oauth_client_id: flowState.metadata.encrypted_oauth_client_id, + encrypted_oauth_client_secret: flowState.metadata.encrypted_oauth_client_secret, + }, + { + findToken, + updateToken, + createToken, + }, + ); await flowManager.completeFlow(identifier, 'oauth', tokenData); - res.send(` - - - - Authentication Successful - - - - - -
-          Authentication Successful
-          Your authentication was successful. This window will close in
-          3 seconds.
- - - - `); + + /** Redirect to React success page */ + const serverName = flowState.metadata?.action_name || `Action ${action_id}`; + const redirectUrl = `/oauth/success?serverName=${encodeURIComponent(serverName)}`; + res.redirect(redirectUrl); } catch (error) { logger.error('Error in OAuth callback:', error); await flowManager.failFlow(identifier, 'oauth', error); - res.status(500).send('Authentication failed. Please try again.'); + res.redirect('/oauth/error?error=callback_failed'); } }); diff --git a/api/server/routes/index.js b/api/server/routes/index.js index 06e39d3671..7c1b5de0fa 100644 --- a/api/server/routes/index.js +++ b/api/server/routes/index.js @@ -27,6 +27,7 @@ const edit = require('./edit'); const keys = require('./keys'); const user = require('./user'); const ask = require('./ask'); +const mcp = require('./mcp'); module.exports = { ask, @@ -58,4 +59,5 @@ module.exports = { assistants, categories, staticRoute, + mcp, }; diff --git a/api/server/routes/mcp.js b/api/server/routes/mcp.js new file mode 100644 index 0000000000..3dfed4d240 --- /dev/null +++ b/api/server/routes/mcp.js @@ -0,0 +1,205 @@ +const { Router } = require('express'); +const { MCPOAuthHandler } = require('@librechat/api'); +const { logger } = require('@librechat/data-schemas'); +const { CacheKeys } = require('librechat-data-provider'); +const { requireJwtAuth } = require('~/server/middleware'); +const { getFlowStateManager } = require('~/config'); +const { getLogStores } = require('~/cache'); + +const router = Router(); + +/** + * Initiate OAuth flow + * This endpoint is called when the user clicks the auth link in the UI + */ +router.get('/:serverName/oauth/initiate', requireJwtAuth, async (req, res) => { + try { + const { serverName } = req.params; + const { userId, flowId } = req.query; + const user = req.user; + + // Verify the userId matches the authenticated user + if (userId !== user.id) { + return res.status(403).json({ error: 'User mismatch' }); + } + + logger.debug('[MCP OAuth] Initiate request', { serverName, userId, flowId }); + + const flowsCache = getLogStores(CacheKeys.FLOWS); + const flowManager = getFlowStateManager(flowsCache); + + /** Flow state to retrieve OAuth config */ + const flowState = await flowManager.getFlowState(flowId, 'mcp_oauth'); + if (!flowState) { + logger.error('[MCP OAuth] Flow state not found', { flowId }); + return res.status(404).json({ error: 'Flow not found' }); + } + + const { serverUrl, oauth: oauthConfig } = flowState.metadata || {}; + if (!serverUrl || !oauthConfig) { + logger.error('[MCP OAuth] Missing server URL or OAuth config in flow state'); + return res.status(400).json({ error: 'Invalid flow state' }); + } + + const { authorizationUrl, flowId: oauthFlowId } = await MCPOAuthHandler.initiateOAuthFlow( + serverName, + serverUrl, + userId, + oauthConfig, + ); + + logger.debug('[MCP OAuth] OAuth flow initiated', { oauthFlowId, authorizationUrl }); + + // Redirect user to the authorization URL + res.redirect(authorizationUrl); + } catch (error) { + logger.error('[MCP OAuth] Failed to initiate OAuth', error); + res.status(500).json({ error: 'Failed to initiate OAuth' }); + } +}); + +/** + * OAuth callback handler + * This handles the OAuth callback after the user has authorized the application + */ +router.get('/:serverName/oauth/callback', async (req, res) => { + try { + const { serverName } = req.params; + const { code, state, error: oauthError } = req.query; + + logger.debug('[MCP OAuth] Callback received', { + serverName, + code: code ? 
'present' : 'missing', + state, + error: oauthError, + }); + + if (oauthError) { + logger.error('[MCP OAuth] OAuth error received', { error: oauthError }); + return res.redirect(`/oauth/error?error=${encodeURIComponent(String(oauthError))}`); + } + + if (!code || typeof code !== 'string') { + logger.error('[MCP OAuth] Missing or invalid code'); + return res.redirect('/oauth/error?error=missing_code'); + } + + if (!state || typeof state !== 'string') { + logger.error('[MCP OAuth] Missing or invalid state'); + return res.redirect('/oauth/error?error=missing_state'); + } + + // Extract flow ID from state + const flowId = state; + logger.debug('[MCP OAuth] Using flow ID from state', { flowId }); + + const flowsCache = getLogStores(CacheKeys.FLOWS); + const flowManager = getFlowStateManager(flowsCache); + + logger.debug('[MCP OAuth] Getting flow state for flowId: ' + flowId); + const flowState = await MCPOAuthHandler.getFlowState(flowId, flowManager); + + if (!flowState) { + logger.error('[MCP OAuth] Flow state not found for flowId:', flowId); + return res.redirect('/oauth/error?error=invalid_state'); + } + + logger.debug('[MCP OAuth] Flow state details', { + serverName: flowState.serverName, + userId: flowState.userId, + hasMetadata: !!flowState.metadata, + hasClientInfo: !!flowState.clientInfo, + hasCodeVerifier: !!flowState.codeVerifier, + }); + + // Complete the OAuth flow + logger.debug('[MCP OAuth] Completing OAuth flow'); + const tokens = await MCPOAuthHandler.completeOAuthFlow(flowId, code, flowManager); + logger.info('[MCP OAuth] OAuth flow completed, tokens received in callback route'); + + // For system-level OAuth, we need to store the tokens and retry the connection + if (flowState.userId === 'system') { + logger.debug(`[MCP OAuth] System-level OAuth completed for ${serverName}`); + } + + /** ID of the flow that the tool/connection is waiting for */ + const toolFlowId = flowState.metadata?.toolFlowId; + if (toolFlowId) { + logger.debug('[MCP OAuth] Completing tool flow', { toolFlowId }); + await flowManager.completeFlow(toolFlowId, 'mcp_oauth', tokens); + } + + /** Redirect to success page with flowId and serverName */ + const redirectUrl = `/oauth/success?serverName=${encodeURIComponent(serverName)}`; + res.redirect(redirectUrl); + } catch (error) { + logger.error('[MCP OAuth] OAuth callback error', error); + res.redirect('/oauth/error?error=callback_failed'); + } +}); + +/** + * Get OAuth tokens for a completed flow + * This is primarily for user-level OAuth flows + */ +router.get('/oauth/tokens/:flowId', requireJwtAuth, async (req, res) => { + try { + const { flowId } = req.params; + const user = req.user; + + if (!user?.id) { + return res.status(401).json({ error: 'User not authenticated' }); + } + + // Allow system flows or user-owned flows + if (!flowId.startsWith(`${user.id}:`) && !flowId.startsWith('system:')) { + return res.status(403).json({ error: 'Access denied' }); + } + + const flowsCache = getLogStores(CacheKeys.FLOWS); + const flowManager = getFlowStateManager(flowsCache); + + const flowState = await flowManager.getFlowState(flowId, 'mcp_oauth'); + if (!flowState) { + return res.status(404).json({ error: 'Flow not found' }); + } + + if (flowState.status !== 'COMPLETED') { + return res.status(400).json({ error: 'Flow not completed' }); + } + + res.json({ tokens: flowState.result }); + } catch (error) { + logger.error('[MCP OAuth] Failed to get tokens', error); + res.status(500).json({ error: 'Failed to get tokens' }); + } +}); + +/** + * Check OAuth flow status + * 
This endpoint can be used to poll the status of an OAuth flow + */ +router.get('/oauth/status/:flowId', async (req, res) => { + try { + const { flowId } = req.params; + const flowsCache = getLogStores(CacheKeys.FLOWS); + const flowManager = getFlowStateManager(flowsCache); + + const flowState = await flowManager.getFlowState(flowId, 'mcp_oauth'); + if (!flowState) { + return res.status(404).json({ error: 'Flow not found' }); + } + + res.json({ + status: flowState.status, + completed: flowState.status === 'COMPLETED', + failed: flowState.status === 'FAILED', + error: flowState.error, + }); + } catch (error) { + logger.error('[MCP OAuth] Failed to get flow status', error); + res.status(500).json({ error: 'Failed to get flow status' }); + } +}); + +module.exports = router; diff --git a/api/server/routes/oauth.js b/api/server/routes/oauth.js index bc8d120ef5..afc4a05b75 100644 --- a/api/server/routes/oauth.js +++ b/api/server/routes/oauth.js @@ -47,7 +47,9 @@ const oauthHandler = async (req, res) => { router.get('/error', (req, res) => { // A single error message is pushed by passport when authentication fails. - logger.error('Error in OAuth authentication:', { message: req.session.messages.pop() }); + logger.error('Error in OAuth authentication:', { + message: req.session?.messages?.pop() || 'Unknown error', + }); // Redirect to login page with auth_failed parameter to prevent infinite redirect loops res.redirect(`${domains.client}/login?redirect=false`); diff --git a/api/server/services/ActionService.js b/api/server/services/ActionService.js index 9bf7491543..b9555a752c 100644 --- a/api/server/services/ActionService.js +++ b/api/server/services/ActionService.js @@ -3,7 +3,13 @@ const { nanoid } = require('nanoid'); const { tool } = require('@langchain/core/tools'); const { logger } = require('@librechat/data-schemas'); const { GraphEvents, sleep } = require('@librechat/agents'); -const { sendEvent, logAxiosError } = require('@librechat/api'); +const { + sendEvent, + encryptV2, + decryptV2, + logAxiosError, + refreshAccessToken, +} = require('@librechat/api'); const { Time, CacheKeys, @@ -14,13 +20,11 @@ const { isImageVisionTool, actionDomainSeparator, } = require('librechat-data-provider'); -const { refreshAccessToken } = require('~/server/services/TokenService'); -const { encryptV2, decryptV2 } = require('~/server/utils/crypto'); +const { findToken, updateToken, createToken } = require('~/models'); const { getActions, deleteActions } = require('~/models/Action'); const { deleteAssistant } = require('~/models/Assistant'); const { getFlowStateManager } = require('~/config'); const { getLogStores } = require('~/cache'); -const { findToken } = require('~/models'); const JWT_SECRET = process.env.JWT_SECRET; const toolNameRegex = /^[a-zA-Z0-9_-]+$/; @@ -258,15 +262,22 @@ async function createActionTool({ try { const refresh_token = await decryptV2(refreshTokenData.token); const refreshTokens = async () => - await refreshAccessToken({ - userId, - identifier, - refresh_token, - client_url: metadata.auth.client_url, - encrypted_oauth_client_id: encrypted.oauth_client_id, - token_exchange_method: metadata.auth.token_exchange_method, - encrypted_oauth_client_secret: encrypted.oauth_client_secret, - }); + await refreshAccessToken( + { + userId, + identifier, + refresh_token, + client_url: metadata.auth.client_url, + encrypted_oauth_client_id: encrypted.oauth_client_id, + token_exchange_method: metadata.auth.token_exchange_method, + encrypted_oauth_client_secret: encrypted.oauth_client_secret, + }, + { 
+ findToken, + updateToken, + createToken, + }, + ); const flowsCache = getLogStores(CacheKeys.FLOWS); const flowManager = getFlowStateManager(flowsCache); const refreshData = await flowManager.createFlowWithHandler( diff --git a/api/server/services/AppService.js b/api/server/services/AppService.js index 2e5a0e586b..6b7ff7417f 100644 --- a/api/server/services/AppService.js +++ b/api/server/services/AppService.js @@ -1,7 +1,6 @@ const { FileSources, loadOCRConfig, - processMCPEnv, EModelEndpoint, loadMemoryConfig, getConfigDefaults, @@ -28,7 +27,7 @@ const { initializeS3 } = require('./Files/S3/initialize'); const { loadAndFormatTools } = require('./ToolService'); const { isEnabled } = require('~/server/utils'); const { initializeRoles } = require('~/models'); -const { getMCPManager } = require('~/config'); +const { setCachedTools } = require('./Config'); const paths = require('~/config/paths'); /** @@ -76,11 +75,10 @@ const AppService = async (app) => { directory: paths.structuredTools, }); - if (config.mcpServers != null) { - const mcpManager = getMCPManager(); - await mcpManager.initializeMCP(config.mcpServers, processMCPEnv); - await mcpManager.mapAvailableTools(availableTools); - } + await setCachedTools(availableTools, { isGlobal: true }); + + // Store MCP config for later initialization + const mcpConfig = config.mcpServers || null; const socialLogins = config?.registration?.socialLogins ?? configDefaults?.registration?.socialLogins; @@ -96,11 +94,11 @@ const AppService = async (app) => { socialLogins, filteredTools, includedTools, - availableTools, imageOutputType, interfaceConfig, turnstileConfig, balance, + mcpConfig, }; const agentsDefaults = agentsConfigSetup(config); diff --git a/api/server/services/AppService.spec.js b/api/server/services/AppService.spec.js index 70a405ccdb..7edccc2c0d 100644 --- a/api/server/services/AppService.spec.js +++ b/api/server/services/AppService.spec.js @@ -32,6 +32,25 @@ jest.mock('~/models', () => ({ jest.mock('~/models/Role', () => ({ updateAccessPermissions: jest.fn(), })); +jest.mock('./Config', () => ({ + setCachedTools: jest.fn(), + getCachedTools: jest.fn().mockResolvedValue({ + ExampleTool: { + type: 'function', + function: { + description: 'Example tool function', + name: 'exampleFunction', + parameters: { + type: 'object', + properties: { + param1: { type: 'string', description: 'An example parameter' }, + }, + required: ['param1'], + }, + }, + }, + }), +})); jest.mock('./ToolService', () => ({ loadAndFormatTools: jest.fn().mockReturnValue({ ExampleTool: { @@ -121,22 +140,9 @@ describe('AppService', () => { sidePanel: true, presets: true, }), + mcpConfig: null, turnstileConfig: mockedTurnstileConfig, modelSpecs: undefined, - availableTools: { - ExampleTool: { - type: 'function', - function: expect.objectContaining({ - description: 'Example tool function', - name: 'exampleFunction', - parameters: expect.objectContaining({ - type: 'object', - properties: expect.any(Object), - required: expect.arrayContaining(['param1']), - }), - }), - }, - }, paths: expect.anything(), ocr: expect.anything(), imageOutputType: expect.any(String), @@ -223,14 +229,41 @@ describe('AppService', () => { it('should load and format tools accurately with defined structure', async () => { const { loadAndFormatTools } = require('./ToolService'); + const { setCachedTools, getCachedTools } = require('./Config'); + await AppService(app); expect(loadAndFormatTools).toHaveBeenCalledWith({ + adminFilter: undefined, + adminIncluded: undefined, directory: expect.anything(), 
}); - expect(app.locals.availableTools.ExampleTool).toBeDefined(); - expect(app.locals.availableTools.ExampleTool).toEqual({ + // Verify setCachedTools was called with the tools + expect(setCachedTools).toHaveBeenCalledWith( + { + ExampleTool: { + type: 'function', + function: { + description: 'Example tool function', + name: 'exampleFunction', + parameters: { + type: 'object', + properties: { + param1: { type: 'string', description: 'An example parameter' }, + }, + required: ['param1'], + }, + }, + }, + }, + { isGlobal: true }, + ); + + // Verify we can retrieve the tools from cache + const cachedTools = await getCachedTools({ includeGlobal: true }); + expect(cachedTools.ExampleTool).toBeDefined(); + expect(cachedTools.ExampleTool).toEqual({ type: 'function', function: { description: 'Example tool function', @@ -535,7 +568,6 @@ describe('AppService updating app.locals and issuing warnings', () => { expect(app.locals).toBeDefined(); expect(app.locals.paths).toBeDefined(); - expect(app.locals.availableTools).toBeDefined(); expect(app.locals.fileStrategy).toEqual(FileSources.local); expect(app.locals.socialLogins).toEqual(defaultSocialLogins); expect(app.locals.balance).toEqual( @@ -568,7 +600,6 @@ describe('AppService updating app.locals and issuing warnings', () => { expect(app.locals).toBeDefined(); expect(app.locals.paths).toBeDefined(); - expect(app.locals.availableTools).toBeDefined(); expect(app.locals.fileStrategy).toEqual(customConfig.fileStrategy); expect(app.locals.socialLogins).toEqual(customConfig.registration.socialLogins); expect(app.locals.balance).toEqual(customConfig.balance); diff --git a/api/server/services/AuthService.js b/api/server/services/AuthService.js index 2c285512ee..6061277437 100644 --- a/api/server/services/AuthService.js +++ b/api/server/services/AuthService.js @@ -1,5 +1,7 @@ const bcrypt = require('bcryptjs'); const { webcrypto } = require('node:crypto'); +const { isEnabled } = require('@librechat/api'); +const { logger } = require('@librechat/data-schemas'); const { SystemRoles, errorsToString } = require('librechat-data-provider'); const { findUser, @@ -17,11 +19,10 @@ const { deleteUserById, generateRefreshToken, } = require('~/models'); -const { isEnabled, checkEmailConfig, sendEmail } = require('~/server/utils'); const { isEmailDomainAllowed } = require('~/server/services/domains'); +const { checkEmailConfig, sendEmail } = require('~/server/utils'); const { getBalanceConfig } = require('~/server/services/Config'); const { registerSchema } = require('~/strategies/validators'); -const { logger } = require('~/config'); const domains = { client: process.env.DOMAIN_CLIENT, diff --git a/api/server/services/Config/getCachedTools.js b/api/server/services/Config/getCachedTools.js new file mode 100644 index 0000000000..b3a4f0c869 --- /dev/null +++ b/api/server/services/Config/getCachedTools.js @@ -0,0 +1,258 @@ +const { CacheKeys } = require('librechat-data-provider'); +const getLogStores = require('~/cache/getLogStores'); + +/** + * Cache key generators for different tool access patterns + * These will support future permission-based caching + */ +const ToolCacheKeys = { + /** Global tools available to all users */ + GLOBAL: 'tools:global', + /** Tools available to a specific user */ + USER: (userId) => `tools:user:${userId}`, + /** Tools available to a specific role */ + ROLE: (roleId) => `tools:role:${roleId}`, + /** Tools available to a specific group */ + GROUP: (groupId) => `tools:group:${groupId}`, + /** Combined effective tools for a user (computed from 
all sources) */ + EFFECTIVE: (userId) => `tools:effective:${userId}`, +}; + +/** + * Retrieves available tools from cache + * @function getCachedTools + * @param {Object} options - Options for retrieving tools + * @param {string} [options.userId] - User ID for user-specific tools + * @param {string[]} [options.roleIds] - Role IDs for role-based tools + * @param {string[]} [options.groupIds] - Group IDs for group-based tools + * @param {boolean} [options.includeGlobal=true] - Whether to include global tools + * @returns {Promise} The available tools object or null if not cached + */ +async function getCachedTools(options = {}) { + const cache = getLogStores(CacheKeys.CONFIG_STORE); + const { userId, roleIds = [], groupIds = [], includeGlobal = true } = options; + + // For now, return global tools (current behavior) + // This will be expanded to merge tools from different sources + if (!userId && includeGlobal) { + return await cache.get(ToolCacheKeys.GLOBAL); + } + + // Future implementation will merge tools from multiple sources + // based on user permissions, roles, and groups + if (userId) { + // Check if we have pre-computed effective tools for this user + const effectiveTools = await cache.get(ToolCacheKeys.EFFECTIVE(userId)); + if (effectiveTools) { + return effectiveTools; + } + + // Otherwise, compute from individual sources + const toolSources = []; + + if (includeGlobal) { + const globalTools = await cache.get(ToolCacheKeys.GLOBAL); + if (globalTools) { + toolSources.push(globalTools); + } + } + + // User-specific tools + const userTools = await cache.get(ToolCacheKeys.USER(userId)); + if (userTools) { + toolSources.push(userTools); + } + + // Role-based tools + for (const roleId of roleIds) { + const roleTools = await cache.get(ToolCacheKeys.ROLE(roleId)); + if (roleTools) { + toolSources.push(roleTools); + } + } + + // Group-based tools + for (const groupId of groupIds) { + const groupTools = await cache.get(ToolCacheKeys.GROUP(groupId)); + if (groupTools) { + toolSources.push(groupTools); + } + } + + // Merge all tool sources (for now, simple merge - future will handle conflicts) + if (toolSources.length > 0) { + return mergeToolSources(toolSources); + } + } + + return null; +} + +/** + * Sets available tools in cache + * @function setCachedTools + * @param {Object} tools - The tools object to cache + * @param {Object} options - Options for caching tools + * @param {string} [options.userId] - User ID for user-specific tools + * @param {string} [options.roleId] - Role ID for role-based tools + * @param {string} [options.groupId] - Group ID for group-based tools + * @param {boolean} [options.isGlobal=false] - Whether these are global tools + * @param {number} [options.ttl] - Time to live in milliseconds + * @returns {Promise} Whether the operation was successful + */ +async function setCachedTools(tools, options = {}) { + const cache = getLogStores(CacheKeys.CONFIG_STORE); + const { userId, roleId, groupId, isGlobal = false, ttl } = options; + + let cacheKey; + if (isGlobal || (!userId && !roleId && !groupId)) { + cacheKey = ToolCacheKeys.GLOBAL; + } else if (userId) { + cacheKey = ToolCacheKeys.USER(userId); + } else if (roleId) { + cacheKey = ToolCacheKeys.ROLE(roleId); + } else if (groupId) { + cacheKey = ToolCacheKeys.GROUP(groupId); + } + + if (!cacheKey) { + throw new Error('Invalid cache key options provided'); + } + + return await cache.set(cacheKey, tools, ttl); +} + +/** + * Invalidates cached tools + * @function invalidateCachedTools + * @param {Object} options - 
Options for invalidating tools + * @param {string} [options.userId] - User ID to invalidate + * @param {string} [options.roleId] - Role ID to invalidate + * @param {string} [options.groupId] - Group ID to invalidate + * @param {boolean} [options.invalidateGlobal=false] - Whether to invalidate global tools + * @param {boolean} [options.invalidateEffective=true] - Whether to invalidate effective tools + * @returns {Promise} + */ +async function invalidateCachedTools(options = {}) { + const cache = getLogStores(CacheKeys.CONFIG_STORE); + const { userId, roleId, groupId, invalidateGlobal = false, invalidateEffective = true } = options; + + const keysToDelete = []; + + if (invalidateGlobal) { + keysToDelete.push(ToolCacheKeys.GLOBAL); + } + + if (userId) { + keysToDelete.push(ToolCacheKeys.USER(userId)); + if (invalidateEffective) { + keysToDelete.push(ToolCacheKeys.EFFECTIVE(userId)); + } + } + + if (roleId) { + keysToDelete.push(ToolCacheKeys.ROLE(roleId)); + // TODO: In future, invalidate all users with this role + } + + if (groupId) { + keysToDelete.push(ToolCacheKeys.GROUP(groupId)); + // TODO: In future, invalidate all users in this group + } + + await Promise.all(keysToDelete.map((key) => cache.delete(key))); +} + +/** + * Computes and caches effective tools for a user + * @function computeEffectiveTools + * @param {string} userId - The user ID + * @param {Object} context - Context containing user's roles and groups + * @param {string[]} [context.roleIds=[]] - User's role IDs + * @param {string[]} [context.groupIds=[]] - User's group IDs + * @param {number} [ttl] - Time to live for the computed result + * @returns {Promise} The computed effective tools + */ +async function computeEffectiveTools(userId, context = {}, ttl) { + const { roleIds = [], groupIds = [] } = context; + + // Get all tool sources + const tools = await getCachedTools({ + userId, + roleIds, + groupIds, + includeGlobal: true, + }); + + if (tools) { + // Cache the computed result + const cache = getLogStores(CacheKeys.CONFIG_STORE); + await cache.set(ToolCacheKeys.EFFECTIVE(userId), tools, ttl); + } + + return tools; +} + +/** + * Merges multiple tool sources into a single tools object + * @function mergeToolSources + * @param {Object[]} sources - Array of tool objects to merge + * @returns {Object} Merged tools object + */ +function mergeToolSources(sources) { + // For now, simple merge that combines all tools + // Future implementation will handle: + // - Permission precedence (deny > allow) + // - Tool property conflicts + // - Metadata merging + const merged = {}; + + for (const source of sources) { + if (!source || typeof source !== 'object') { + continue; + } + + for (const [toolId, toolConfig] of Object.entries(source)) { + // Simple last-write-wins for now + // Future: merge based on permission levels + merged[toolId] = toolConfig; + } + } + + return merged; +} + +/** + * Middleware-friendly function to get tools for a request + * @function getToolsForRequest + * @param {Object} req - Express request object + * @returns {Promise} Available tools for the request + */ +async function getToolsForRequest(req) { + const userId = req.user?.id; + + // For now, return global tools if no user + if (!userId) { + return getCachedTools({ includeGlobal: true }); + } + + // Future: Extract roles and groups from req.user + const roleIds = req.user?.roles || []; + const groupIds = req.user?.groups || []; + + return getCachedTools({ + userId, + roleIds, + groupIds, + includeGlobal: true, + }); +} + +module.exports = { + 
ToolCacheKeys, + getCachedTools, + setCachedTools, + getToolsForRequest, + invalidateCachedTools, + computeEffectiveTools, +}; diff --git a/api/server/services/Config/index.js b/api/server/services/Config/index.js index 9d668da958..ad25e57998 100644 --- a/api/server/services/Config/index.js +++ b/api/server/services/Config/index.js @@ -1,4 +1,5 @@ const { config } = require('./EndpointService'); +const getCachedTools = require('./getCachedTools'); const getCustomConfig = require('./getCustomConfig'); const loadCustomConfig = require('./loadCustomConfig'); const loadConfigModels = require('./loadConfigModels'); @@ -14,6 +15,7 @@ module.exports = { loadDefaultModels, loadOverrideConfig, loadAsyncEndpoints, + ...getCachedTools, ...getCustomConfig, ...getEndpointsConfig, }; diff --git a/api/server/services/MCP.js b/api/server/services/MCP.js index 357913e519..9720305668 100644 --- a/api/server/services/MCP.js +++ b/api/server/services/MCP.js @@ -1,27 +1,111 @@ const { z } = require('zod'); const { tool } = require('@langchain/core/tools'); -const { normalizeServerName } = require('@librechat/api'); -const { Constants: AgentConstants, Providers } = require('@librechat/agents'); +const { logger } = require('@librechat/data-schemas'); +const { Time, CacheKeys, StepTypes } = require('librechat-data-provider'); +const { sendEvent, normalizeServerName, MCPOAuthHandler } = require('@librechat/api'); +const { Constants: AgentConstants, Providers, GraphEvents } = require('@librechat/agents'); const { Constants, ContentTypes, isAssistantsEndpoint, convertJsonSchemaToZod, } = require('librechat-data-provider'); -const { logger, getMCPManager } = require('~/config'); +const { getMCPManager, getFlowStateManager } = require('~/config'); +const { findToken, createToken, updateToken } = require('~/models'); +const { getCachedTools } = require('./Config'); +const { getLogStores } = require('~/cache'); + +/** + * @param {object} params + * @param {ServerResponse} params.res - The Express response object for sending events. + * @param {string} params.stepId - The ID of the step in the flow. + * @param {ToolCallChunk} params.toolCall - The tool call object containing tool information. + * @param {string} params.loginFlowId - The ID of the login flow. + * @param {FlowStateManager} params.flowManager - The flow manager instance. + */ +function createOAuthStart({ res, stepId, toolCall, loginFlowId, flowManager, signal }) { + /** + * Creates a function to handle OAuth login requests. + * @param {string} authURL - The URL to redirect the user for OAuth authentication. + * @returns {Promise} Returns true to indicate the event was sent successfully. + */ + return async function (authURL) { + /** @type {{ id: string; delta: AgentToolCallDelta }} */ + const data = { + id: stepId, + delta: { + type: StepTypes.TOOL_CALLS, + tool_calls: [{ ...toolCall, args: '' }], + auth: authURL, + expires_at: Date.now() + Time.TWO_MINUTES, + }, + }; + /** Used to ensure the handler (use of `sendEvent`) is only invoked once */ + await flowManager.createFlowWithHandler( + loginFlowId, + 'oauth_login', + async () => { + sendEvent(res, { event: GraphEvents.ON_RUN_STEP_DELTA, data }); + logger.debug('Sent OAuth login request to client'); + return true; + }, + signal, + ); + }; +} + +/** + * @param {object} params + * @param {ServerResponse} params.res - The Express response object for sending events. + * @param {string} params.stepId - The ID of the step in the flow. 
+ * @param {ToolCallChunk} params.toolCall - The tool call object containing tool information. + * @param {string} params.loginFlowId - The ID of the login flow. + * @param {FlowStateManager} params.flowManager - The flow manager instance. + */ +function createOAuthEnd({ res, stepId, toolCall }) { + return async function () { + /** @type {{ id: string; delta: AgentToolCallDelta }} */ + const data = { + id: stepId, + delta: { + type: StepTypes.TOOL_CALLS, + tool_calls: [{ ...toolCall }], + }, + }; + sendEvent(res, { event: GraphEvents.ON_RUN_STEP_DELTA, data }); + logger.debug('Sent OAuth login success to client'); + }; +} + +/** + * @param {object} params + * @param {string} params.userId - The ID of the user. + * @param {string} params.serverName - The name of the server. + * @param {string} params.toolName - The name of the tool. + * @param {FlowStateManager} params.flowManager - The flow manager instance. + */ +function createAbortHandler({ userId, serverName, toolName, flowManager }) { + return function () { + logger.info(`[MCP][User: ${userId}][${serverName}][${toolName}] Tool call aborted`); + const flowId = MCPOAuthHandler.generateFlowId(userId, serverName); + flowManager.failFlow(flowId, 'mcp_oauth', new Error('Tool call aborted')); + }; +} /** * Creates a general tool for an entire action set. * * @param {Object} params - The parameters for loading action sets. * @param {ServerRequest} params.req - The Express request object, containing user/request info. + * @param {ServerResponse} params.res - The Express response object for sending events. * @param {string} params.toolKey - The toolKey for the tool. * @param {import('@librechat/agents').Providers | EModelEndpoint} params.provider - The provider for the tool. * @param {string} params.model - The model for the tool. * @returns { Promise unknown}> } An object with `_call` method to execute the tool input. */ -async function createMCPTool({ req, toolKey, provider: _provider }) { - const toolDefinition = req.app.locals.availableTools[toolKey]?.function; +async function createMCPTool({ req, res, toolKey, provider: _provider }) { + const availableTools = await getCachedTools({ includeGlobal: true }); + const toolDefinition = availableTools?.[toolKey]?.function; if (!toolDefinition) { logger.error(`Tool ${toolKey} not found in available tools`); return null; @@ -51,10 +135,39 @@ async function createMCPTool({ req, toolKey, provider: _provider }) { /** @type {(toolArguments: Object | string, config?: GraphRunnableConfig) => Promise} */ const _call = async (toolArguments, config) => { const userId = config?.configurable?.user?.id || config?.configurable?.user_id; + /** @type {ReturnType} */ + let abortHandler = null; + /** @type {AbortSignal} */ + let derivedSignal = null; + try { - const derivedSignal = config?.signal ? AbortSignal.any([config.signal]) : undefined; + const flowsCache = getLogStores(CacheKeys.FLOWS); + const flowManager = getFlowStateManager(flowsCache); + derivedSignal = config?.signal ? AbortSignal.any([config.signal]) : undefined; const mcpManager = getMCPManager(userId); const provider = (config?.metadata?.provider || _provider)?.toLowerCase(); + + const { args: _args, stepId, ...toolCall } = config.toolCall ?? 
{}; + const loginFlowId = `${serverName}:oauth_login:${config.metadata.thread_id}:${config.metadata.run_id}`; + const oauthStart = createOAuthStart({ + res, + stepId, + toolCall, + loginFlowId, + flowManager, + signal: derivedSignal, + }); + const oauthEnd = createOAuthEnd({ + res, + stepId, + toolCall, + }); + + if (derivedSignal) { + abortHandler = createAbortHandler({ userId, serverName, toolName, flowManager }); + derivedSignal.addEventListener('abort', abortHandler, { once: true }); + } + const result = await mcpManager.callTool({ serverName, toolName, @@ -64,6 +177,14 @@ async function createMCPTool({ req, toolKey, provider: _provider }) { signal: derivedSignal, user: config?.configurable?.user, }, + flowManager, + tokenMethods: { + findToken, + createToken, + updateToken, + }, + oauthStart, + oauthEnd, }); if (isAssistantsEndpoint(provider) && Array.isArray(result)) { @@ -78,9 +199,28 @@ async function createMCPTool({ req, toolKey, provider: _provider }) { `[MCP][User: ${userId}][${serverName}] Error calling "${toolName}" MCP tool:`, error, ); + + /** OAuth error, provide a helpful message */ + const isOAuthError = + error.message?.includes('401') || + error.message?.includes('OAuth') || + error.message?.includes('authentication') || + error.message?.includes('Non-200 status code (401)'); + + if (isOAuthError) { + throw new Error( + `OAuth authentication required for ${serverName}. Please check the server logs for the authentication URL.`, + ); + } + throw new Error( `"${toolKey}" tool call failed${error?.message ? `: ${error?.message}` : '.'}`, ); + } finally { + // Clean up abort handler to prevent memory leaks + if (abortHandler && derivedSignal) { + derivedSignal.removeEventListener('abort', abortHandler); + } } }; diff --git a/api/server/services/PluginService.js b/api/server/services/PluginService.js index 7463e0814e..04c5abb32b 100644 --- a/api/server/services/PluginService.js +++ b/api/server/services/PluginService.js @@ -1,6 +1,6 @@ -const { encrypt, decrypt } = require('~/server/utils/crypto'); +const { logger } = require('@librechat/data-schemas'); +const { encrypt, decrypt } = require('@librechat/api'); const { PluginAuth } = require('~/db/models'); -const { logger } = require('~/config'); /** * Asynchronously retrieves and decrypts the authentication value for a user's plugin, based on a specified authentication field. diff --git a/api/server/services/TokenService.js b/api/server/services/TokenService.js deleted file mode 100644 index ec74844197..0000000000 --- a/api/server/services/TokenService.js +++ /dev/null @@ -1,195 +0,0 @@ -const axios = require('axios'); -const { logAxiosError } = require('@librechat/api'); -const { logger } = require('@librechat/data-schemas'); -const { TokenExchangeMethodEnum } = require('librechat-data-provider'); -const { handleOAuthToken } = require('~/models/Token'); -const { decryptV2 } = require('~/server/utils/crypto'); - -/** - * Processes the access tokens and stores them in the database. 
- * @param {object} tokenData - * @param {string} tokenData.access_token - * @param {number} tokenData.expires_in - * @param {string} [tokenData.refresh_token] - * @param {number} [tokenData.refresh_token_expires_in] - * @param {object} metadata - * @param {string} metadata.userId - * @param {string} metadata.identifier - * @returns {Promise} - */ -async function processAccessTokens(tokenData, { userId, identifier }) { - const { access_token, expires_in = 3600, refresh_token, refresh_token_expires_in } = tokenData; - if (!access_token) { - logger.error('Access token not found: ', tokenData); - throw new Error('Access token not found'); - } - await handleOAuthToken({ - identifier, - token: access_token, - expiresIn: expires_in, - userId, - }); - - if (refresh_token != null) { - logger.debug('Processing refresh token'); - await handleOAuthToken({ - token: refresh_token, - type: 'oauth_refresh', - userId, - identifier: `${identifier}:refresh`, - expiresIn: refresh_token_expires_in ?? null, - }); - } - logger.debug('Access tokens processed'); -} - -/** - * Refreshes the access token using the refresh token. - * @param {object} fields - * @param {string} fields.userId - The ID of the user. - * @param {string} fields.client_url - The URL of the OAuth provider. - * @param {string} fields.identifier - The identifier for the token. - * @param {string} fields.refresh_token - The refresh token to use. - * @param {string} fields.token_exchange_method - The token exchange method ('default_post' or 'basic_auth_header'). - * @param {string} fields.encrypted_oauth_client_id - The client ID for the OAuth provider. - * @param {string} fields.encrypted_oauth_client_secret - The client secret for the OAuth provider. - * @returns {Promise<{ - * access_token: string, - * expires_in: number, - * refresh_token?: string, - * refresh_token_expires_in?: number, - * }>} - */ -const refreshAccessToken = async ({ - userId, - client_url, - identifier, - refresh_token, - token_exchange_method, - encrypted_oauth_client_id, - encrypted_oauth_client_secret, -}) => { - try { - const oauth_client_id = await decryptV2(encrypted_oauth_client_id); - const oauth_client_secret = await decryptV2(encrypted_oauth_client_secret); - - const headers = { - 'Content-Type': 'application/x-www-form-urlencoded', - Accept: 'application/json', - }; - - const params = new URLSearchParams({ - grant_type: 'refresh_token', - refresh_token, - }); - - if (token_exchange_method === TokenExchangeMethodEnum.BasicAuthHeader) { - const basicAuth = Buffer.from(`${oauth_client_id}:${oauth_client_secret}`).toString('base64'); - headers['Authorization'] = `Basic ${basicAuth}`; - } else { - params.append('client_id', oauth_client_id); - params.append('client_secret', oauth_client_secret); - } - - const response = await axios({ - method: 'POST', - url: client_url, - headers, - data: params.toString(), - }); - await processAccessTokens(response.data, { - userId, - identifier, - }); - logger.debug(`Access token refreshed successfully for ${identifier}`); - return response.data; - } catch (error) { - const message = 'Error refreshing OAuth tokens'; - throw new Error( - logAxiosError({ - message, - error, - }), - ); - } -}; - -/** - * Handles the OAuth callback and exchanges the authorization code for tokens. - * @param {object} fields - * @param {string} fields.code - The authorization code returned by the provider. - * @param {string} fields.userId - The ID of the user. - * @param {string} fields.identifier - The identifier for the token. 
- * @param {string} fields.client_url - The URL of the OAuth provider. - * @param {string} fields.redirect_uri - The redirect URI for the OAuth provider. - * @param {string} fields.token_exchange_method - The token exchange method ('default_post' or 'basic_auth_header'). - * @param {string} fields.encrypted_oauth_client_id - The client ID for the OAuth provider. - * @param {string} fields.encrypted_oauth_client_secret - The client secret for the OAuth provider. - * @returns {Promise<{ - * access_token: string, - * expires_in: number, - * refresh_token?: string, - * refresh_token_expires_in?: number, - * }>} - */ -const getAccessToken = async ({ - code, - userId, - identifier, - client_url, - redirect_uri, - token_exchange_method, - encrypted_oauth_client_id, - encrypted_oauth_client_secret, -}) => { - const oauth_client_id = await decryptV2(encrypted_oauth_client_id); - const oauth_client_secret = await decryptV2(encrypted_oauth_client_secret); - - const headers = { - 'Content-Type': 'application/x-www-form-urlencoded', - Accept: 'application/json', - }; - - const params = new URLSearchParams({ - code, - grant_type: 'authorization_code', - redirect_uri, - }); - - if (token_exchange_method === TokenExchangeMethodEnum.BasicAuthHeader) { - const basicAuth = Buffer.from(`${oauth_client_id}:${oauth_client_secret}`).toString('base64'); - headers['Authorization'] = `Basic ${basicAuth}`; - } else { - params.append('client_id', oauth_client_id); - params.append('client_secret', oauth_client_secret); - } - - try { - const response = await axios({ - method: 'POST', - url: client_url, - headers, - data: params.toString(), - }); - - await processAccessTokens(response.data, { - userId, - identifier, - }); - logger.debug(`Access tokens successfully created for ${identifier}`); - return response.data; - } catch (error) { - const message = 'Error exchanging OAuth code'; - throw new Error( - logAxiosError({ - message, - error, - }), - ); - } -}; - -module.exports = { - getAccessToken, - refreshAccessToken, -}; diff --git a/api/server/services/ToolService.js b/api/server/services/ToolService.js index 9172c25e96..f1567a3783 100644 --- a/api/server/services/ToolService.js +++ b/api/server/services/ToolService.js @@ -1,5 +1,7 @@ const fs = require('fs'); const path = require('path'); +const { sleep } = require('@librechat/agents'); +const { logger } = require('@librechat/data-schemas'); const { zodToJsonSchema } = require('zod-to-json-schema'); const { Calculator } = require('@langchain/community/tools/calculator'); const { tool: toolFn, Tool, DynamicStructuredTool } = require('@langchain/core/tools'); @@ -31,14 +33,12 @@ const { toolkits, } = require('~/app/clients/tools'); const { processFileURL, uploadImageBuffer } = require('~/server/services/Files/process'); +const { getEndpointsConfig, getCachedTools } = require('~/server/services/Config'); const { createOnSearchResults } = require('~/server/services/Tools/search'); const { isActionDomainAllowed } = require('~/server/services/domains'); -const { getEndpointsConfig } = require('~/server/services/Config'); const { recordUsage } = require('~/server/services/Threads'); const { loadTools } = require('~/app/clients/tools/util'); const { redactMessage } = require('~/config/parsers'); -const { sleep } = require('~/server/utils'); -const { logger } = require('~/config'); /** * @param {string} toolName @@ -226,7 +226,7 @@ async function processRequiredActions(client, requiredActions) { `[required actions] user: ${client.req.user.id} | thread_id: 
${requiredActions[0].thread_id} | run_id: ${requiredActions[0].run_id}`, requiredActions, ); - const toolDefinitions = client.req.app.locals.availableTools; + const toolDefinitions = await getCachedTools({ includeGlobal: true }); const seenToolkits = new Set(); const tools = requiredActions .map((action) => { @@ -553,6 +553,7 @@ async function loadAgentTools({ req, res, agent, tool_resources, openAIApiKey }) tools: _agentTools, options: { req, + res, openAIApiKey, tool_resources, processFileURL, diff --git a/api/server/services/UserService.js b/api/server/services/UserService.js index b729607f69..7cf2f832a3 100644 --- a/api/server/services/UserService.js +++ b/api/server/services/UserService.js @@ -1,6 +1,6 @@ const { logger } = require('@librechat/data-schemas'); +const { encrypt, decrypt } = require('@librechat/api'); const { ErrorTypes } = require('librechat-data-provider'); -const { encrypt, decrypt } = require('~/server/utils/crypto'); const { updateUser } = require('~/models'); const { Key } = require('~/db/models'); @@ -70,6 +70,7 @@ const getUserKeyValues = async ({ userId, name }) => { try { userValues = JSON.parse(userValues); } catch (e) { + logger.error('[getUserKeyValues]', e); throw new Error( JSON.stringify({ type: ErrorTypes.INVALID_USER_KEY, diff --git a/api/server/services/initializeMCP.js b/api/server/services/initializeMCP.js new file mode 100644 index 0000000000..d7c5ab7d8a --- /dev/null +++ b/api/server/services/initializeMCP.js @@ -0,0 +1,54 @@ +const { logger } = require('@librechat/data-schemas'); +const { CacheKeys, processMCPEnv } = require('librechat-data-provider'); +const { getMCPManager, getFlowStateManager } = require('~/config'); +const { getCachedTools, setCachedTools } = require('./Config'); +const { getLogStores } = require('~/cache'); +const { findToken, updateToken, createToken, deleteTokens } = require('~/models'); + +/** + * Initialize MCP servers + * @param {import('express').Application} app - Express app instance + */ +async function initializeMCP(app) { + const mcpServers = app.locals.mcpConfig; + if (!mcpServers) { + return; + } + + logger.info('Initializing MCP servers...'); + const mcpManager = getMCPManager(); + const flowsCache = getLogStores(CacheKeys.FLOWS); + const flowManager = flowsCache ? 
getFlowStateManager(flowsCache) : null; + + try { + await mcpManager.initializeMCP({ + mcpServers, + flowManager, + tokenMethods: { + findToken, + updateToken, + createToken, + deleteTokens, + }, + processMCPEnv, + }); + + delete app.locals.mcpConfig; + const availableTools = await getCachedTools(); + + if (!availableTools) { + logger.warn('No available tools found in cache during MCP initialization'); + return; + } + + const toolsCopy = { ...availableTools }; + await mcpManager.mapAvailableTools(toolsCopy, flowManager); + await setCachedTools(toolsCopy, { isGlobal: true }); + + logger.info('MCP servers initialized successfully'); + } catch (error) { + logger.error('Failed to initialize MCP servers:', error); + } +} + +module.exports = initializeMCP; diff --git a/api/server/services/twoFactorService.js b/api/server/services/twoFactorService.js index 0274842367..4ac86a5549 100644 --- a/api/server/services/twoFactorService.js +++ b/api/server/services/twoFactorService.js @@ -1,5 +1,5 @@ const { webcrypto } = require('node:crypto'); -const { hashBackupCode, decryptV3, decryptV2 } = require('~/server/utils/crypto'); +const { hashBackupCode, decryptV3, decryptV2 } = require('@librechat/api'); const { updateUser } = require('~/models'); // Base32 alphabet for TOTP secret encoding. diff --git a/api/server/utils/index.js b/api/server/utils/index.js index aa432ec379..2661ff75e1 100644 --- a/api/server/utils/index.js +++ b/api/server/utils/index.js @@ -3,7 +3,6 @@ const removePorts = require('./removePorts'); const countTokens = require('./countTokens'); const handleText = require('./handleText'); const sendEmail = require('./sendEmail'); -const cryptoUtils = require('./crypto'); const queue = require('./queue'); const files = require('./files'); const math = require('./math'); @@ -31,7 +30,6 @@ function checkEmailConfig() { module.exports = { ...streamResponse, checkEmailConfig, - ...cryptoUtils, ...handleText, countTokens, removePorts, diff --git a/api/strategies/openidStrategy.spec.js b/api/strategies/openidStrategy.spec.js index 3e52ad01f1..1e6750384e 100644 --- a/api/strategies/openidStrategy.spec.js +++ b/api/strategies/openidStrategy.spec.js @@ -21,19 +21,18 @@ jest.mock('~/models', () => ({ createUser: jest.fn(), updateUser: jest.fn(), })); -jest.mock('~/server/utils/crypto', () => ({ - hashToken: jest.fn().mockResolvedValue('hashed-token'), -})); -jest.mock('~/server/utils', () => ({ +jest.mock('@librechat/api', () => ({ + ...jest.requireActual('@librechat/api'), isEnabled: jest.fn(() => false), })); -jest.mock('~/config', () => ({ +jest.mock('@librechat/data-schemas', () => ({ + ...jest.requireActual('@librechat/api'), logger: { info: jest.fn(), debug: jest.fn(), error: jest.fn(), - warn: jest.fn(), }, + hashToken: jest.fn().mockResolvedValue('hashed-token'), })); jest.mock('~/cache/getLogStores', () => jest.fn(() => ({ diff --git a/api/strategies/samlStrategy.spec.js b/api/strategies/samlStrategy.spec.js index 675bdc998b..fc8329a31a 100644 --- a/api/strategies/samlStrategy.spec.js +++ b/api/strategies/samlStrategy.spec.js @@ -1,15 +1,17 @@ -const fs = require('fs'); -const path = require('path'); -const fetch = require('node-fetch'); -const { Strategy: SamlStrategy } = require('@node-saml/passport-saml'); -const { findUser, createUser, updateUser } = require('~/models'); -const { setupSaml, getCertificateContent } = require('./samlStrategy'); - // --- Mocks --- +jest.mock('tiktoken'); jest.mock('fs'); jest.mock('path'); jest.mock('node-fetch'); jest.mock('@node-saml/passport-saml'); 
+jest.mock('@librechat/data-schemas', () => ({ + logger: { + info: jest.fn(), + debug: jest.fn(), + error: jest.fn(), + }, + hashToken: jest.fn().mockResolvedValue('hashed-token'), +})); jest.mock('~/models', () => ({ findUser: jest.fn(), createUser: jest.fn(), @@ -29,26 +31,26 @@ jest.mock('~/server/services/Config', () => ({ jest.mock('~/server/services/Config/EndpointService', () => ({ config: {}, })); -jest.mock('~/server/utils', () => ({ - isEnabled: jest.fn(() => false), - isUserProvided: jest.fn(() => false), -})); jest.mock('~/server/services/Files/strategies', () => ({ getStrategyFunctions: jest.fn(() => ({ saveBuffer: jest.fn().mockResolvedValue('/fake/path/to/avatar.png'), })), })); -jest.mock('~/server/utils/crypto', () => ({ - hashToken: jest.fn().mockResolvedValue('hashed-token'), -})); -jest.mock('~/config', () => ({ - logger: { - info: jest.fn(), - debug: jest.fn(), - error: jest.fn(), - }, +jest.mock('~/config/paths', () => ({ + root: '/fake/root/path', })); +const fs = require('fs'); +const path = require('path'); +const fetch = require('node-fetch'); +const { Strategy: SamlStrategy } = require('@node-saml/passport-saml'); +const { setupSaml, getCertificateContent } = require('./samlStrategy'); + +// Configure fs mock +jest.mocked(fs).existsSync = jest.fn(); +jest.mocked(fs).statSync = jest.fn(); +jest.mocked(fs).readFileSync = jest.fn(); + // To capture the verify callback from the strategy, we grab it from the mock constructor let verifyCallback; SamlStrategy.mockImplementation((options, verify) => { diff --git a/api/typedefs.js b/api/typedefs.js index 5bc7ebf664..58cd802425 100644 --- a/api/typedefs.js +++ b/api/typedefs.js @@ -476,11 +476,18 @@ * @memberof typedefs */ +/** + * @exports ToolCallChunk + * @typedef {import('librechat-data-provider').Agents.ToolCallChunk} ToolCallChunk + * @memberof typedefs + */ + /** * @exports MessageContentImageUrl * @typedef {import('librechat-data-provider').Agents.MessageContentImageUrl} MessageContentImageUrl * @memberof typedefs */ + /** Web Search */ /** diff --git a/client/src/components/OAuth/OAuthError.tsx b/client/src/components/OAuth/OAuthError.tsx new file mode 100644 index 0000000000..ac5532c241 --- /dev/null +++ b/client/src/components/OAuth/OAuthError.tsx @@ -0,0 +1,72 @@ +import React from 'react'; +import { useSearchParams } from 'react-router-dom'; +import { useLocalize } from '~/hooks'; + +export default function OAuthError() { + const localize = useLocalize(); + const [searchParams] = useSearchParams(); + const error = searchParams.get('error') || 'unknown_error'; + + const getErrorMessage = (error: string): string => { + switch (error) { + case 'missing_code': + return ( + localize('com_ui_oauth_error_missing_code') || + 'Authorization code is missing. Please try again.' + ); + case 'missing_state': + return ( + localize('com_ui_oauth_error_missing_state') || + 'State parameter is missing. Please try again.' + ); + case 'invalid_state': + return ( + localize('com_ui_oauth_error_invalid_state') || + 'Invalid state parameter. Please try again.' + ); + case 'callback_failed': + return ( + localize('com_ui_oauth_error_callback_failed') || + 'Authentication callback failed. Please try again.' + ); + default: + return localize('com_ui_oauth_error_generic') || error.replace(/_/g, ' '); + } + }; + + return ( +
+    <div>
+      <div>
+        <h1>
+          {localize('com_ui_oauth_error_title') || 'Authentication Failed'}
+        </h1>
+        <p>
+          {getErrorMessage(error)}
+        </p>
+        <button type="button" onClick={() => window.close()}>
+          {localize('com_ui_close_window') || 'Close Window'}
+        </button>
+      </div>
+    </div>
+ ); +} diff --git a/client/src/components/OAuth/OAuthSuccess.tsx b/client/src/components/OAuth/OAuthSuccess.tsx new file mode 100644 index 0000000000..fd8051a6f5 --- /dev/null +++ b/client/src/components/OAuth/OAuthSuccess.tsx @@ -0,0 +1,47 @@ +import React, { useEffect, useState } from 'react'; +import { useSearchParams } from 'react-router-dom'; +import { useLocalize } from '~/hooks'; + +export default function OAuthSuccess() { + const localize = useLocalize(); + const [searchParams] = useSearchParams(); + const [secondsLeft, setSecondsLeft] = useState(3); + const serverName = searchParams.get('serverName'); + + useEffect(() => { + const countdown = setInterval(() => { + setSecondsLeft((prev) => { + if (prev <= 1) { + clearInterval(countdown); + window.close(); + return 0; + } + return prev - 1; + }); + }, 1000); + + return () => clearInterval(countdown); + }, []); + + return ( +
+    <div>
+      <div>
+        <h1>
+          {localize('com_ui_oauth_success_title') || 'Authentication Successful'}
+        </h1>
+        <p>
+          {localize('com_ui_oauth_success_description') ||
+            'Your authentication was successful. This window will close in'}{' '}
+          {secondsLeft}{' '}
+          {localize('com_ui_seconds') || 'seconds'}.
+        </p>
+        {serverName && (
+          <p>
+            {localize('com_ui_oauth_connected_to') || 'Connected to'}:{' '}
+            {serverName}
+          </p>
+        )}
+      </div>
+    </div>
+ ); +} diff --git a/client/src/components/OAuth/index.ts b/client/src/components/OAuth/index.ts new file mode 100644 index 0000000000..788b92c3a9 --- /dev/null +++ b/client/src/components/OAuth/index.ts @@ -0,0 +1,2 @@ +export { default as OAuthSuccess } from './OAuthSuccess'; +export { default as OAuthError } from './OAuthError'; diff --git a/client/src/locales/en/translation.json b/client/src/locales/en/translation.json index 5791ff7713..d7171a032f 100644 --- a/client/src/locales/en/translation.json +++ b/client/src/locales/en/translation.json @@ -833,6 +833,17 @@ "com_ui_not_used": "Not Used", "com_ui_nothing_found": "Nothing found", "com_ui_oauth": "OAuth", + "com_ui_oauth_success_title": "Authentication Successful", + "com_ui_oauth_success_description": "Your authentication was successful. This window will close in", + "com_ui_oauth_connected_to": "Connected to", + "com_ui_oauth_error_title": "Authentication Failed", + "com_ui_oauth_error_missing_code": "Authorization code is missing. Please try again.", + "com_ui_oauth_error_missing_state": "State parameter is missing. Please try again.", + "com_ui_oauth_error_invalid_state": "Invalid state parameter. Please try again.", + "com_ui_oauth_error_callback_failed": "Authentication callback failed. Please try again.", + "com_ui_oauth_error_generic": "Authentication failed. Please try again.", + "com_ui_close_window": "Close Window", + "com_ui_seconds": "seconds", "com_ui_of": "of", "com_ui_off": "Off", "com_ui_on": "On", @@ -1006,7 +1017,7 @@ "com_ui_zoom": "Zoom", "com_user_message": "You", "com_warning_resubmit_unsupported": "Resubmitting the AI message is not supported for this endpoint.", - "com_ui_add_mcp": "Add MCP", + "com_ui_add_mcp": "Add MCP", "com_ui_add_mcp_server": "Add MCP Server", "com_ui_edit_mcp_server": "Edit MCP Server", "com_agents_mcps_disabled": "You need to create an agent before adding MCPs.", @@ -1028,4 +1039,4 @@ "com_agents_mcp_trust_subtext": "Custom connectors are not verified by LibreChat", "com_ui_icon": "Icon", "com_agents_mcp_icon_size": "Minimum size 128 x 128 px" -} \ No newline at end of file +} diff --git a/client/src/routes/index.tsx b/client/src/routes/index.tsx index c8bc382a42..607464ecbb 100644 --- a/client/src/routes/index.tsx +++ b/client/src/routes/index.tsx @@ -1,13 +1,14 @@ import { createBrowserRouter, Navigate, Outlet } from 'react-router-dom'; import { Login, - Registration, - RequestPasswordReset, - ResetPassword, VerifyEmail, + Registration, + ResetPassword, ApiErrorWatcher, TwoFactorScreen, + RequestPasswordReset, } from '~/components/Auth'; +import { OAuthSuccess, OAuthError } from '~/components/OAuth'; import { AuthContextProvider } from '~/hooks/AuthContext'; import RouteErrorBoundary from './RouteErrorBoundary'; import StartupLayout from './Layouts/Startup'; @@ -31,6 +32,20 @@ export const router = createBrowserRouter([ element: , errorElement: , }, + { + path: 'oauth', + errorElement: , + children: [ + { + path: 'success', + element: , + }, + { + path: 'error', + element: , + }, + ], + }, { path: '/', element: , diff --git a/package-lock.json b/package-lock.json index 50dacbb5c0..ed1ef74865 100644 --- a/package-lock.json +++ b/package-lock.json @@ -20056,15 +20056,16 @@ } }, "node_modules/@modelcontextprotocol/sdk": { - "version": "1.11.2", - "resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.11.2.tgz", - "integrity": "sha512-H9vwztj5OAqHg9GockCQC06k1natgcxWQSRpQcPJf6i5+MWBzfKkRtxGbjQf0X2ihii0ffLZCRGbYV2f2bjNCQ==", + "version": "1.12.3", + "resolved": 
"https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.12.3.tgz", + "integrity": "sha512-DyVYSOafBvk3/j1Oka4z5BWT8o4AFmoNyZY9pALOm7Lh3GZglR71Co4r4dEUoqDWdDazIZQHBe7J2Nwkg6gHgQ==", "license": "MIT", "peer": true, "dependencies": { + "ajv": "^6.12.6", "content-type": "^1.0.5", "cors": "^2.8.5", - "cross-spawn": "^7.0.3", + "cross-spawn": "^7.0.5", "eventsource": "^3.0.2", "express": "^5.0.1", "express-rate-limit": "^7.5.0", @@ -20091,6 +20092,23 @@ "node": ">= 0.6" } }, + "node_modules/@modelcontextprotocol/sdk/node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "license": "MIT", + "peer": true, + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, "node_modules/@modelcontextprotocol/sdk/node_modules/body-parser": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-2.2.0.tgz", @@ -20219,6 +20237,13 @@ "node": ">=0.10.0" } }, + "node_modules/@modelcontextprotocol/sdk/node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "license": "MIT", + "peer": true + }, "node_modules/@modelcontextprotocol/sdk/node_modules/media-typer": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-1.1.0.tgz", @@ -30602,8 +30627,7 @@ "node_modules/fast-json-stable-stringify": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", - "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", - "dev": true + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==" }, "node_modules/fast-levenshtein": { "version": "2.0.6", @@ -37599,9 +37623,9 @@ "integrity": "sha512-1orQ9MT1vHFGQxhuy7E/0gECD3fd2fCC+PIX+/jgmU/gI3EpRocXtmtvxCO5x3WZ443FLTLFWNDjl5MPJf9u+Q==" }, "node_modules/oauth4webapi": { - "version": "3.5.1", - "resolved": "https://registry.npmjs.org/oauth4webapi/-/oauth4webapi-3.5.1.tgz", - "integrity": "sha512-txg/jZQwcbaF7PMJgY7aoxc9QuCxHVFMiEkDIJ60DwDz3PbtXPQnrzo+3X4IRYGChIwWLabRBRpf1k9hO9+xrQ==", + "version": "3.5.2", + "resolved": "https://registry.npmjs.org/oauth4webapi/-/oauth4webapi-3.5.2.tgz", + "integrity": "sha512-VYz5BaP3izIrUc1GAVzIoz4JnljiW0YAUFObMBwsqDnfHxz2sjLu3W7/8vE8Ms9IbMewN9+1kcvhY3tMscAeGQ==", "license": "MIT", "funding": { "url": "https://github.com/sponsors/panva" @@ -44419,7 +44443,6 @@ "version": "4.4.1", "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", - "dev": true, "license": "BSD-2-Clause", "dependencies": { "punycode": "^2.1.0" @@ -46150,7 +46173,7 @@ }, "packages/api": { "name": "@librechat/api", - "version": "1.2.3", + "version": "1.2.4", "license": "ISC", "devDependencies": { "@babel/preset-env": "^7.21.5", @@ -46184,7 +46207,7 @@ "peerDependencies": { "@librechat/agents": "^2.4.37", "@librechat/data-schemas": "*", - 
"@modelcontextprotocol/sdk": "^1.11.2", + "@modelcontextprotocol/sdk": "^1.12.3", "axios": "^1.8.2", "diff": "^7.0.0", "eventsource": "^3.0.2", @@ -46270,46 +46293,6 @@ "url": "https://github.com/sponsors/isaacs" } }, - "packages/auth": { - "name": "@librechat/auth", - "version": "0.0.1", - "extraneous": true, - "license": "MIT", - "dependencies": { - "https-proxy-agent": "^7.0.6", - "jsonwebtoken": "^9.0.2", - "mongoose": "^8.12.1", - "openid-client": "^6.5.0", - "passport": "^0.7.0", - "passport-facebook": "^3.0.0" - }, - "devDependencies": { - "@librechat/data-schemas": "^0.0.9", - "@rollup/plugin-alias": "^5.1.0", - "@rollup/plugin-commonjs": "^25.0.2", - "@rollup/plugin-json": "^6.1.0", - "@rollup/plugin-node-resolve": "^15.1.0", - "@rollup/plugin-replace": "^5.0.5", - "@rollup/plugin-terser": "^0.4.4", - "@rollup/plugin-typescript": "^12.1.2", - "@types/diff": "^6.0.0", - "@types/express": "^5.0.0", - "@types/jest": "^29.5.2", - "@types/node": "^20.3.0", - "jest": "^29.5.0", - "jest-junit": "^16.0.0", - "rimraf": "^5.0.1", - "rollup": "^4.22.4", - "rollup-plugin-generate-package-json": "^3.2.0", - "rollup-plugin-peer-deps-external": "^2.2.4", - "rollup-plugin-typescript2": "^0.35.0", - "ts-node": "^10.9.2", - "typescript": "^5.0.4" - }, - "peerDependencies": { - "keyv": "^5.3.2" - } - }, "packages/data-provider": { "name": "librechat-data-provider", "version": "0.7.87", diff --git a/package.json b/package.json index cb33ac9164..251247f4b9 100644 --- a/package.json +++ b/package.json @@ -64,6 +64,7 @@ "b:data": "cd packages/data-provider && bun run b:build", "b:mcp": "cd packages/api && bun run b:build", "b:data-schemas": "cd packages/data-schemas && bun run b:build", + "b:build:api": "cd packages/api && bun run b:build", "b:client": "bun --bun run b:data && bun --bun run b:mcp && bun --bun run b:data-schemas && cd client && bun --bun run b:build", "b:client:dev": "cd client && bun run b:dev", "b:test:client": "cd client && bun run b:test", diff --git a/packages/api/package.json b/packages/api/package.json index 61f4cda757..1c2ab534e3 100644 --- a/packages/api/package.json +++ b/packages/api/package.json @@ -1,6 +1,6 @@ { "name": "@librechat/api", - "version": "1.2.3", + "version": "1.2.4", "type": "commonjs", "description": "MCP services for LibreChat", "main": "dist/index.js", @@ -71,7 +71,7 @@ "peerDependencies": { "@librechat/agents": "^2.4.37", "@librechat/data-schemas": "*", - "@modelcontextprotocol/sdk": "^1.11.2", + "@modelcontextprotocol/sdk": "^1.12.3", "axios": "^1.8.2", "diff": "^7.0.0", "eventsource": "^3.0.2", diff --git a/packages/api/rollup.config.js b/packages/api/rollup.config.js index 0a56ea3d91..13d805874e 100644 --- a/packages/api/rollup.config.js +++ b/packages/api/rollup.config.js @@ -36,10 +36,11 @@ const plugins = [ const cjsBuild = { input: 'src/index.ts', output: { - file: pkg.main, + dir: 'dist', format: 'cjs', sourcemap: true, exports: 'named', + entryFileNames: '[name].js', }, external: [...Object.keys(pkg.dependencies || {}), ...Object.keys(pkg.devDependencies || {})], preserveSymlinks: true, diff --git a/api/server/utils/crypto.js b/packages/api/src/crypto/encryption.ts similarity index 81% rename from api/server/utils/crypto.js rename to packages/api/src/crypto/encryption.ts index 2f176fedee..aedf3c9c92 100644 --- a/api/server/utils/crypto.js +++ b/packages/api/src/crypto/encryption.ts @@ -1,15 +1,15 @@ -require('dotenv').config(); -const crypto = require('node:crypto'); +import 'dotenv/config'; +import crypto from 'node:crypto'; const { webcrypto } = 
crypto; // Use hex decoding for both key and IV for legacy methods. -const key = Buffer.from(process.env.CREDS_KEY, 'hex'); -const iv = Buffer.from(process.env.CREDS_IV, 'hex'); +const key = Buffer.from(process.env.CREDS_KEY ?? '', 'hex'); +const iv = Buffer.from(process.env.CREDS_IV ?? '', 'hex'); const algorithm = 'AES-CBC'; // --- Legacy v1/v2 Setup: AES-CBC with fixed key and IV --- -async function encrypt(value) { +export async function encrypt(value: string) { const cryptoKey = await webcrypto.subtle.importKey('raw', key, { name: algorithm }, false, [ 'encrypt', ]); @@ -23,7 +23,7 @@ async function encrypt(value) { return Buffer.from(encryptedBuffer).toString('hex'); } -async function decrypt(encryptedValue) { +export async function decrypt(encryptedValue: string) { const cryptoKey = await webcrypto.subtle.importKey('raw', key, { name: algorithm }, false, [ 'decrypt', ]); @@ -39,7 +39,7 @@ async function decrypt(encryptedValue) { // --- v2: AES-CBC with a random IV per encryption --- -async function encryptV2(value) { +export async function encryptV2(value: string) { const gen_iv = webcrypto.getRandomValues(new Uint8Array(16)); const cryptoKey = await webcrypto.subtle.importKey('raw', key, { name: algorithm }, false, [ 'encrypt', @@ -54,12 +54,12 @@ async function encryptV2(value) { return Buffer.from(gen_iv).toString('hex') + ':' + Buffer.from(encryptedBuffer).toString('hex'); } -async function decryptV2(encryptedValue) { +export async function decryptV2(encryptedValue: string) { const parts = encryptedValue.split(':'); if (parts.length === 1) { return parts[0]; } - const gen_iv = Buffer.from(parts.shift(), 'hex'); + const gen_iv = Buffer.from(parts.shift() ?? '', 'hex'); const encrypted = parts.join(':'); const cryptoKey = await webcrypto.subtle.importKey('raw', key, { name: algorithm }, false, [ 'decrypt', @@ -81,10 +81,10 @@ const algorithm_v3 = 'aes-256-ctr'; * Encrypts a value using AES-256-CTR. * Note: AES-256 requires a 32-byte key. Ensure that process.env.CREDS_KEY is a 64-character hex string. * - * @param {string} value - The plaintext to encrypt. - * @returns {string} The encrypted string with a "v3:" prefix. + * @param value - The plaintext to encrypt. + * @returns The encrypted string with a "v3:" prefix. */ -function encryptV3(value) { +export function encryptV3(value: string) { if (key.length !== 32) { throw new Error(`Invalid key length: expected 32 bytes, got ${key.length} bytes`); } @@ -94,7 +94,7 @@ function encryptV3(value) { return `v3:${iv_v3.toString('hex')}:${encrypted.toString('hex')}`; } -function decryptV3(encryptedValue) { +export function decryptV3(encryptedValue: string) { const parts = encryptedValue.split(':'); if (parts[0] !== 'v3') { throw new Error('Not a v3 encrypted value'); @@ -106,7 +106,7 @@ function decryptV3(encryptedValue) { return decrypted.toString('utf8'); } -async function getRandomValues(length) { +export async function getRandomValues(length: number) { if (!Number.isInteger(length) || length <= 0) { throw new Error('Length must be a positive integer'); } @@ -117,24 +117,13 @@ async function getRandomValues(length) { /** * Computes SHA-256 hash for the given input. - * @param {string} input - * @returns {Promise} + * @param input - The input to hash. + * @returns The SHA-256 hash of the input. 
*/ -async function hashBackupCode(input) { +export async function hashBackupCode(input: string) { const encoder = new TextEncoder(); const data = encoder.encode(input); const hashBuffer = await webcrypto.subtle.digest('SHA-256', data); const hashArray = Array.from(new Uint8Array(hashBuffer)); return hashArray.map((b) => b.toString(16).padStart(2, '0')).join(''); } - -module.exports = { - encrypt, - decrypt, - encryptV2, - decryptV2, - encryptV3, - decryptV3, - hashBackupCode, - getRandomValues, -}; diff --git a/packages/api/src/crypto/index.ts b/packages/api/src/crypto/index.ts new file mode 100644 index 0000000000..73ebae8b16 --- /dev/null +++ b/packages/api/src/crypto/index.ts @@ -0,0 +1 @@ +export * from './encryption'; diff --git a/packages/api/src/flow/manager.spec.ts b/packages/api/src/flow/manager.spec.ts index 1f1714509b..af6420ffc4 100644 --- a/packages/api/src/flow/manager.spec.ts +++ b/packages/api/src/flow/manager.spec.ts @@ -1,8 +1,8 @@ -import { FlowStateManager } from './manager'; import { Keyv } from 'keyv'; +import { FlowStateManager } from './manager'; import type { FlowState } from './types'; -// Create a mock class without extending Keyv +/** Mock class without extending Keyv */ class MockKeyv { private store: Map>; diff --git a/packages/api/src/flow/manager.ts b/packages/api/src/flow/manager.ts index 6a421b8d18..f27dfbb64b 100644 --- a/packages/api/src/flow/manager.ts +++ b/packages/api/src/flow/manager.ts @@ -1,28 +1,18 @@ import { Keyv } from 'keyv'; +import { logger } from '@librechat/data-schemas'; import type { StoredDataNoRaw } from 'keyv'; -import type { Logger } from 'winston'; import type { FlowState, FlowMetadata, FlowManagerOptions } from './types'; export class FlowStateManager { private keyv: Keyv; private ttl: number; - private logger: Logger; private intervals: Set; - private static getDefaultLogger(): Logger { - return { - error: console.error, - warn: console.warn, - info: console.info, - debug: console.debug, - } as Logger; - } - constructor(store: Keyv, options?: FlowManagerOptions) { if (!options) { options = { ttl: 60000 * 3 }; } - const { ci = false, ttl, logger } = options; + const { ci = false, ttl } = options; if (!ci && !(store instanceof Keyv)) { throw new Error('Invalid store provided to FlowStateManager'); @@ -30,14 +20,13 @@ export class FlowStateManager { this.ttl = ttl; this.keyv = store; - this.logger = logger || FlowStateManager.getDefaultLogger(); this.intervals = new Set(); this.setupCleanupHandlers(); } private setupCleanupHandlers() { const cleanup = () => { - this.logger.info('Cleaning up FlowStateManager intervals...'); + logger.info('Cleaning up FlowStateManager intervals...'); this.intervals.forEach((interval) => clearInterval(interval)); this.intervals.clear(); process.exit(0); @@ -66,7 +55,7 @@ export class FlowStateManager { let existingState = (await this.keyv.get(flowKey)) as FlowState | undefined; if (existingState) { - this.logger.debug(`[${flowKey}] Flow already exists`); + logger.debug(`[${flowKey}] Flow already exists`); return this.monitorFlow(flowKey, type, signal); } @@ -74,7 +63,7 @@ export class FlowStateManager { existingState = (await this.keyv.get(flowKey)) as FlowState | undefined; if (existingState) { - this.logger.debug(`[${flowKey}] Flow exists on 2nd check`); + logger.debug(`[${flowKey}] Flow exists on 2nd check`); return this.monitorFlow(flowKey, type, signal); } @@ -85,7 +74,7 @@ export class FlowStateManager { createdAt: Date.now(), }; - this.logger.debug('Creating initial flow state:', flowKey); + 
logger.debug('Creating initial flow state:', flowKey); await this.keyv.set(flowKey, initialState, this.ttl); return this.monitorFlow(flowKey, type, signal); } @@ -102,7 +91,7 @@ export class FlowStateManager { if (!flowState) { clearInterval(intervalId); this.intervals.delete(intervalId); - this.logger.error(`[${flowKey}] Flow state not found`); + logger.error(`[${flowKey}] Flow state not found`); reject(new Error(`${type} Flow state not found`)); return; } @@ -110,7 +99,7 @@ export class FlowStateManager { if (signal?.aborted) { clearInterval(intervalId); this.intervals.delete(intervalId); - this.logger.warn(`[${flowKey}] Flow aborted`); + logger.warn(`[${flowKey}] Flow aborted`); const message = `${type} flow aborted`; await this.keyv.delete(flowKey); reject(new Error(message)); @@ -120,7 +109,7 @@ export class FlowStateManager { if (flowState.status !== 'PENDING') { clearInterval(intervalId); this.intervals.delete(intervalId); - this.logger.debug(`[${flowKey}] Flow completed`); + logger.debug(`[${flowKey}] Flow completed`); if (flowState.status === 'COMPLETED' && flowState.result !== undefined) { resolve(flowState.result); @@ -135,17 +124,15 @@ export class FlowStateManager { if (elapsedTime >= this.ttl) { clearInterval(intervalId); this.intervals.delete(intervalId); - this.logger.error( + logger.error( `[${flowKey}] Flow timed out | Elapsed time: ${elapsedTime} | TTL: ${this.ttl}`, ); await this.keyv.delete(flowKey); reject(new Error(`${type} flow timed out`)); } - this.logger.debug( - `[${flowKey}] Flow state elapsed time: ${elapsedTime}, checking again...`, - ); + logger.debug(`[${flowKey}] Flow state elapsed time: ${elapsedTime}, checking again...`); } catch (error) { - this.logger.error(`[${flowKey}] Error checking flow state:`, error); + logger.error(`[${flowKey}] Error checking flow state:`, error); clearInterval(intervalId); this.intervals.delete(intervalId); reject(error); @@ -224,7 +211,7 @@ export class FlowStateManager { const flowKey = this.getFlowKey(flowId, type); let existingState = (await this.keyv.get(flowKey)) as FlowState | undefined; if (existingState) { - this.logger.debug(`[${flowKey}] Flow already exists`); + logger.debug(`[${flowKey}] Flow already exists`); return this.monitorFlow(flowKey, type, signal); } @@ -232,7 +219,7 @@ export class FlowStateManager { existingState = (await this.keyv.get(flowKey)) as FlowState | undefined; if (existingState) { - this.logger.debug(`[${flowKey}] Flow exists on 2nd check`); + logger.debug(`[${flowKey}] Flow exists on 2nd check`); return this.monitorFlow(flowKey, type, signal); } @@ -242,7 +229,7 @@ export class FlowStateManager { metadata: {}, createdAt: Date.now(), }; - this.logger.debug(`[${flowKey}] Creating initial flow state`); + logger.debug(`[${flowKey}] Creating initial flow state`); await this.keyv.set(flowKey, initialState, this.ttl); try { diff --git a/packages/api/src/index.ts b/packages/api/src/index.ts index 9e3db4ef6b..b7859e7ca4 100644 --- a/packages/api/src/index.ts +++ b/packages/api/src/index.ts @@ -1,8 +1,13 @@ /* MCP */ export * from './mcp/manager'; +export * from './mcp/oauth'; /* Utilities */ export * from './mcp/utils'; export * from './utils'; +/* OAuth */ +export * from './oauth'; +/* Crypto */ +export * from './crypto'; /* Flow */ export * from './flow/manager'; /* Agents */ diff --git a/packages/api/src/mcp/connection.ts b/packages/api/src/mcp/connection.ts index 81a6831a70..38ca2b23d6 100644 --- a/packages/api/src/mcp/connection.ts +++ b/packages/api/src/mcp/connection.ts @@ -1,4 +1,5 @@ import { 
EventEmitter } from 'events'; +import { logger } from '@librechat/data-schemas'; import { Client } from '@modelcontextprotocol/sdk/client/index.js'; import { SSEClientTransport } from '@modelcontextprotocol/sdk/client/sse.js'; import { @@ -10,7 +11,7 @@ import { ResourceListChangedNotificationSchema } from '@modelcontextprotocol/sdk import { StreamableHTTPClientTransport } from '@modelcontextprotocol/sdk/client/streamableHttp.js'; import type { Transport } from '@modelcontextprotocol/sdk/shared/transport.js'; import type { JSONRPCMessage } from '@modelcontextprotocol/sdk/types.js'; -import type { Logger } from 'winston'; +import type { MCPOAuthTokens } from './oauth/types'; import type * as t from './types'; function isStdioOptions(options: t.MCPOptions): options is t.StdioOptions { @@ -67,24 +68,29 @@ export class MCPConnection extends EventEmitter { private isReconnecting = false; private isInitializing = false; private reconnectAttempts = 0; - iconPath?: string; - timeout?: number; private readonly userId?: string; private lastPingTime: number; + private oauthTokens?: MCPOAuthTokens | null; + private oauthRequired = false; + iconPath?: string; + timeout?: number; + url?: string; constructor( serverName: string, private readonly options: t.MCPOptions, - private logger?: Logger, userId?: string, + oauthTokens?: MCPOAuthTokens | null, ) { super(); this.serverName = serverName; - this.logger = logger; this.userId = userId; this.iconPath = options.iconPath; this.timeout = options.timeout; this.lastPingTime = Date.now(); + if (oauthTokens) { + this.oauthTokens = oauthTokens; + } this.client = new Client( { name: '@librechat/api-client', @@ -107,11 +113,10 @@ export class MCPConnection extends EventEmitter { public static getInstance( serverName: string, options: t.MCPOptions, - logger?: Logger, userId?: string, ): MCPConnection { if (!MCPConnection.instance) { - MCPConnection.instance = new MCPConnection(serverName, options, logger, userId); + MCPConnection.instance = new MCPConnection(serverName, options, userId); } return MCPConnection.instance; } @@ -129,7 +134,7 @@ export class MCPConnection extends EventEmitter { private emitError(error: unknown, errorContext: string): void { const errorMessage = error instanceof Error ? 
error.message : String(error); - this.logger?.error(`${this.getLogPrefix()} ${errorContext}: ${errorMessage}`); + logger.error(`${this.getLogPrefix()} ${errorContext}: ${errorMessage}`); this.emit('error', new Error(`${errorContext}: ${errorMessage}`)); } @@ -167,45 +172,52 @@ export class MCPConnection extends EventEmitter { if (!isWebSocketOptions(options)) { throw new Error('Invalid options for websocket transport.'); } + this.url = options.url; return new WebSocketClientTransport(new URL(options.url)); case 'sse': { if (!isSSEOptions(options)) { throw new Error('Invalid options for sse transport.'); } + this.url = options.url; const url = new URL(options.url); - this.logger?.info(`${this.getLogPrefix()} Creating SSE transport: ${url.toString()}`); + logger.info(`${this.getLogPrefix()} Creating SSE transport: ${url.toString()}`); const abortController = new AbortController(); + + /** Add OAuth token to headers if available */ + const headers = { ...options.headers }; + if (this.oauthTokens?.access_token) { + headers['Authorization'] = `Bearer ${this.oauthTokens.access_token}`; + } + const transport = new SSEClientTransport(url, { requestInit: { - headers: options.headers, + headers, signal: abortController.signal, }, eventSourceInit: { fetch: (url, init) => { - const headers = new Headers(Object.assign({}, init?.headers, options.headers)); + const fetchHeaders = new Headers(Object.assign({}, init?.headers, headers)); return fetch(url, { ...init, - headers, + headers: fetchHeaders, }); }, }, }); transport.onclose = () => { - this.logger?.info(`${this.getLogPrefix()} SSE transport closed`); + logger.info(`${this.getLogPrefix()} SSE transport closed`); this.emit('connectionChange', 'disconnected'); }; transport.onerror = (error) => { - this.logger?.error(`${this.getLogPrefix()} SSE transport error:`, error); + logger.error(`${this.getLogPrefix()} SSE transport error:`, error); this.emitError(error, 'SSE transport error:'); }; transport.onmessage = (message) => { - this.logger?.info( - `${this.getLogPrefix()} Message received: ${JSON.stringify(message)}`, - ); + logger.info(`${this.getLogPrefix()} Message received: ${JSON.stringify(message)}`); }; this.setupTransportErrorHandlers(transport); @@ -216,33 +228,38 @@ export class MCPConnection extends EventEmitter { if (!isStreamableHTTPOptions(options)) { throw new Error('Invalid options for streamable-http transport.'); } + this.url = options.url; const url = new URL(options.url); - this.logger?.info( + logger.info( `${this.getLogPrefix()} Creating streamable-http transport: ${url.toString()}`, ); const abortController = new AbortController(); + // Add OAuth token to headers if available + const headers = { ...options.headers }; + if (this.oauthTokens?.access_token) { + headers['Authorization'] = `Bearer ${this.oauthTokens.access_token}`; + } + const transport = new StreamableHTTPClientTransport(url, { requestInit: { - headers: options.headers, + headers, signal: abortController.signal, }, }); transport.onclose = () => { - this.logger?.info(`${this.getLogPrefix()} Streamable-http transport closed`); + logger.info(`${this.getLogPrefix()} Streamable-http transport closed`); this.emit('connectionChange', 'disconnected'); }; transport.onerror = (error: Error | unknown) => { - this.logger?.error(`${this.getLogPrefix()} Streamable-http transport error:`, error); + logger.error(`${this.getLogPrefix()} Streamable-http transport error:`, error); this.emitError(error, 'Streamable-http transport error:'); }; transport.onmessage = (message: 
JSONRPCMessage) => { - this.logger?.info( - `${this.getLogPrefix()} Message received: ${JSON.stringify(message)}`, - ); + logger.info(`${this.getLogPrefix()} Message received: ${JSON.stringify(message)}`); }; this.setupTransportErrorHandlers(transport); @@ -271,17 +288,17 @@ export class MCPConnection extends EventEmitter { /** * // FOR DEBUGGING * // this.client.setRequestHandler(PingRequestSchema, async (request, extra) => { - * // this.logger?.info(`[MCP][${this.serverName}] PingRequest: ${JSON.stringify(request)}`); + * // logger.info(`[MCP][${this.serverName}] PingRequest: ${JSON.stringify(request)}`); * // if (getEventListeners && extra.signal) { * // const listenerCount = getEventListeners(extra.signal, 'abort').length; - * // this.logger?.debug(`Signal has ${listenerCount} abort listeners`); + * // logger.debug(`Signal has ${listenerCount} abort listeners`); * // } * // return {}; * // }); */ } else if (state === 'error' && !this.isReconnecting && !this.isInitializing) { this.handleReconnection().catch((error) => { - this.logger?.error(`${this.getLogPrefix()} Reconnection handler failed:`, error); + logger.error(`${this.getLogPrefix()} Reconnection handler failed:`, error); }); } }); @@ -290,7 +307,15 @@ export class MCPConnection extends EventEmitter { } private async handleReconnection(): Promise { - if (this.isReconnecting || this.shouldStopReconnecting || this.isInitializing) { + if ( + this.isReconnecting || + this.shouldStopReconnecting || + this.isInitializing || + this.oauthRequired + ) { + if (this.oauthRequired) { + logger.info(`${this.getLogPrefix()} OAuth required, skipping reconnection attempts`); + } return; } @@ -305,7 +330,7 @@ export class MCPConnection extends EventEmitter { this.reconnectAttempts++; const delay = backoffDelay(this.reconnectAttempts); - this.logger?.info( + logger.info( `${this.getLogPrefix()} Reconnecting ${this.reconnectAttempts}/${this.MAX_RECONNECT_ATTEMPTS} (delay: ${delay}ms)`, ); @@ -316,13 +341,13 @@ export class MCPConnection extends EventEmitter { this.reconnectAttempts = 0; return; } catch (error) { - this.logger?.error(`${this.getLogPrefix()} Reconnection attempt failed:`, error); + logger.error(`${this.getLogPrefix()} Reconnection attempt failed:`, error); if ( this.reconnectAttempts === this.MAX_RECONNECT_ATTEMPTS || (this.shouldStopReconnecting as boolean) ) { - this.logger?.error(`${this.getLogPrefix()} Stopping reconnection attempts`); + logger.error(`${this.getLogPrefix()} Stopping reconnection attempts`); return; } } @@ -366,18 +391,21 @@ export class MCPConnection extends EventEmitter { await this.client.close(); this.transport = null; } catch (error) { - this.logger?.warn(`${this.getLogPrefix()} Error closing connection:`, error); + logger.warn(`${this.getLogPrefix()} Error closing connection:`, error); } } this.transport = this.constructTransport(this.options); this.setupTransportDebugHandlers(); - const connectTimeout = this.options.initTimeout ?? 10000; + const connectTimeout = this.options.initTimeout ?? 
120000; await Promise.race([ this.client.connect(this.transport), new Promise((_resolve, reject) => - setTimeout(() => reject(new Error('Connection timeout')), connectTimeout), + setTimeout( + () => reject(new Error(`Connection timeout after ${connectTimeout}ms`)), + connectTimeout, + ), ), ]); @@ -385,9 +413,85 @@ export class MCPConnection extends EventEmitter { this.emit('connectionChange', 'connected'); this.reconnectAttempts = 0; } catch (error) { + // Check if it's an OAuth authentication error + if (this.isOAuthError(error)) { + logger.warn(`${this.getLogPrefix()} OAuth authentication required`); + this.oauthRequired = true; + const serverUrl = this.url; + logger.debug(`${this.getLogPrefix()} Server URL for OAuth: ${serverUrl}`); + + const oauthTimeout = this.options.initTimeout ?? 60000; + /** Promise that will resolve when OAuth is handled */ + const oauthHandledPromise = new Promise((resolve, reject) => { + let timeoutId: NodeJS.Timeout | null = null; + let oauthHandledListener: (() => void) | null = null; + let oauthFailedListener: ((error: Error) => void) | null = null; + + /** Cleanup function to remove listeners and clear timeout */ + const cleanup = () => { + if (timeoutId) { + clearTimeout(timeoutId); + } + if (oauthHandledListener) { + this.off('oauthHandled', oauthHandledListener); + } + if (oauthFailedListener) { + this.off('oauthFailed', oauthFailedListener); + } + }; + + // Success handler + oauthHandledListener = () => { + cleanup(); + resolve(); + }; + + // Failure handler + oauthFailedListener = (error: Error) => { + cleanup(); + reject(error); + }; + + // Timeout handler + timeoutId = setTimeout(() => { + cleanup(); + reject(new Error(`OAuth handling timeout after ${oauthTimeout}ms`)); + }, oauthTimeout); + + // Listen for both success and failure events + this.once('oauthHandled', oauthHandledListener); + this.once('oauthFailed', oauthFailedListener); + }); + + // Emit the event + this.emit('oauthRequired', { + serverName: this.serverName, + error, + serverUrl, + userId: this.userId, + }); + + try { + // Wait for OAuth to be handled + await oauthHandledPromise; + // Reset the oauthRequired flag + this.oauthRequired = false; + // Don't throw the error - just return so connection can be retried + logger.info( + `${this.getLogPrefix()} OAuth handled successfully, connection will be retried`, + ); + return; + } catch (oauthError) { + // OAuth failed or timed out + this.oauthRequired = false; + logger.error(`${this.getLogPrefix()} OAuth handling failed:`, oauthError); + // Re-throw the original authentication error + throw error; + } + } + this.connectionState = 'error'; this.emit('connectionChange', 'error'); - this.lastError = error instanceof Error ? 
error : new Error(String(error)); throw error; } finally { this.connectPromise = null; @@ -403,7 +507,7 @@ export class MCPConnection extends EventEmitter { } this.transport.onmessage = (msg) => { - this.logger?.debug(`${this.getLogPrefix()} Transport received: ${JSON.stringify(msg)}`); + logger.debug(`${this.getLogPrefix()} Transport received: ${JSON.stringify(msg)}`); }; const originalSend = this.transport.send.bind(this.transport); @@ -414,7 +518,7 @@ export class MCPConnection extends EventEmitter { } this.lastPingTime = Date.now(); } - this.logger?.debug(`${this.getLogPrefix()} Transport sending: ${JSON.stringify(msg)}`); + logger.debug(`${this.getLogPrefix()} Transport sending: ${JSON.stringify(msg)}`); return originalSend(msg); }; } @@ -427,14 +531,24 @@ export class MCPConnection extends EventEmitter { throw new Error('Connection not established'); } } catch (error) { - this.logger?.error(`${this.getLogPrefix()} Connection failed:`, error); + logger.error(`${this.getLogPrefix()} Connection failed:`, error); throw error; } } private setupTransportErrorHandlers(transport: Transport): void { transport.onerror = (error) => { - this.logger?.error(`${this.getLogPrefix()} Transport error:`, error); + logger.error(`${this.getLogPrefix()} Transport error:`, error); + + // Check if it's an OAuth authentication error + if (error && typeof error === 'object' && 'code' in error) { + const errorCode = (error as unknown as { code?: number }).code; + if (errorCode === 401 || errorCode === 403) { + logger.warn(`${this.getLogPrefix()} OAuth authentication error detected`); + this.emit('oauthError', error); + } + } + this.emit('connectionChange', 'error'); }; } @@ -562,22 +676,36 @@ export class MCPConnection extends EventEmitter { // } // } - // Public getters for state information - public getConnectionState(): t.ConnectionState { - return this.connectionState; - } - public async isConnected(): Promise { try { await this.client.ping(); return this.connectionState === 'connected'; } catch (error) { - this.logger?.error(`${this.getLogPrefix()} Ping failed:`, error); + logger.error(`${this.getLogPrefix()} Ping failed:`, error); return false; } } - public getLastError(): Error | null { - return this.lastError; + public setOAuthTokens(tokens: MCPOAuthTokens): void { + this.oauthTokens = tokens; + } + + private isOAuthError(error: unknown): boolean { + if (!error || typeof error !== 'object') { + return false; + } + + // Check for SSE error with 401 status + if ('message' in error && typeof error.message === 'string') { + return error.message.includes('401') || error.message.includes('Non-200 status code (401)'); + } + + // Check for error code + if ('code' in error) { + const code = (error as { code?: number }).code; + return code === 401 || code === 403; + } + + return false; } } diff --git a/packages/api/src/mcp/enum.ts b/packages/api/src/mcp/enum.ts index 995ddfb523..806e5c0054 100644 --- a/packages/api/src/mcp/enum.ts +++ b/packages/api/src/mcp/enum.ts @@ -1,3 +1,9 @@ export enum CONSTANTS { mcp_delimiter = '_mcp_', + /** System user ID for app-level OAuth tokens (all zeros ObjectId) */ + SYSTEM_USER_ID = '000000000000000000000000', +} + +export function isSystemUserId(userId?: string): boolean { + return userId === CONSTANTS.SYSTEM_USER_ID; } diff --git a/packages/api/src/mcp/manager.ts b/packages/api/src/mcp/manager.ts index af8f1632a7..0a60784457 100644 --- a/packages/api/src/mcp/manager.ts +++ b/packages/api/src/mcp/manager.ts @@ -1,11 +1,18 @@ +import { logger } from '@librechat/data-schemas'; 
import { CallToolResultSchema, ErrorCode, McpError } from '@modelcontextprotocol/sdk/types.js'; import type { RequestOptions } from '@modelcontextprotocol/sdk/shared/protocol.js'; +import type { OAuthClientInformation } from '@modelcontextprotocol/sdk/shared/auth.js'; import type { JsonSchemaType, MCPOptions, TUser } from 'librechat-data-provider'; -import type { Logger } from 'winston'; +import type { TokenMethods } from '@librechat/data-schemas'; +import type { FlowStateManager } from '~/flow/manager'; +import type { MCPOAuthTokens, MCPOAuthFlowMetadata } from './oauth/types'; +import type { FlowMetadata } from '~/flow/types'; import type * as t from './types'; +import { CONSTANTS, isSystemUserId } from './enum'; +import { MCPOAuthHandler } from './oauth/handler'; +import { MCPTokenStorage } from './oauth/tokens'; import { formatToolContent } from './parsers'; import { MCPConnection } from './connection'; -import { CONSTANTS } from './enum'; export interface CallToolOptions extends RequestOptions { user?: TUser; @@ -24,24 +31,10 @@ export class MCPManager { private processMCPEnv?: (obj: MCPOptions, user?: TUser) => MCPOptions; // Store the processing function /** Store MCP server instructions */ private serverInstructions: Map = new Map(); - private logger: Logger; - private static getDefaultLogger(): Logger { - return { - error: console.error, - warn: console.warn, - info: console.info, - debug: console.debug, - } as Logger; - } - - private constructor(logger?: Logger) { - this.logger = logger || MCPManager.getDefaultLogger(); - } - - public static getInstance(logger?: Logger): MCPManager { + public static getInstance(): MCPManager { if (!MCPManager.instance) { - MCPManager.instance = new MCPManager(logger); + MCPManager.instance = new MCPManager(); } // Check for idle connections when getInstance is called MCPManager.instance.checkIdleConnections(); @@ -49,79 +42,188 @@ export class MCPManager { } /** Stores configs and initializes app-level connections */ - public async initializeMCP( - mcpServers: t.MCPServers, - processMCPEnv?: (obj: MCPOptions) => MCPOptions, - ): Promise { - this.logger.info('[MCP] Initializing app-level servers'); + public async initializeMCP({ + mcpServers, + flowManager, + tokenMethods, + processMCPEnv, + }: { + mcpServers: t.MCPServers; + flowManager: FlowStateManager; + tokenMethods?: TokenMethods; + processMCPEnv?: (obj: MCPOptions) => MCPOptions; + }): Promise { this.processMCPEnv = processMCPEnv; // Store the function this.mcpConfigs = mcpServers; + if (!flowManager) { + logger.info('[MCP] No flow manager provided, OAuth will not be available'); + } + + if (!tokenMethods) { + logger.info('[MCP] No token methods provided, token persistence will not be available'); + } + const entries = Object.entries(mcpServers); const initializedServers = new Set(); const connectionResults = await Promise.allSettled( entries.map(async ([serverName, _config], i) => { /** Process env for app-level connections */ const config = this.processMCPEnv ? 
this.processMCPEnv(_config) : _config; - const connection = new MCPConnection(serverName, config, this.logger); + + /** Existing tokens for system-level connections */ + let tokens: MCPOAuthTokens | null = null; + if (tokenMethods?.findToken) { + try { + /** Refresh function for app-level connections */ + const refreshTokensFunction = async ( + refreshToken: string, + metadata: { + userId: string; + serverName: string; + identifier: string; + clientInfo?: OAuthClientInformation; + }, + ) => { + /** URL from config if available */ + const serverUrl = (config as t.SSEOptions | t.StreamableHTTPOptions).url; + return await MCPOAuthHandler.refreshOAuthTokens( + refreshToken, + { + serverName: metadata.serverName, + serverUrl, + clientInfo: metadata.clientInfo, + }, + config.oauth, + ); + }; + + /** Flow state to prevent concurrent token operations */ + const tokenFlowId = `tokens:${CONSTANTS.SYSTEM_USER_ID}:${serverName}`; + tokens = await flowManager.createFlowWithHandler( + tokenFlowId, + 'mcp_get_tokens', + async () => { + return await MCPTokenStorage.getTokens({ + userId: CONSTANTS.SYSTEM_USER_ID, + serverName, + findToken: tokenMethods.findToken, + refreshTokens: refreshTokensFunction, + createToken: tokenMethods.createToken, + updateToken: tokenMethods.updateToken, + }); + }, + ); + } catch { + logger.debug(`[MCP][${serverName}] No existing tokens found`); + } + } + + if (tokens) { + logger.info(`[MCP][${serverName}] Loaded OAuth tokens`); + } + + const connection = new MCPConnection(serverName, config, undefined, tokens); + + /** Listen for OAuth requirements */ + logger.info(`[MCP][${serverName}] Setting up OAuth event listener`); + connection.on('oauthRequired', async (data) => { + logger.debug(`[MCP][${serverName}] oauthRequired event received`); + const result = await this.handleOAuthRequired({ + ...data, + flowManager, + }); + + if (result?.tokens && tokenMethods?.createToken) { + try { + connection.setOAuthTokens(result.tokens); + await MCPTokenStorage.storeTokens({ + userId: CONSTANTS.SYSTEM_USER_ID, + serverName, + tokens: result.tokens, + createToken: tokenMethods.createToken, + updateToken: tokenMethods.updateToken, + findToken: tokenMethods.findToken, + clientInfo: result.clientInfo, + }); + logger.info(`[MCP][${serverName}] OAuth tokens saved to storage`); + } catch (error) { + logger.error(`[MCP][${serverName}] Failed to save OAuth tokens to storage`, error); + } + } + + // Only emit oauthHandled if we actually got tokens (OAuth succeeded) + if (result?.tokens) { + connection.emit('oauthHandled'); + } else { + // OAuth failed, emit oauthFailed to properly reject the promise + logger.warn(`[MCP][${serverName}] OAuth failed, emitting oauthFailed event`); + connection.emit('oauthFailed', new Error('OAuth authentication failed')); + } + }); try { + const connectTimeout = config.initTimeout ?? 
30000; const connectionTimeout = new Promise((_, reject) => - setTimeout(() => reject(new Error('Connection timeout')), 30000), + setTimeout( + () => reject(new Error(`Connection timeout after ${connectTimeout}ms`)), + connectTimeout, + ), ); - const connectionAttempt = this.initializeServer(connection, `[MCP][${serverName}]`); + const connectionAttempt = this.initializeServer({ + connection, + logPrefix: `[MCP][${serverName}]`, + flowManager, + handleOAuth: false, + }); await Promise.race([connectionAttempt, connectionTimeout]); if (await connection.isConnected()) { initializedServers.add(i); - this.connections.set(serverName, connection); // Store in app-level map + this.connections.set(serverName, connection); - // Handle unified serverInstructions configuration + /** Unified `serverInstructions` configuration */ const configInstructions = config.serverInstructions; if (configInstructions !== undefined) { if (typeof configInstructions === 'string') { - // Custom instructions provided this.serverInstructions.set(serverName, configInstructions); - this.logger.info( + logger.info( `[MCP][${serverName}] Custom instructions stored for context inclusion: ${configInstructions}`, ); } else if (configInstructions === true) { - // Use server-provided instructions + /** Server-provided instructions */ const serverInstructions = connection.client.getInstructions(); if (serverInstructions) { this.serverInstructions.set(serverName, serverInstructions); - this.logger.info( + logger.info( `[MCP][${serverName}] Server instructions stored for context inclusion: ${serverInstructions}`, ); } else { - this.logger.info( + logger.info( `[MCP][${serverName}] serverInstructions=true but no server instructions available`, ); } } else { - // configInstructions is false - explicitly disabled - this.logger.info( + logger.info( `[MCP][${serverName}] Instructions explicitly disabled (serverInstructions=false)`, ); } } else { - this.logger.info( + logger.info( `[MCP][${serverName}] Instructions not included (serverInstructions not configured)`, ); } const serverCapabilities = connection.client.getServerCapabilities(); - this.logger.info( - `[MCP][${serverName}] Capabilities: ${JSON.stringify(serverCapabilities)}`, - ); + logger.info(`[MCP][${serverName}] Capabilities: ${JSON.stringify(serverCapabilities)}`); if (serverCapabilities?.tools) { const tools = await connection.client.listTools(); if (tools.tools.length) { - this.logger.info( + logger.info( `[MCP][${serverName}] Available tools: ${tools.tools .map((tool) => tool.name) .join(', ')}`, @@ -130,7 +232,7 @@ export class MCPManager { } } } catch (error) { - this.logger.error(`[MCP][${serverName}] Initialization failed`, error); + logger.error(`[MCP][${serverName}] Initialization failed`, error); throw error; } }), @@ -140,35 +242,47 @@ export class MCPManager { (result): result is PromiseRejectedResult => result.status === 'rejected', ); - this.logger.info( + logger.info( `[MCP] Initialized ${initializedServers.size}/${entries.length} app-level server(s)`, ); if (failedConnections.length > 0) { - this.logger.warn( + logger.warn( `[MCP] ${failedConnections.length}/${entries.length} app-level server(s) failed to initialize`, ); } entries.forEach(([serverName], index) => { if (initializedServers.has(index)) { - this.logger.info(`[MCP][${serverName}] ✓ Initialized`); + logger.info(`[MCP][${serverName}] ✓ Initialized`); } else { - this.logger.info(`[MCP][${serverName}] ✗ Failed`); + logger.info(`[MCP][${serverName}] ✗ Failed`); } }); if (initializedServers.size === 
entries.length) { - this.logger.info('[MCP] All app-level servers initialized successfully'); + logger.info('[MCP] All app-level servers initialized successfully'); } else if (initializedServers.size === 0) { - this.logger.warn('[MCP] No app-level servers initialized'); + logger.warn('[MCP] No app-level servers initialized'); } } /** Generic server initialization logic */ - private async initializeServer(connection: MCPConnection, logPrefix: string): Promise { + private async initializeServer({ + connection, + logPrefix, + flowManager, + handleOAuth = true, + }: { + connection: MCPConnection; + logPrefix: string; + flowManager: FlowStateManager; + handleOAuth?: boolean; + }): Promise { const maxAttempts = 3; let attempts = 0; + /** Whether OAuth has been handled by the connection */ + let oauthHandled = false; while (attempts < maxAttempts) { try { @@ -179,8 +293,34 @@ export class MCPManager { throw new Error('Connection attempt succeeded but status is not connected'); } catch (error) { attempts++; + + if (this.isOAuthError(error)) { + // Only handle OAuth if requested (not already handled by event listener) + if (handleOAuth) { + /** Check if OAuth was already handled by the connection */ + const errorWithFlag = error as (Error & { isOAuthError?: boolean }) | undefined; + if (!oauthHandled && errorWithFlag?.isOAuthError) { + oauthHandled = true; + logger.info(`${logPrefix} Handling OAuth`); + const serverUrl = connection.url; + if (serverUrl) { + await this.handleOAuthRequired({ + serverName: connection.serverName, + serverUrl, + flowManager, + }); + } + } else { + logger.info(`${logPrefix} OAuth already handled by connection`); + } + } + // Don't retry on OAuth errors - just throw + logger.info(`${logPrefix} OAuth required, stopping connection attempts`); + throw error; + } + if (attempts === maxAttempts) { - this.logger.error(`${logPrefix} Failed to connect after ${maxAttempts} attempts`, error); + logger.error(`${logPrefix} Failed to connect after ${maxAttempts} attempts`, error); throw error; // Re-throw the last error } await new Promise((resolve) => setTimeout(resolve, 2000 * attempts)); @@ -188,6 +328,25 @@ export class MCPManager { } } + private isOAuthError(error: unknown): boolean { + if (!error || typeof error !== 'object') { + return false; + } + + // Check for SSE error with 401 status + if ('message' in error && typeof error.message === 'string') { + return error.message.includes('401') || error.message.includes('Non-200 status code (401)'); + } + + // Check for error code + if ('code' in error) { + const code = (error as { code?: number }).code; + return code === 401 || code === 403; + } + + return false; + } + /** Check for and disconnect idle connections */ private checkIdleConnections(currentUserId?: string): void { const now = Date.now(); @@ -198,12 +357,12 @@ export class MCPManager { continue; } if (now - lastActivity > this.USER_CONNECTION_IDLE_TIMEOUT) { - this.logger.info( + logger.info( `[MCP][User: ${userId}] User idle for too long. 
Disconnecting all connections...`, ); // Disconnect all user connections asynchronously (fire and forget) this.disconnectUserConnections(userId).catch((err) => - this.logger.error(`[MCP][User: ${userId}] Error disconnecting idle connections:`, err), + logger.error(`[MCP][User: ${userId}] Error disconnecting idle connections:`, err), ); } } @@ -213,13 +372,29 @@ export class MCPManager { private updateUserLastActivity(userId: string): void { const now = Date.now(); this.userLastActivity.set(userId, now); - this.logger.debug( + logger.debug( `[MCP][User: ${userId}] Updated last activity timestamp: ${new Date(now).toISOString()}`, ); } /** Gets or creates a connection for a specific user */ - public async getUserConnection(serverName: string, user: TUser): Promise { + public async getUserConnection({ + user, + serverName, + flowManager, + tokenMethods, + oauthStart, + oauthEnd, + signal, + }: { + user: TUser; + serverName: string; + flowManager: FlowStateManager; + tokenMethods?: TokenMethods; + oauthStart?: (authURL: string) => Promise; + oauthEnd?: () => Promise; + signal?: AbortSignal; + }): Promise { const userId = user.id; if (!userId) { throw new McpError(ErrorCode.InvalidRequest, `[MCP] User object missing id property`); @@ -232,25 +407,22 @@ export class MCPManager { // Check if user is idle const lastActivity = this.userLastActivity.get(userId); if (lastActivity && now - lastActivity > this.USER_CONNECTION_IDLE_TIMEOUT) { - this.logger.info( - `[MCP][User: ${userId}] User idle for too long. Disconnecting all connections.`, - ); + logger.info(`[MCP][User: ${userId}] User idle for too long. Disconnecting all connections.`); // Disconnect all user connections try { await this.disconnectUserConnections(userId); } catch (err) { - this.logger.error(`[MCP][User: ${userId}] Error disconnecting idle connections:`, err); + logger.error(`[MCP][User: ${userId}] Error disconnecting idle connections:`, err); } connection = undefined; // Force creation of a new connection } else if (connection) { if (await connection.isConnected()) { - this.logger.debug(`[MCP][User: ${userId}][${serverName}] Reusing active connection`); - // Update timestamp on reuse + logger.debug(`[MCP][User: ${userId}][${serverName}] Reusing active connection`); this.updateUserLastActivity(userId); return connection; } else { // Connection exists but is not connected, attempt to remove potentially stale entry - this.logger.warn( + logger.warn( `[MCP][User: ${userId}][${serverName}] Found existing but disconnected connection object. Cleaning up.`, ); this.removeUserConnection(userId, serverName); // Clean up maps @@ -260,7 +432,7 @@ export class MCPManager { // If no valid connection exists, create a new one if (!connection) { - this.logger.info(`[MCP][User: ${userId}][${serverName}] Establishing new connection`); + logger.info(`[MCP][User: ${userId}][${serverName}] Establishing new connection`); } let config = this.mcpConfigs[serverName]; @@ -275,19 +447,122 @@ export class MCPManager { config = { ...(this.processMCPEnv(config, user) ?? 
{}) }; } - connection = new MCPConnection(serverName, config, this.logger, userId); + /** If no in-memory tokens, tokens from persistent storage */ + let tokens: MCPOAuthTokens | null = null; + if (tokenMethods?.findToken) { + try { + /** Refresh function for user-specific connections */ + const refreshTokensFunction = async ( + refreshToken: string, + metadata: { + userId: string; + serverName: string; + identifier: string; + clientInfo?: OAuthClientInformation; + }, + ) => { + /** URL from config since connection doesn't exist yet */ + const serverUrl = (config as t.SSEOptions | t.StreamableHTTPOptions).url; + return await MCPOAuthHandler.refreshOAuthTokens( + refreshToken, + { + serverName: metadata.serverName, + serverUrl, + clientInfo: metadata.clientInfo, + }, + config.oauth, + ); + }; + + /** Flow state to prevent concurrent token operations */ + const tokenFlowId = `tokens:${userId}:${serverName}`; + tokens = await flowManager.createFlowWithHandler( + tokenFlowId, + 'mcp_get_tokens', + async () => { + return await MCPTokenStorage.getTokens({ + userId, + serverName, + findToken: tokenMethods.findToken, + refreshTokens: refreshTokensFunction, + createToken: tokenMethods.createToken, + updateToken: tokenMethods.updateToken, + }); + }, + signal, + ); + } catch (error) { + logger.error( + `[MCP][User: ${userId}][${serverName}] Error loading OAuth tokens from storage`, + error, + ); + } + } + + if (tokens) { + logger.info(`[MCP][User: ${userId}][${serverName}] Loaded OAuth tokens`); + } + + connection = new MCPConnection(serverName, config, userId, tokens); + + connection.on('oauthRequired', async (data) => { + logger.info(`[MCP][User: ${userId}][${serverName}] oauthRequired event received`); + const result = await this.handleOAuthRequired({ + ...data, + flowManager, + oauthStart, + oauthEnd, + }); + + if (result?.tokens && tokenMethods?.createToken) { + try { + connection?.setOAuthTokens(result.tokens); + await MCPTokenStorage.storeTokens({ + userId, + serverName, + tokens: result.tokens, + createToken: tokenMethods.createToken, + updateToken: tokenMethods.updateToken, + findToken: tokenMethods.findToken, + clientInfo: result.clientInfo, + }); + logger.info(`[MCP][User: ${userId}][${serverName}] OAuth tokens saved to storage`); + } catch (error) { + logger.error( + `[MCP][User: ${userId}][${serverName}] Failed to save OAuth tokens to storage`, + error, + ); + } + } + + // Only emit oauthHandled if we actually got tokens (OAuth succeeded) + if (result?.tokens) { + connection?.emit('oauthHandled'); + } else { + // OAuth failed, emit oauthFailed to properly reject the promise + logger.warn( + `[MCP][User: ${userId}][${serverName}] OAuth failed, emitting oauthFailed event`, + ); + connection?.emit('oauthFailed', new Error('OAuth authentication failed')); + } + }); try { + const connectTimeout = config.initTimeout ?? 
30000; const connectionTimeout = new Promise((_, reject) => - setTimeout(() => reject(new Error('Connection timeout')), 30000), + setTimeout( + () => reject(new Error(`Connection timeout after ${connectTimeout}ms`)), + connectTimeout, + ), ); - const connectionAttempt = this.initializeServer( + const connectionAttempt = this.initializeServer({ connection, - `[MCP][User: ${userId}][${serverName}]`, - ); + logPrefix: `[MCP][User: ${userId}][${serverName}]`, + flowManager, + }); await Promise.race([connectionAttempt, connectionTimeout]); - if (!(await connection.isConnected())) { + if (!(await connection?.isConnected())) { throw new Error('Failed to establish connection after initialization attempt.'); } @@ -295,18 +570,16 @@ export class MCPManager { this.userConnections.set(userId, new Map()); } this.userConnections.get(userId)?.set(serverName, connection); - this.logger.info(`[MCP][User: ${userId}][${serverName}] Connection successfully established`); + + logger.info(`[MCP][User: ${userId}][${serverName}] Connection successfully established`); // Update timestamp on creation this.updateUserLastActivity(userId); return connection; } catch (error) { - this.logger.error( - `[MCP][User: ${userId}][${serverName}] Failed to establish connection`, - error, - ); + logger.error(`[MCP][User: ${userId}][${serverName}] Failed to establish connection`, error); // Ensure partial connection state is cleaned up if initialization fails - await connection.disconnect().catch((disconnectError) => { - this.logger.error( + await connection?.disconnect().catch((disconnectError) => { + logger.error( `[MCP][User: ${userId}][${serverName}] Error during cleanup after failed connection`, disconnectError, ); @@ -330,7 +603,7 @@ export class MCPManager { } } - this.logger.debug(`[MCP][User: ${userId}][${serverName}] Removed connection entry.`); + logger.debug(`[MCP][User: ${userId}][${serverName}] Removed connection entry.`); } /** Disconnects and removes a specific user connection */ @@ -338,7 +611,7 @@ export class MCPManager { const userMap = this.userConnections.get(userId); const connection = userMap?.get(serverName); if (connection) { - this.logger.info(`[MCP][User: ${userId}][${serverName}] Disconnecting...`); + logger.info(`[MCP][User: ${userId}][${serverName}] Disconnecting...`); await connection.disconnect(); this.removeUserConnection(userId, serverName); } @@ -349,12 +622,12 @@ export class MCPManager { const userMap = this.userConnections.get(userId); const disconnectPromises: Promise[] = []; if (userMap) { - this.logger.info(`[MCP][User: ${userId}] Disconnecting all servers...`); + logger.info(`[MCP][User: ${userId}] Disconnecting all servers...`); const userServers = Array.from(userMap.keys()); for (const serverName of userServers) { disconnectPromises.push( this.disconnectUserConnection(userId, serverName).catch((error) => { - this.logger.error( + logger.error( `[MCP][User: ${userId}][${serverName}] Error during disconnection:`, error, ); @@ -364,7 +637,7 @@ export class MCPManager { await Promise.allSettled(disconnectPromises); // Ensure user activity timestamp is removed this.userLastActivity.delete(userId); - this.logger.info(`[MCP][User: ${userId}] All connections processed for disconnection.`); + logger.info(`[MCP][User: ${userId}] All connections processed for disconnection.`); } } @@ -378,17 +651,73 @@ export class MCPManager { return this.connections; } + /** Attempts to reconnect an app-level connection if it's disconnected */ + private async isConnectionActive({ + serverName, + connection, + 
flowManager, + skipReconnect = false, + }: { + serverName: string; + connection: MCPConnection; + flowManager: FlowStateManager; + skipReconnect?: boolean; + }): Promise { + if (await connection.isConnected()) { + return true; + } + + if (skipReconnect) { + logger.warn( + `[MCP][${serverName}] App-level connection is disconnected, skipping reconnection attempt`, + ); + return false; + } + + logger.warn( + `[MCP][${serverName}] App-level connection disconnected, attempting to reconnect...`, + ); + + try { + const config = this.mcpConfigs[serverName]; + if (!config) { + logger.error(`[MCP][${serverName}] Configuration not found for reconnection`); + return false; + } + + await this.initializeServer({ + connection, + logPrefix: `[MCP][${serverName}]`, + flowManager, + }); + + if (await connection.isConnected()) { + logger.info(`[MCP][${serverName}] App-level connection successfully reconnected`); + return true; + } else { + logger.warn(`[MCP][${serverName}] App-level connection reconnection failed`); + return false; + } + } catch (error) { + logger.error(`[MCP][${serverName}] Error during app-level connection reconnection:`, error); + return false; + } + } + /** * Maps available tools from all app-level connections into the provided object. * The object is modified in place. */ - public async mapAvailableTools(availableTools: t.LCAvailableTools): Promise { + public async mapAvailableTools( + availableTools: t.LCAvailableTools, + flowManager: FlowStateManager, + ): Promise { for (const [serverName, connection] of this.connections.entries()) { try { - if ((await connection.isConnected()) !== true) { - this.logger.warn( - `[MCP][${serverName}] Connection not established. Skipping tool mapping.`, - ); + /** Attempt to ensure connection is active, with reconnection if needed */ + const isActive = await this.isConnectionActive({ serverName, connection, flowManager }); + if (!isActive) { + logger.warn(`[MCP][${serverName}] Connection not available. Skipping tool mapping.`); continue; } @@ -405,7 +734,7 @@ export class MCPManager { }; } } catch (error) { - this.logger.warn(`[MCP][${serverName}] Error fetching tools for mapping:`, error); + logger.warn(`[MCP][${serverName}] Error fetching tools`, error); } } } @@ -413,19 +742,46 @@ export class MCPManager { /** * Loads tools from all app-level connections into the manifest. */ - public async loadManifestTools(manifestTools: t.LCToolManifest): Promise { + public async loadManifestTools({ + flowManager, + serverToolsCallback, + getServerTools, + }: { + flowManager: FlowStateManager; + serverToolsCallback?: (serverName: string, tools: t.LCManifestTool[]) => Promise; + getServerTools?: (serverName: string) => Promise; + }): Promise { const mcpTools: t.LCManifestTool[] = []; for (const [serverName, connection] of this.connections.entries()) { try { - if ((await connection.isConnected()) !== true) { - this.logger.warn( - `[MCP][${serverName}] Connection not established. 
Skipping manifest loading.`, + /** Attempt to ensure connection is active, with reconnection if needed */ + const isActive = await this.isConnectionActive({ + serverName, + connection, + flowManager, + skipReconnect: true, + }); + if (!isActive) { + logger.warn( + `[MCP][${serverName}] Connection not available for ${serverName} manifest tools.`, ); + if (typeof getServerTools !== 'function') { + logger.warn( + `[MCP][${serverName}] No \`getServerTools\` function provided, skipping tool loading.`, + ); + continue; + } + const serverTools = await getServerTools(serverName); + if (serverTools && serverTools.length > 0) { + logger.info(`[MCP][${serverName}] Loaded tools from cache for manifest`); + mcpTools.push(...serverTools); + } continue; } const tools = await connection.fetchTools(); + const serverTools: t.LCManifestTool[] = []; for (const tool of tools) { const pluginKey = `${tool.name}${CONSTANTS.mcp_delimiter}${serverName}`; const manifestTool: t.LCManifestTool = { @@ -439,13 +795,17 @@ export class MCPManager { manifestTool.chatMenu = false; } mcpTools.push(manifestTool); + serverTools.push(manifestTool); + } + if (typeof serverToolsCallback === 'function') { + await serverToolsCallback(serverName, serverTools); } } catch (error) { - this.logger.error(`[MCP][${serverName}] Error fetching tools for manifest:`, error); + logger.error(`[MCP][${serverName}] Error fetching tools for manifest:`, error); } } - return [...mcpTools, ...manifestTools]; + return mcpTools; } /** @@ -459,13 +819,22 @@ export class MCPManager { provider, toolArguments, options, + tokenMethods, + flowManager, + oauthStart, + oauthEnd, }: { serverName: string; toolName: string; provider: t.Provider; toolArguments?: Record; options?: CallToolOptions; + tokenMethods?: TokenMethods; + flowManager: FlowStateManager; + oauthStart?: (authURL: string) => Promise; + oauthEnd?: () => Promise; }): Promise { + /** User-specific connection */ let connection: MCPConnection | undefined; const { user, ...callOptions } = options ?? {}; const userId = user?.id; @@ -474,10 +843,18 @@ export class MCPManager { try { if (userId && user) { this.updateUserLastActivity(userId); - // Get or create user-specific connection - connection = await this.getUserConnection(serverName, user); + /** Get or create user-specific connection */ + connection = await this.getUserConnection({ + user, + serverName, + flowManager, + tokenMethods, + oauthStart, + oauthEnd, + signal: options?.signal, + }); } else { - // Use app-level connection + /** App-level connection */ connection = this.connections.get(serverName); if (!connection) { throw new McpError( @@ -488,7 +865,7 @@ export class MCPManager { } if (!(await connection.isConnected())) { - // This might happen if getUserConnection failed silently or app connection dropped + /** May happen if getUserConnection failed silently or app connection dropped */ throw new McpError( ErrorCode.InternalError, // Use InternalError for connection issues `${logPrefix} Connection is not active. 
Cannot execute tool ${toolName}.`, @@ -516,7 +893,7 @@ export class MCPManager { return formatToolContent(result, provider); } catch (error) { // Log with context and re-throw or handle as needed - this.logger.error(`${logPrefix}[${toolName}] Tool call failed`, error); + logger.error(`${logPrefix}[${toolName}] Tool call failed`, error); // Rethrowing allows the caller (createMCPTool) to handle the final user message throw error; } @@ -526,7 +903,7 @@ export class MCPManager { public async disconnectServer(serverName: string): Promise { const connection = this.connections.get(serverName); if (connection) { - this.logger.info(`[MCP][${serverName}] Disconnecting...`); + logger.info(`[MCP][${serverName}] Disconnecting...`); await connection.disconnect(); this.connections.delete(serverName); } @@ -534,7 +911,7 @@ export class MCPManager { /** Disconnects all app-level and user-level connections */ public async disconnectAll(): Promise { - this.logger.info('[MCP] Disconnecting all app-level and user-level connections...'); + logger.info('[MCP] Disconnecting all app-level and user-level connections...'); const userDisconnectPromises = Array.from(this.userConnections.keys()).map((userId) => this.disconnectUserConnections(userId), @@ -545,13 +922,13 @@ export class MCPManager { // Disconnect all app-level connections const appDisconnectPromises = Array.from(this.connections.values()).map((connection) => connection.disconnect().catch((error) => { - this.logger.error(`[MCP][${connection.serverName}] Error during disconnectAll:`, error); + logger.error(`[MCP][${connection.serverName}] Error during disconnectAll:`, error); }), ); await Promise.allSettled(appDisconnectPromises); this.connections.clear(); - this.logger.info('[MCP] All connections processed for disconnection.'); + logger.info('[MCP] All connections processed for disconnection.'); } /** Destroys the singleton instance and disconnects all connections */ @@ -559,7 +936,6 @@ export class MCPManager { if (MCPManager.instance) { await MCPManager.instance.disconnectAll(); MCPManager.instance = null; - const logger = MCPManager.getDefaultLogger(); logger.info('[MCP] Manager instance destroyed.'); } } @@ -620,4 +996,102 @@ ${formattedInstructions} Please follow these instructions when using tools from the respective MCP servers.`; } + + /** Handles OAuth authentication requirements */ + private async handleOAuthRequired({ + serverName, + serverUrl, + flowManager, + userId = CONSTANTS.SYSTEM_USER_ID, + oauthStart, + oauthEnd, + }: { + serverName: string; + flowManager: FlowStateManager; + userId?: string; + serverUrl?: string; + oauthStart?: (authURL: string) => Promise; + oauthEnd?: () => Promise; + }): Promise<{ tokens: MCPOAuthTokens | null; clientInfo?: OAuthClientInformation } | null> { + const userPart = isSystemUserId(userId) ? 
'' : `[User: ${userId}]`; + const logPrefix = `[MCP]${userPart}[${serverName}]`; + logger.debug(`${logPrefix} \`handleOAuthRequired\` called with serverUrl: ${serverUrl}`); + + if (!flowManager || !serverUrl) { + logger.error( + `${logPrefix} OAuth required but flow manager not available or server URL missing for ${serverName}`, + ); + logger.warn(`${logPrefix} Please configure OAuth credentials for ${serverName}`); + return null; + } + + try { + const config = this.mcpConfigs[serverName]; + logger.debug(`${logPrefix} Checking for existing OAuth flow for ${serverName}...`); + + /** Flow ID to check if a flow already exists */ + const flowId = MCPOAuthHandler.generateFlowId(userId, serverName); + + /** Check if there's already an ongoing OAuth flow for this flowId */ + const existingFlow = await flowManager.getFlowState(flowId, 'mcp_oauth'); + if (existingFlow && existingFlow.status === 'PENDING') { + logger.debug( + `${logPrefix} OAuth flow already exists for ${flowId}, waiting for completion`, + ); + /** Tokens from existing flow to complete */ + const tokens = await flowManager.createFlow(flowId, 'mcp_oauth'); + if (typeof oauthEnd === 'function') { + await oauthEnd(); + } + logger.info(`${logPrefix} OAuth flow completed, tokens received for ${serverName}`); + + /** Client information from the existing flow metadata */ + const existingMetadata = existingFlow.metadata as unknown as MCPOAuthFlowMetadata; + const clientInfo = existingMetadata?.clientInfo; + + return { tokens, clientInfo }; + } + + logger.debug(`${logPrefix} Initiating new OAuth flow for ${serverName}...`); + const { + authorizationUrl, + flowId: newFlowId, + flowMetadata, + } = await MCPOAuthHandler.initiateOAuthFlow(serverName, serverUrl, userId, config?.oauth); + + if (typeof oauthStart === 'function') { + logger.info(`${logPrefix} OAuth flow started, issued authorization URL to user`); + await oauthStart(authorizationUrl); + } else { + logger.info(` +═══════════════════════════════════════════════════════════════════════ +Please visit the following URL to authenticate: + +${authorizationUrl} + +${logPrefix} Flow ID: ${newFlowId} +═══════════════════════════════════════════════════════════════════════ +`); + } + + /** Tokens from the new flow */ + const tokens = await flowManager.createFlow( + newFlowId, + 'mcp_oauth', + flowMetadata as FlowMetadata, + ); + if (typeof oauthEnd === 'function') { + await oauthEnd(); + } + logger.info(`${logPrefix} OAuth flow completed, tokens received for ${serverName}`); + + /** Client information from the flow metadata */ + const clientInfo = flowMetadata?.clientInfo; + + return { tokens, clientInfo }; + } catch (error) { + logger.error(`${logPrefix} Failed to complete OAuth flow for ${serverName}`, error); + return null; + } + } } diff --git a/packages/api/src/mcp/oauth/handler.ts b/packages/api/src/mcp/oauth/handler.ts new file mode 100644 index 0000000000..ab9bbe46c1 --- /dev/null +++ b/packages/api/src/mcp/oauth/handler.ts @@ -0,0 +1,603 @@ +import { randomBytes } from 'crypto'; +import { logger } from '@librechat/data-schemas'; +import { + discoverOAuthMetadata, + registerClient, + startAuthorization, + exchangeAuthorization, + discoverOAuthProtectedResourceMetadata, +} from '@modelcontextprotocol/sdk/client/auth.js'; +import { OAuthMetadataSchema } from '@modelcontextprotocol/sdk/shared/auth.js'; +import type { MCPOptions } from 'librechat-data-provider'; +import type { FlowStateManager } from '~/flow/manager'; +import type { + OAuthClientInformation, + 
OAuthProtectedResourceMetadata, + MCPOAuthFlowMetadata, + MCPOAuthTokens, + OAuthMetadata, +} from './types'; + +/** Type for the OAuth metadata from the SDK */ +type SDKOAuthMetadata = Parameters[1]['metadata']; + +export class MCPOAuthHandler { + private static readonly FLOW_TYPE = 'mcp_oauth'; + private static readonly FLOW_TTL = 10 * 60 * 1000; // 10 minutes + + /** + * Discovers OAuth metadata from the server + */ + private static async discoverMetadata(serverUrl: string): Promise<{ + metadata: OAuthMetadata; + resourceMetadata?: OAuthProtectedResourceMetadata; + authServerUrl: URL; + }> { + logger.debug(`[MCPOAuth] discoverMetadata called with serverUrl: ${serverUrl}`); + + let authServerUrl = new URL(serverUrl); + let resourceMetadata: OAuthProtectedResourceMetadata | undefined; + + try { + // Try to discover resource metadata first + logger.debug( + `[MCPOAuth] Attempting to discover protected resource metadata from ${serverUrl}`, + ); + resourceMetadata = await discoverOAuthProtectedResourceMetadata(serverUrl); + + if (resourceMetadata?.authorization_servers?.length) { + authServerUrl = new URL(resourceMetadata.authorization_servers[0]); + logger.debug( + `[MCPOAuth] Found authorization server from resource metadata: ${authServerUrl}`, + ); + } else { + logger.debug(`[MCPOAuth] No authorization servers found in resource metadata`); + } + } catch (error) { + logger.debug('[MCPOAuth] Resource metadata discovery failed, continuing with server URL', { + error, + }); + } + + // Discover OAuth metadata + logger.debug(`[MCPOAuth] Discovering OAuth metadata from ${authServerUrl}`); + const rawMetadata = await discoverOAuthMetadata(authServerUrl); + + if (!rawMetadata) { + logger.error(`[MCPOAuth] Failed to discover OAuth metadata from ${authServerUrl}`); + throw new Error('Failed to discover OAuth metadata'); + } + + logger.debug(`[MCPOAuth] OAuth metadata discovered successfully`); + const metadata = await OAuthMetadataSchema.parseAsync(rawMetadata); + + logger.debug(`[MCPOAuth] OAuth metadata parsed successfully`); + return { + metadata: metadata as unknown as OAuthMetadata, + resourceMetadata, + authServerUrl, + }; + } + + /** + * Registers an OAuth client dynamically + */ + private static async registerOAuthClient( + serverUrl: string, + metadata: OAuthMetadata, + resourceMetadata?: OAuthProtectedResourceMetadata, + redirectUri?: string, + ): Promise { + logger.debug(`[MCPOAuth] Starting client registration for ${serverUrl}, server metadata:`, { + grant_types_supported: metadata.grant_types_supported, + response_types_supported: metadata.response_types_supported, + token_endpoint_auth_methods_supported: metadata.token_endpoint_auth_methods_supported, + scopes_supported: metadata.scopes_supported, + }); + + /** Client metadata based on what the server supports */ + const clientMetadata = { + client_name: 'LibreChat MCP Client', + redirect_uris: [redirectUri || this.getDefaultRedirectUri()], + grant_types: ['authorization_code'] as string[], + response_types: ['code'] as string[], + token_endpoint_auth_method: 'client_secret_basic', + scope: undefined as string | undefined, + }; + + const supportedGrantTypes = metadata.grant_types_supported || ['authorization_code']; + const requestedGrantTypes = ['authorization_code']; + + if (supportedGrantTypes.includes('refresh_token')) { + requestedGrantTypes.push('refresh_token'); + logger.debug( + `[MCPOAuth] Server ${serverUrl} supports \`refresh_token\` grant type, adding to request`, + ); + } else { + logger.debug(`[MCPOAuth] Server 
${serverUrl} does not support \`refresh_token\` grant type`); + } + clientMetadata.grant_types = requestedGrantTypes; + + clientMetadata.response_types = metadata.response_types_supported || ['code']; + + if (metadata.token_endpoint_auth_methods_supported) { + // Prefer client_secret_basic if supported, otherwise use the first supported method + if (metadata.token_endpoint_auth_methods_supported.includes('client_secret_basic')) { + clientMetadata.token_endpoint_auth_method = 'client_secret_basic'; + } else if (metadata.token_endpoint_auth_methods_supported.includes('client_secret_post')) { + clientMetadata.token_endpoint_auth_method = 'client_secret_post'; + } else if (metadata.token_endpoint_auth_methods_supported.includes('none')) { + clientMetadata.token_endpoint_auth_method = 'none'; + } else { + clientMetadata.token_endpoint_auth_method = + metadata.token_endpoint_auth_methods_supported[0]; + } + } + + const availableScopes = resourceMetadata?.scopes_supported || metadata.scopes_supported; + if (availableScopes) { + clientMetadata.scope = availableScopes.join(' '); + } + + logger.debug(`[MCPOAuth] Registering client for ${serverUrl} with metadata:`, clientMetadata); + + const clientInfo = await registerClient(serverUrl, { + metadata: metadata as unknown as SDKOAuthMetadata, + clientMetadata, + }); + + logger.debug(`[MCPOAuth] Client registered successfully for ${serverUrl}:`, { + client_id: clientInfo.client_id, + has_client_secret: !!clientInfo.client_secret, + grant_types: clientInfo.grant_types, + scope: clientInfo.scope, + }); + + return clientInfo; + } + + /** + * Initiates the OAuth flow for an MCP server + */ + static async initiateOAuthFlow( + serverName: string, + serverUrl: string, + userId: string, + config: MCPOptions['oauth'] | undefined, + ): Promise<{ authorizationUrl: string; flowId: string; flowMetadata: MCPOAuthFlowMetadata }> { + logger.debug(`[MCPOAuth] initiateOAuthFlow called for ${serverName} with URL: ${serverUrl}`); + + const flowId = this.generateFlowId(userId, serverName); + const state = this.generateState(); + + logger.debug(`[MCPOAuth] Generated flowId: ${flowId}, state: ${state}`); + + try { + // Check if we have pre-configured OAuth settings + if (config?.authorization_url && config?.token_url && config?.client_id) { + logger.debug(`[MCPOAuth] Using pre-configured OAuth settings for ${serverName}`); + /** Metadata based on pre-configured settings */ + const metadata: OAuthMetadata = { + authorization_endpoint: config.authorization_url, + token_endpoint: config.token_url, + issuer: serverUrl, + scopes_supported: config.scope?.split(' '), + }; + + const clientInfo: OAuthClientInformation = { + client_id: config.client_id, + client_secret: config.client_secret, + redirect_uris: [config.redirect_uri || this.getDefaultRedirectUri(serverName)], + scope: config.scope, + }; + + logger.debug(`[MCPOAuth] Starting authorization with pre-configured settings`); + const { authorizationUrl, codeVerifier } = await startAuthorization(serverUrl, { + metadata: metadata as unknown as SDKOAuthMetadata, + clientInformation: clientInfo, + redirectUrl: clientInfo.redirect_uris?.[0] || this.getDefaultRedirectUri(serverName), + scope: config.scope, + }); + + /** Add state parameter with flowId to the authorization URL */ + authorizationUrl.searchParams.set('state', flowId); + logger.debug(`[MCPOAuth] Added state parameter to authorization URL`); + + const flowMetadata: MCPOAuthFlowMetadata = { + serverName, + userId, + serverUrl, + state, + codeVerifier, + clientInfo, + 
metadata, + }; + + logger.debug(`[MCPOAuth] Authorization URL generated: ${authorizationUrl.toString()}`); + return { + authorizationUrl: authorizationUrl.toString(), + flowId, + flowMetadata, + }; + } + + logger.debug(`[MCPOAuth] Starting auto-discovery of OAuth metadata from ${serverUrl}`); + const { metadata, resourceMetadata, authServerUrl } = await this.discoverMetadata(serverUrl); + + logger.debug(`[MCPOAuth] OAuth metadata discovered, auth server URL: ${authServerUrl}`); + + /** Dynamic client registration based on the discovered metadata */ + const redirectUri = config?.redirect_uri || this.getDefaultRedirectUri(serverName); + logger.debug(`[MCPOAuth] Registering OAuth client with redirect URI: ${redirectUri}`); + + const clientInfo = await this.registerOAuthClient( + authServerUrl.toString(), + metadata, + resourceMetadata, + redirectUri, + ); + + logger.debug(`[MCPOAuth] Client registered with ID: ${clientInfo.client_id}`); + + /** Authorization Scope */ + const scope = + config?.scope || + resourceMetadata?.scopes_supported?.join(' ') || + metadata.scopes_supported?.join(' '); + + logger.debug(`[MCPOAuth] Starting authorization with scope: ${scope}`); + + let authorizationUrl: URL; + let codeVerifier: string; + + try { + logger.debug(`[MCPOAuth] Calling startAuthorization...`); + const authResult = await startAuthorization(serverUrl, { + metadata: metadata as unknown as SDKOAuthMetadata, + clientInformation: clientInfo, + redirectUrl: redirectUri, + scope, + }); + + authorizationUrl = authResult.authorizationUrl; + codeVerifier = authResult.codeVerifier; + + logger.debug(`[MCPOAuth] startAuthorization completed successfully`); + logger.debug(`[MCPOAuth] Authorization URL: ${authorizationUrl.toString()}`); + + /** Add state parameter with flowId to the authorization URL */ + authorizationUrl.searchParams.set('state', flowId); + logger.debug(`[MCPOAuth] Added state parameter to authorization URL`); + } catch (error) { + logger.error(`[MCPOAuth] startAuthorization failed:`, error); + throw error; + } + + const flowMetadata: MCPOAuthFlowMetadata = { + serverName, + userId, + serverUrl, + state, + codeVerifier, + clientInfo, + metadata, + resourceMetadata, + }; + + logger.debug( + `[MCPOAuth] Authorization URL generated for ${serverName}: ${authorizationUrl.toString()}`, + ); + + const result = { + authorizationUrl: authorizationUrl.toString(), + flowId, + flowMetadata, + }; + + logger.debug( + `[MCPOAuth] Returning from initiateOAuthFlow with result ${flowId} for ${serverName}`, + result, + ); + return result; + } catch (error) { + logger.error('[MCPOAuth] Failed to initiate OAuth flow', { error, serverName, userId }); + throw error; + } + } + + /** + * Completes the OAuth flow by exchanging the authorization code for tokens + */ + static async completeOAuthFlow( + flowId: string, + authorizationCode: string, + flowManager: FlowStateManager, + ): Promise { + try { + /** Flow state which contains our metadata */ + const flowState = await flowManager.getFlowState(flowId, this.FLOW_TYPE); + if (!flowState) { + throw new Error('OAuth flow not found'); + } + + const flowMetadata = flowState.metadata as MCPOAuthFlowMetadata; + if (!flowMetadata) { + throw new Error('OAuth flow metadata not found'); + } + + const metadata = flowMetadata; + if (!metadata.metadata || !metadata.clientInfo || !metadata.codeVerifier) { + throw new Error('Invalid flow metadata'); + } + + const tokens = await exchangeAuthorization(metadata.serverUrl, { + metadata: metadata.metadata as unknown as SDKOAuthMetadata, 
+ clientInformation: metadata.clientInfo, + authorizationCode, + codeVerifier: metadata.codeVerifier, + redirectUri: metadata.clientInfo.redirect_uris?.[0] || this.getDefaultRedirectUri(), + }); + + logger.debug('[MCPOAuth] Raw tokens from exchange:', { + access_token: tokens.access_token ? '[REDACTED]' : undefined, + refresh_token: tokens.refresh_token ? '[REDACTED]' : undefined, + expires_in: tokens.expires_in, + token_type: tokens.token_type, + scope: tokens.scope, + }); + + const mcpTokens: MCPOAuthTokens = { + ...tokens, + obtained_at: Date.now(), + expires_at: tokens.expires_in ? Date.now() + tokens.expires_in * 1000 : undefined, + }; + + /** Now complete the flow with the tokens */ + await flowManager.completeFlow(flowId, this.FLOW_TYPE, mcpTokens); + + return mcpTokens; + } catch (error) { + logger.error('[MCPOAuth] Failed to complete OAuth flow', { error, flowId }); + await flowManager.failFlow(flowId, this.FLOW_TYPE, error as Error); + throw error; + } + } + + /** + * Gets the OAuth flow metadata + */ + static async getFlowState( + flowId: string, + flowManager: FlowStateManager, + ): Promise { + const flowState = await flowManager.getFlowState(flowId, this.FLOW_TYPE); + if (!flowState) { + return null; + } + return flowState.metadata as MCPOAuthFlowMetadata; + } + + /** + * Generates a flow ID for the OAuth flow + * @returns Consistent ID so concurrent requests share the same flow + */ + public static generateFlowId(userId: string, serverName: string): string { + return `${userId}:${serverName}`; + } + + /** + * Generates a secure state parameter + */ + private static generateState(): string { + return randomBytes(32).toString('base64url'); + } + + /** + * Gets the default redirect URI for a server + */ + private static getDefaultRedirectUri(serverName?: string): string { + const baseUrl = process.env.DOMAIN_SERVER || 'http://localhost:3080'; + return serverName + ? 
`${baseUrl}/api/mcp/${serverName}/oauth/callback` + : `${baseUrl}/api/mcp/oauth/callback`; + } + + /** + * Refreshes OAuth tokens using a refresh token + */ + static async refreshOAuthTokens( + refreshToken: string, + metadata: { serverName: string; serverUrl?: string; clientInfo?: OAuthClientInformation }, + config?: MCPOptions['oauth'], + ): Promise { + logger.debug(`[MCPOAuth] Refreshing tokens for ${metadata.serverName}`); + + try { + /** If we have stored client information from the original flow, use that first */ + if (metadata.clientInfo?.client_id) { + logger.debug( + `[MCPOAuth] Using stored client information for token refresh for ${metadata.serverName}`, + ); + logger.debug( + `[MCPOAuth] Client ID: ${metadata.clientInfo.client_id} for ${metadata.serverName}`, + ); + logger.debug( + `[MCPOAuth] Has client secret: ${!!metadata.clientInfo.client_secret} for ${metadata.serverName}`, + ); + logger.debug(`[MCPOAuth] Stored client info for ${metadata.serverName}:`, { + client_id: metadata.clientInfo.client_id, + has_client_secret: !!metadata.clientInfo.client_secret, + grant_types: metadata.clientInfo.grant_types, + scope: metadata.clientInfo.scope, + }); + + /** Use the stored client information and metadata to determine the token URL */ + let tokenUrl: string; + if (config?.token_url) { + tokenUrl = config.token_url; + } else if (!metadata.serverUrl) { + throw new Error('No token URL available for refresh'); + } else { + /** Auto-discover OAuth configuration for refresh */ + const { metadata: oauthMetadata } = await this.discoverMetadata(metadata.serverUrl); + if (!oauthMetadata.token_endpoint) { + throw new Error('No token endpoint found in OAuth metadata'); + } + tokenUrl = oauthMetadata.token_endpoint; + } + + const body = new URLSearchParams({ + grant_type: 'refresh_token', + refresh_token: refreshToken, + }); + + /** Add scope if available */ + if (metadata.clientInfo.scope) { + body.append('scope', metadata.clientInfo.scope); + } + + const headers: HeadersInit = { + 'Content-Type': 'application/x-www-form-urlencoded', + Accept: 'application/json', + }; + + /** Use client_secret for authentication if available */ + if (metadata.clientInfo.client_secret) { + const clientAuth = Buffer.from( + `${metadata.clientInfo.client_id}:${metadata.clientInfo.client_secret}`, + ).toString('base64'); + headers['Authorization'] = `Basic ${clientAuth}`; + } else { + /** For public clients, client_id must be in the body */ + body.append('client_id', metadata.clientInfo.client_id); + } + + logger.debug(`[MCPOAuth] Refresh request to: ${tokenUrl}`, { + body: body.toString(), + headers, + }); + + const response = await fetch(tokenUrl, { + method: 'POST', + headers, + body, + }); + + if (!response.ok) { + const errorText = await response.text(); + throw new Error( + `Token refresh failed: ${response.status} ${response.statusText} - ${errorText}`, + ); + } + + const tokens = await response.json(); + + return { + ...tokens, + obtained_at: Date.now(), + expires_at: tokens.expires_in ? Date.now() + tokens.expires_in * 1000 : undefined, + }; + } + + // Fallback: If we have pre-configured OAuth settings, use them + if (config?.token_url && config?.client_id) { + logger.debug(`[MCPOAuth] Using pre-configured OAuth settings for token refresh`); + + const tokenUrl = new URL(config.token_url); + const clientAuth = config.client_secret + ? 
Buffer.from(`${config.client_id}:${config.client_secret}`).toString('base64') + : null; + + const body = new URLSearchParams({ + grant_type: 'refresh_token', + refresh_token: refreshToken, + }); + + if (config.scope) { + body.append('scope', config.scope); + } + + const headers: HeadersInit = { + 'Content-Type': 'application/x-www-form-urlencoded', + Accept: 'application/json', + }; + + if (clientAuth) { + headers['Authorization'] = `Basic ${clientAuth}`; + } else { + // Use client_id in body for public clients + body.append('client_id', config.client_id); + } + + const response = await fetch(tokenUrl, { + method: 'POST', + headers, + body, + }); + + if (!response.ok) { + const errorText = await response.text(); + throw new Error( + `Token refresh failed: ${response.status} ${response.statusText} - ${errorText}`, + ); + } + + const tokens = await response.json(); + + return { + ...tokens, + obtained_at: Date.now(), + expires_at: tokens.expires_in ? Date.now() + tokens.expires_in * 1000 : undefined, + }; + } + + /** For auto-discovered OAuth, we need the server URL */ + if (!metadata.serverUrl) { + throw new Error('Server URL required for auto-discovered OAuth token refresh'); + } + + /** Auto-discover OAuth configuration for refresh */ + const { metadata: oauthMetadata } = await this.discoverMetadata(metadata.serverUrl); + + if (!oauthMetadata.token_endpoint) { + throw new Error('No token endpoint found in OAuth metadata'); + } + + const tokenUrl = new URL(oauthMetadata.token_endpoint); + + const body = new URLSearchParams({ + grant_type: 'refresh_token', + refresh_token: refreshToken, + }); + + const headers: HeadersInit = { + 'Content-Type': 'application/x-www-form-urlencoded', + Accept: 'application/json', + }; + + const response = await fetch(tokenUrl, { + method: 'POST', + headers, + body, + }); + + if (!response.ok) { + const errorText = await response.text(); + throw new Error( + `Token refresh failed: ${response.status} ${response.statusText} - ${errorText}`, + ); + } + + const tokens = await response.json(); + + return { + ...tokens, + obtained_at: Date.now(), + expires_at: tokens.expires_in ? 
Date.now() + tokens.expires_in * 1000 : undefined, + }; + } catch (error) { + logger.error(`[MCPOAuth] Failed to refresh tokens for ${metadata.serverName}`, error); + throw error; + } + } +} diff --git a/packages/api/src/mcp/oauth/index.ts b/packages/api/src/mcp/oauth/index.ts new file mode 100644 index 0000000000..ff82bd92f7 --- /dev/null +++ b/packages/api/src/mcp/oauth/index.ts @@ -0,0 +1,3 @@ +export * from './types'; +export * from './handler'; +export * from './tokens'; diff --git a/packages/api/src/mcp/oauth/tokens.ts b/packages/api/src/mcp/oauth/tokens.ts new file mode 100644 index 0000000000..4aeb11482a --- /dev/null +++ b/packages/api/src/mcp/oauth/tokens.ts @@ -0,0 +1,382 @@ +import { logger } from '@librechat/data-schemas'; +import type { OAuthTokens, OAuthClientInformation } from '@modelcontextprotocol/sdk/shared/auth.js'; +import type { TokenMethods, IToken } from '@librechat/data-schemas'; +import type { MCPOAuthTokens, ExtendedOAuthTokens } from './types'; +import { encryptV2, decryptV2 } from '~/crypto'; +import { isSystemUserId } from '~/mcp/enum'; + +interface StoreTokensParams { + userId: string; + serverName: string; + tokens: OAuthTokens | ExtendedOAuthTokens | MCPOAuthTokens; + createToken: TokenMethods['createToken']; + updateToken?: TokenMethods['updateToken']; + findToken?: TokenMethods['findToken']; + clientInfo?: OAuthClientInformation; + /** Optional: Pass existing token state to avoid duplicate DB calls */ + existingTokens?: { + accessToken?: IToken | null; + refreshToken?: IToken | null; + clientInfoToken?: IToken | null; + }; +} + +interface GetTokensParams { + userId: string; + serverName: string; + findToken: TokenMethods['findToken']; + refreshTokens?: ( + refreshToken: string, + metadata: { userId: string; serverName: string; identifier: string }, + ) => Promise; + createToken?: TokenMethods['createToken']; + updateToken?: TokenMethods['updateToken']; +} + +export class MCPTokenStorage { + static getLogPrefix(userId: string, serverName: string): string { + return isSystemUserId(userId) + ? `[MCP][${serverName}]` + : `[MCP][User: ${userId}][${serverName}]`; + } + + /** + * Stores OAuth tokens for an MCP server + * + * @param params.existingTokens - Optional: Pass existing token state to avoid duplicate DB calls. + * This is useful when refreshing tokens, as getTokens() already has the token state. + */ + static async storeTokens({ + userId, + serverName, + tokens, + createToken, + updateToken, + findToken, + clientInfo, + existingTokens, + }: StoreTokensParams): Promise { + const logPrefix = this.getLogPrefix(userId, serverName); + + try { + const identifier = `mcp:${serverName}`; + + // Encrypt and store access token + const encryptedAccessToken = await encryptV2(tokens.access_token); + + logger.debug( + `${logPrefix} Token expires_in: ${'expires_in' in tokens ? tokens.expires_in : 'N/A'}, expires_at: ${'expires_at' in tokens ? 
tokens.expires_at : 'N/A'}`, + ); + + // Handle both expires_in and expires_at formats + let accessTokenExpiry: Date; + if ('expires_at' in tokens && tokens.expires_at) { + /** MCPOAuthTokens format - already has calculated expiry */ + logger.debug(`${logPrefix} Using expires_at: ${tokens.expires_at}`); + accessTokenExpiry = new Date(tokens.expires_at); + } else if (tokens.expires_in) { + /** Standard OAuthTokens format - calculate expiry */ + logger.debug(`${logPrefix} Using expires_in: ${tokens.expires_in}`); + accessTokenExpiry = new Date(Date.now() + tokens.expires_in * 1000); + } else { + /** No expiry provided - default to 1 year */ + logger.debug(`${logPrefix} No expiry provided, using default`); + accessTokenExpiry = new Date(Date.now() + 365 * 24 * 60 * 60 * 1000); + } + + logger.debug(`${logPrefix} Calculated expiry date: ${accessTokenExpiry.toISOString()}`); + logger.debug( + `${logPrefix} Date object: ${JSON.stringify({ + time: accessTokenExpiry.getTime(), + valid: !isNaN(accessTokenExpiry.getTime()), + iso: accessTokenExpiry.toISOString(), + })}`, + ); + + // Ensure the date is valid before passing to createToken + if (isNaN(accessTokenExpiry.getTime())) { + logger.error(`${logPrefix} Invalid expiry date calculated, using default`); + accessTokenExpiry = new Date(Date.now() + 365 * 24 * 60 * 60 * 1000); + } + + // Calculate expiresIn (seconds from now) + const expiresIn = Math.floor((accessTokenExpiry.getTime() - Date.now()) / 1000); + + const accessTokenData = { + userId, + type: 'mcp_oauth', + identifier, + token: encryptedAccessToken, + expiresIn: expiresIn > 0 ? expiresIn : 365 * 24 * 60 * 60, // Default to 1 year if negative + }; + + // Check if token already exists and update if it does + if (findToken && updateToken) { + // Use provided existing token state if available, otherwise look it up + const existingToken = + existingTokens?.accessToken !== undefined + ? existingTokens.accessToken + : await findToken({ userId, identifier }); + + if (existingToken) { + await updateToken({ userId, identifier }, accessTokenData); + logger.debug(`${logPrefix} Updated existing access token`); + } else { + await createToken(accessTokenData); + logger.debug(`${logPrefix} Created new access token`); + } + } else { + // Create new token if it's initial store or update methods not provided + await createToken(accessTokenData); + logger.debug(`${logPrefix} Created access token (no update methods available)`); + } + + // Store refresh token if available + if (tokens.refresh_token) { + const encryptedRefreshToken = await encryptV2(tokens.refresh_token); + const extendedTokens = tokens as ExtendedOAuthTokens; + const refreshTokenExpiry = extendedTokens.refresh_token_expires_in + ? new Date(Date.now() + extendedTokens.refresh_token_expires_in * 1000) + : new Date(Date.now() + 365 * 24 * 60 * 60 * 1000); // Default to 1 year + + /** Calculated expiresIn for refresh token */ + const refreshExpiresIn = Math.floor((refreshTokenExpiry.getTime() - Date.now()) / 1000); + + const refreshTokenData = { + userId, + type: 'mcp_oauth_refresh', + identifier: `${identifier}:refresh`, + token: encryptedRefreshToken, + expiresIn: refreshExpiresIn > 0 ? refreshExpiresIn : 365 * 24 * 60 * 60, + }; + + // Check if refresh token already exists and update if it does + if (findToken && updateToken) { + // Use provided existing token state if available, otherwise look it up + const existingRefreshToken = + existingTokens?.refreshToken !== undefined + ? 
existingTokens.refreshToken + : await findToken({ + userId, + identifier: `${identifier}:refresh`, + }); + + if (existingRefreshToken) { + await updateToken({ userId, identifier: `${identifier}:refresh` }, refreshTokenData); + logger.debug(`${logPrefix} Updated existing refresh token`); + } else { + await createToken(refreshTokenData); + logger.debug(`${logPrefix} Created new refresh token`); + } + } else { + await createToken(refreshTokenData); + logger.debug(`${logPrefix} Created refresh token (no update methods available)`); + } + } + + /** Store client information if provided */ + if (clientInfo) { + logger.debug(`${logPrefix} Storing client info:`, { + client_id: clientInfo.client_id, + has_client_secret: !!clientInfo.client_secret, + }); + const encryptedClientInfo = await encryptV2(JSON.stringify(clientInfo)); + + const clientInfoData = { + userId, + type: 'mcp_oauth_client', + identifier: `${identifier}:client`, + token: encryptedClientInfo, + expiresIn: 365 * 24 * 60 * 60, + }; + + // Check if client info already exists and update if it does + if (findToken && updateToken) { + // Use provided existing token state if available, otherwise look it up + const existingClientInfo = + existingTokens?.clientInfoToken !== undefined + ? existingTokens.clientInfoToken + : await findToken({ + userId, + identifier: `${identifier}:client`, + }); + + if (existingClientInfo) { + await updateToken({ userId, identifier: `${identifier}:client` }, clientInfoData); + logger.debug(`${logPrefix} Updated existing client info`); + } else { + await createToken(clientInfoData); + logger.debug(`${logPrefix} Created new client info`); + } + } else { + await createToken(clientInfoData); + logger.debug(`${logPrefix} Created client info (no update methods available)`); + } + } + + logger.debug(`${logPrefix} Stored OAuth tokens`); + } catch (error) { + const logPrefix = this.getLogPrefix(userId, serverName); + logger.error(`${logPrefix} Failed to store tokens`, error); + throw error; + } + } + + /** + * Retrieves OAuth tokens for an MCP server + */ + static async getTokens({ + userId, + serverName, + findToken, + createToken, + updateToken, + refreshTokens, + }: GetTokensParams): Promise { + const logPrefix = this.getLogPrefix(userId, serverName); + + try { + const identifier = `mcp:${serverName}`; + + // Get access token + const accessTokenData = await findToken({ + userId, + type: 'mcp_oauth', + identifier, + }); + + /** Check if access token is missing or expired */ + const isMissing = !accessTokenData; + const isExpired = accessTokenData?.expiresAt && new Date() >= accessTokenData.expiresAt; + + if (isMissing || isExpired) { + logger.info(`${logPrefix} Access token ${isMissing ? 'missing' : 'expired'}`); + + /** Refresh data if we have a refresh token and refresh function */ + const refreshTokenData = await findToken({ + userId, + type: 'mcp_oauth_refresh', + identifier: `${identifier}:refresh`, + }); + + if (!refreshTokenData) { + logger.info( + `${logPrefix} Access token ${isMissing ? 'missing' : 'expired'} and no refresh token available`, + ); + return null; + } + + if (!refreshTokens) { + logger.warn( + `${logPrefix} Access token ${isMissing ? 'missing' : 'expired'}, refresh token available but no \`refreshTokens\` provided`, + ); + return null; + } + + if (!createToken) { + logger.warn( + `${logPrefix} Access token ${isMissing ? 
'missing' : 'expired'}, refresh token available but no \`createToken\` function provided`, + ); + return null; + } + + try { + logger.info(`${logPrefix} Attempting to refresh token`); + const decryptedRefreshToken = await decryptV2(refreshTokenData.token); + + /** Client information if available */ + let clientInfo; + let clientInfoData; + try { + clientInfoData = await findToken({ + userId, + type: 'mcp_oauth_client', + identifier: `${identifier}:client`, + }); + if (clientInfoData) { + const decryptedClientInfo = await decryptV2(clientInfoData.token); + clientInfo = JSON.parse(decryptedClientInfo); + logger.debug(`${logPrefix} Retrieved client info:`, { + client_id: clientInfo.client_id, + has_client_secret: !!clientInfo.client_secret, + }); + } + } catch { + logger.debug(`${logPrefix} No client info found`); + } + + const metadata = { + userId, + serverName, + identifier, + clientInfo, + }; + + const newTokens = await refreshTokens(decryptedRefreshToken, metadata); + + // Store the refreshed tokens (handles both create and update) + // Pass existing token state to avoid duplicate DB calls + await this.storeTokens({ + userId, + serverName, + tokens: newTokens, + createToken, + updateToken, + findToken, + clientInfo, + existingTokens: { + accessToken: accessTokenData, // We know this is expired/missing + refreshToken: refreshTokenData, // We already have this + clientInfoToken: clientInfoData, // We already looked this up + }, + }); + + logger.info(`${logPrefix} Successfully refreshed and stored OAuth tokens`); + return newTokens; + } catch (refreshError) { + logger.error(`${logPrefix} Failed to refresh tokens`, refreshError); + // Check if it's an unauthorized_client error (refresh not supported) + const errorMessage = + refreshError instanceof Error ? refreshError.message : String(refreshError); + if (errorMessage.includes('unauthorized_client')) { + logger.info( + `${logPrefix} Server does not support refresh tokens for this client. 
New authentication required.`, + ); + } + return null; + } + } + + // If we reach here, access token should exist and be valid + if (!accessTokenData) { + return null; + } + + const decryptedAccessToken = await decryptV2(accessTokenData.token); + + /** Get refresh token if available */ + const refreshTokenData = await findToken({ + userId, + type: 'mcp_oauth_refresh', + identifier: `${identifier}:refresh`, + }); + + const tokens: MCPOAuthTokens = { + access_token: decryptedAccessToken, + token_type: 'Bearer', + obtained_at: accessTokenData.createdAt.getTime(), + expires_at: accessTokenData.expiresAt?.getTime(), + }; + + if (refreshTokenData) { + tokens.refresh_token = await decryptV2(refreshTokenData.token); + } + + logger.debug(`${logPrefix} Loaded existing OAuth tokens from storage`); + return tokens; + } catch (error) { + logger.error(`${logPrefix} Failed to retrieve tokens`, error); + return null; + } + } +} diff --git a/packages/api/src/mcp/oauth/types.ts b/packages/api/src/mcp/oauth/types.ts new file mode 100644 index 0000000000..7fa9d69921 --- /dev/null +++ b/packages/api/src/mcp/oauth/types.ts @@ -0,0 +1,98 @@ +import type { OAuthTokens } from '@modelcontextprotocol/sdk/shared/auth.js'; +import type { FlowMetadata } from '~/flow/types'; + +export interface OAuthMetadata { + /** OAuth authorization endpoint */ + authorization_endpoint: string; + /** OAuth token endpoint */ + token_endpoint: string; + /** OAuth issuer */ + issuer?: string; + /** Supported scopes */ + scopes_supported?: string[]; + /** Response types supported */ + response_types_supported?: string[]; + /** Grant types supported */ + grant_types_supported?: string[]; + /** Token endpoint auth methods supported */ + token_endpoint_auth_methods_supported?: string[]; + /** Code challenge methods supported */ + code_challenge_methods_supported?: string[]; +} + +export interface OAuthProtectedResourceMetadata { + /** Resource identifier */ + resource: string; + /** Authorization servers */ + authorization_servers?: string[]; + /** Scopes supported by the resource */ + scopes_supported?: string[]; +} + +export interface OAuthClientInformation { + /** Client ID */ + client_id: string; + /** Client secret (optional for public clients) */ + client_secret?: string; + /** Client name */ + client_name?: string; + /** Redirect URIs */ + redirect_uris?: string[]; + /** Grant types */ + grant_types?: string[]; + /** Response types */ + response_types?: string[]; + /** Scope */ + scope?: string; + /** Token endpoint auth method */ + token_endpoint_auth_method?: string; +} + +export interface MCPOAuthState { + /** Current step in the OAuth flow */ + step: 'discovery' | 'registration' | 'authorization' | 'token_exchange' | 'complete' | 'error'; + /** Server name */ + serverName: string; + /** User ID */ + userId: string; + /** OAuth metadata from discovery */ + metadata?: OAuthMetadata; + /** Resource metadata */ + resourceMetadata?: OAuthProtectedResourceMetadata; + /** Client information */ + clientInfo?: OAuthClientInformation; + /** Authorization URL */ + authorizationUrl?: string; + /** Code verifier for PKCE */ + codeVerifier?: string; + /** State parameter for OAuth flow */ + state?: string; + /** Error information */ + error?: string; + /** Timestamp */ + timestamp: number; +} + +export interface MCPOAuthFlowMetadata extends FlowMetadata { + serverName: string; + userId: string; + serverUrl: string; + state: string; + codeVerifier?: string; + clientInfo?: OAuthClientInformation; + metadata?: OAuthMetadata; + resourceMetadata?: 
OAuthProtectedResourceMetadata; +} + +export interface MCPOAuthTokens extends OAuthTokens { + /** When the tokens were obtained */ + obtained_at: number; + /** Calculated expiry time */ + expires_at?: number; +} + +/** Extended OAuth tokens that may include refresh token expiry */ +export interface ExtendedOAuthTokens extends OAuthTokens { + /** Refresh token expiry in seconds (non-standard, some providers include this) */ + refresh_token_expires_in?: number; +} diff --git a/packages/api/src/oauth/index.ts b/packages/api/src/oauth/index.ts new file mode 100644 index 0000000000..e56053c166 --- /dev/null +++ b/packages/api/src/oauth/index.ts @@ -0,0 +1 @@ +export * from './tokens'; diff --git a/packages/api/src/oauth/tokens.ts b/packages/api/src/oauth/tokens.ts new file mode 100644 index 0000000000..8489bdcb97 --- /dev/null +++ b/packages/api/src/oauth/tokens.ts @@ -0,0 +1,324 @@ +import axios from 'axios'; +import { logger } from '@librechat/data-schemas'; +import { TokenExchangeMethodEnum } from 'librechat-data-provider'; +import type { TokenMethods } from '@librechat/data-schemas'; +import type { AxiosError } from 'axios'; +import { encryptV2, decryptV2 } from '~/crypto'; +import { logAxiosError } from '~/utils'; + +export function createHandleOAuthToken({ + findToken, + updateToken, + createToken, +}: { + findToken: TokenMethods['findToken']; + updateToken: TokenMethods['updateToken']; + createToken: TokenMethods['createToken']; +}) { + /** + * Handles the OAuth token by creating or updating the token. + * @param fields + * @param fields.userId - The user's ID. + * @param fields.token - The full token to store. + * @param fields.identifier - Unique, alternative identifier for the token. + * @param fields.expiresIn - The number of seconds until the token expires. + * @param fields.metadata - Additional metadata to store with the token. + * @param [fields.type="oauth"] - The type of token. Default is 'oauth'. + */ + return async function handleOAuthToken({ + token, + userId, + identifier, + expiresIn, + metadata, + type = 'oauth', + }: { + token: string; + userId: string; + identifier: string; + expiresIn?: number | string | null; + metadata?: Record; + type?: string; + }) { + const encrypedToken = await encryptV2(token); + let expiresInNumber = 3600; + if (typeof expiresIn === 'number') { + expiresInNumber = expiresIn; + } else if (expiresIn != null) { + expiresInNumber = parseInt(expiresIn, 10) || 3600; + } + const tokenData = { + type, + userId, + metadata, + identifier, + token: encrypedToken, + expiresIn: expiresInNumber, + }; + + const existingToken = await findToken({ userId, identifier }); + if (existingToken) { + return await updateToken({ identifier }, tokenData); + } else { + return await createToken(tokenData); + } + }; +} + +/** + * Processes the access tokens and stores them in the database. 
+ * @param tokenData + * @param tokenData.access_token + * @param tokenData.expires_in + * @param [tokenData.refresh_token] + * @param [tokenData.refresh_token_expires_in] + * @param metadata + * @param metadata.userId + * @param metadata.identifier + */ +async function processAccessTokens( + tokenData: { + access_token: string; + expires_in: number; + refresh_token?: string; + refresh_token_expires_in?: number; + }, + { userId, identifier }: { userId: string; identifier: string }, + { + findToken, + updateToken, + createToken, + }: { + findToken: TokenMethods['findToken']; + updateToken: TokenMethods['updateToken']; + createToken: TokenMethods['createToken']; + }, +) { + const { access_token, expires_in = 3600, refresh_token, refresh_token_expires_in } = tokenData; + if (!access_token) { + logger.error('Access token not found: ', tokenData); + throw new Error('Access token not found'); + } + const handleOAuthToken = createHandleOAuthToken({ + findToken, + updateToken, + createToken, + }); + await handleOAuthToken({ + identifier, + token: access_token, + expiresIn: expires_in, + userId, + }); + + if (refresh_token != null) { + logger.debug('Processing refresh token'); + await handleOAuthToken({ + token: refresh_token, + type: 'oauth_refresh', + userId, + identifier: `${identifier}:refresh`, + expiresIn: refresh_token_expires_in ?? null, + }); + } + logger.debug('Access tokens processed'); +} + +/** + * Refreshes the access token using the refresh token. + * @param fields + * @param fields.userId - The ID of the user. + * @param fields.client_url - The URL of the OAuth provider. + * @param fields.identifier - The identifier for the token. + * @param fields.refresh_token - The refresh token to use. + * @param fields.token_exchange_method - The token exchange method ('default_post' or 'basic_auth_header'). + * @param fields.encrypted_oauth_client_id - The client ID for the OAuth provider. + * @param fields.encrypted_oauth_client_secret - The client secret for the OAuth provider. 
+ */ +export async function refreshAccessToken( + { + userId, + client_url, + identifier, + refresh_token, + token_exchange_method, + encrypted_oauth_client_id, + encrypted_oauth_client_secret, + }: { + userId: string; + client_url: string; + identifier: string; + refresh_token: string; + token_exchange_method: TokenExchangeMethodEnum; + encrypted_oauth_client_id: string; + encrypted_oauth_client_secret: string; + }, + { + findToken, + updateToken, + createToken, + }: { + findToken: TokenMethods['findToken']; + updateToken: TokenMethods['updateToken']; + createToken: TokenMethods['createToken']; + }, +): Promise<{ + access_token: string; + expires_in: number; + refresh_token?: string; + refresh_token_expires_in?: number; +}> { + try { + const oauth_client_id = await decryptV2(encrypted_oauth_client_id); + const oauth_client_secret = await decryptV2(encrypted_oauth_client_secret); + + const headers: Record = { + 'Content-Type': 'application/x-www-form-urlencoded', + Accept: 'application/json', + }; + + const params = new URLSearchParams({ + grant_type: 'refresh_token', + refresh_token, + }); + + if (token_exchange_method === TokenExchangeMethodEnum.BasicAuthHeader) { + const basicAuth = Buffer.from(`${oauth_client_id}:${oauth_client_secret}`).toString('base64'); + headers['Authorization'] = `Basic ${basicAuth}`; + } else { + params.append('client_id', oauth_client_id); + params.append('client_secret', oauth_client_secret); + } + + const response = await axios({ + method: 'POST', + url: client_url, + headers, + data: params.toString(), + }); + await processAccessTokens( + response.data, + { + userId, + identifier, + }, + { + findToken, + updateToken, + createToken, + }, + ); + logger.debug(`Access token refreshed successfully for ${identifier}`); + return response.data; + } catch (error) { + const message = 'Error refreshing OAuth tokens'; + throw new Error( + logAxiosError({ + message, + error: error as AxiosError, + }), + ); + } +} + +/** + * Handles the OAuth callback and exchanges the authorization code for tokens. + * @param {object} fields + * @param {string} fields.code - The authorization code returned by the provider. + * @param {string} fields.userId - The ID of the user. + * @param {string} fields.identifier - The identifier for the token. + * @param {string} fields.client_url - The URL of the OAuth provider. + * @param {string} fields.redirect_uri - The redirect URI for the OAuth provider. + * @param {string} fields.token_exchange_method - The token exchange method ('default_post' or 'basic_auth_header'). + * @param {string} fields.encrypted_oauth_client_id - The client ID for the OAuth provider. + * @param {string} fields.encrypted_oauth_client_secret - The client secret for the OAuth provider. 
+ */ +export async function getAccessToken( + { + code, + userId, + identifier, + client_url, + redirect_uri, + token_exchange_method, + encrypted_oauth_client_id, + encrypted_oauth_client_secret, + }: { + code: string; + userId: string; + identifier: string; + client_url: string; + redirect_uri: string; + token_exchange_method: TokenExchangeMethodEnum; + encrypted_oauth_client_id: string; + encrypted_oauth_client_secret: string; + }, + { + findToken, + updateToken, + createToken, + }: { + findToken: TokenMethods['findToken']; + updateToken: TokenMethods['updateToken']; + createToken: TokenMethods['createToken']; + }, +): Promise<{ + access_token: string; + expires_in: number; + refresh_token?: string; + refresh_token_expires_in?: number; +}> { + const oauth_client_id = await decryptV2(encrypted_oauth_client_id); + const oauth_client_secret = await decryptV2(encrypted_oauth_client_secret); + + const headers: Record = { + 'Content-Type': 'application/x-www-form-urlencoded', + Accept: 'application/json', + }; + + const params = new URLSearchParams({ + code, + grant_type: 'authorization_code', + redirect_uri, + }); + + if (token_exchange_method === TokenExchangeMethodEnum.BasicAuthHeader) { + const basicAuth = Buffer.from(`${oauth_client_id}:${oauth_client_secret}`).toString('base64'); + headers['Authorization'] = `Basic ${basicAuth}`; + } else { + params.append('client_id', oauth_client_id); + params.append('client_secret', oauth_client_secret); + } + + try { + const response = await axios({ + method: 'POST', + url: client_url, + headers, + data: params.toString(), + }); + + await processAccessTokens( + response.data, + { + userId, + identifier, + }, + { + findToken, + updateToken, + createToken, + }, + ); + logger.debug(`Access tokens successfully created for ${identifier}`); + return response.data; + } catch (error) { + const message = 'Error exchanging OAuth code'; + throw new Error( + logAxiosError({ + message, + error: error as AxiosError, + }), + ); + } +} diff --git a/packages/data-provider/src/config.ts b/packages/data-provider/src/config.ts index d851891ac4..145487d7ef 100644 --- a/packages/data-provider/src/config.ts +++ b/packages/data-provider/src/config.ts @@ -1136,6 +1136,10 @@ export enum CacheKeys { * Key for in-progress flow states. */ FLOWS = 'flows', + /** + * Key for individual MCP Tool Manifests. 
+ */ + MCP_TOOLS = 'mcp_tools', /** * Key for pending chat requests (concurrency check) */ diff --git a/packages/data-provider/src/mcp.ts b/packages/data-provider/src/mcp.ts index 2d7be8458f..ffcb65cda1 100644 --- a/packages/data-provider/src/mcp.ts +++ b/packages/data-provider/src/mcp.ts @@ -1,6 +1,7 @@ import { z } from 'zod'; import type { TUser } from './types'; import { extractEnvVariable } from './utils'; +import { TokenExchangeMethodEnum } from './types/assistants'; const BaseOptionsSchema = z.object({ iconPath: z.string().optional(), @@ -15,6 +16,29 @@ const BaseOptionsSchema = z.object({ * - string: Use custom instructions (overrides server-provided) */ serverInstructions: z.union([z.boolean(), z.string()]).optional(), + /** + * OAuth configuration for SSE and Streamable HTTP transports + * - Optional: OAuth can be auto-discovered on 401 responses + * - Pre-configured values will skip discovery steps + */ + oauth: z + .object({ + /** OAuth authorization endpoint (optional - can be auto-discovered) */ + authorization_url: z.string().url().optional(), + /** OAuth token endpoint (optional - can be auto-discovered) */ + token_url: z.string().url().optional(), + /** OAuth client ID (optional - can use dynamic registration) */ + client_id: z.string().optional(), + /** OAuth client secret (optional - can use dynamic registration) */ + client_secret: z.string().optional(), + /** OAuth scopes to request */ + scope: z.string().optional(), + /** OAuth redirect URI (defaults to /api/mcp/{serverName}/oauth/callback) */ + redirect_uri: z.string().url().optional(), + /** Token exchange method */ + token_exchange_method: z.nativeEnum(TokenExchangeMethodEnum).optional(), + }) + .optional(), }); export const StdioOptionsSchema = BaseOptionsSchema.extend({ diff --git a/packages/data-schemas/src/methods/index.ts b/packages/data-schemas/src/methods/index.ts index 67f8255c8d..be4308ecd9 100644 --- a/packages/data-schemas/src/methods/index.ts +++ b/packages/data-schemas/src/methods/index.ts @@ -20,7 +20,7 @@ export function createMethods(mongoose: typeof import('mongoose')) { }; } -export type { MemoryMethods, ShareMethods }; +export type { MemoryMethods, ShareMethods, TokenMethods }; export type AllMethods = UserMethods & SessionMethods & TokenMethods & From 10c0d7d47408be372310cdc94010f76fc2074909 Mon Sep 17 00:00:00 2001 From: Danny Avila Date: Tue, 17 Jun 2025 21:11:39 -0400 Subject: [PATCH 04/16] =?UTF-8?q?=E2=AC=87=EF=B8=8F=20fix:=20Image=20Downl?= =?UTF-8?q?oad=20Browser=20Compatibility=20(#7950)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: Add null check for SelectedTTS in MessageAudio component to prevent rendering issues * fix: image download browser compatibility with error handling and fallback mechanism - Updated the downloadImage function to use fetch for improved reliability and added error handling. - Implemented a fallback to the original download method in case of fetch failure. - Ensured the download link uses a blob URL for better compatibility with various image types. 
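For readers skimming this patch, the approach described above boils down to: fetch the image, save it through a blob URL, and fall back to the old direct-link download if the fetch fails. A condensed, self-contained sketch of that pattern follows; the helper names here are illustrative only, the real change is the Image.tsx diff below.

async function downloadViaBlob(imagePath: string, fileName = 'image.png'): Promise<void> {
  // Shared helper: trigger a browser download for a given href.
  const triggerDownload = (href: string) => {
    const link = document.createElement('a');
    link.href = href;
    link.download = fileName;
    document.body.appendChild(link);
    link.click();
    document.body.removeChild(link);
  };

  try {
    const response = await fetch(imagePath);
    if (!response.ok) {
      throw new Error(`Failed to fetch image: ${response.status}`);
    }
    // A same-origin blob URL keeps the `download` attribute working where a
    // cross-origin href would be ignored, which is the compatibility fix here.
    const url = window.URL.createObjectURL(await response.blob());
    triggerDownload(url);
    window.URL.revokeObjectURL(url);
  } catch (error) {
    console.error('Download failed, falling back to direct link:', error);
    // Fallback: the original behavior, letting the browser fetch the href itself.
    triggerDownload(imagePath);
  }
}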
--- .../Chat/Messages/Content/Image.tsx | 34 +++++++++++++++---- .../components/Chat/Messages/MessageAudio.tsx | 3 ++ 2 files changed, 30 insertions(+), 7 deletions(-) diff --git a/client/src/components/Chat/Messages/Content/Image.tsx b/client/src/components/Chat/Messages/Content/Image.tsx index 85c3fdb3f2..ba4f65671a 100644 --- a/client/src/components/Chat/Messages/Content/Image.tsx +++ b/client/src/components/Chat/Messages/Content/Image.tsx @@ -46,13 +46,33 @@ const Image = ({ [placeholderDimensions, height, width], ); - const downloadImage = () => { - const link = document.createElement('a'); - link.href = imagePath; - link.download = altText; - document.body.appendChild(link); - link.click(); - document.body.removeChild(link); + const downloadImage = async () => { + try { + const response = await fetch(imagePath); + if (!response.ok) { + throw new Error(`Failed to fetch image: ${response.status}`); + } + + const blob = await response.blob(); + const url = window.URL.createObjectURL(blob); + + const link = document.createElement('a'); + link.href = url; + link.download = altText || 'image.png'; + document.body.appendChild(link); + link.click(); + document.body.removeChild(link); + + window.URL.revokeObjectURL(url); + } catch (error) { + console.error('Download failed:', error); + const link = document.createElement('a'); + link.href = imagePath; + link.download = altText || 'image.png'; + document.body.appendChild(link); + link.click(); + document.body.removeChild(link); + } }; return ( diff --git a/client/src/components/Chat/Messages/MessageAudio.tsx b/client/src/components/Chat/Messages/MessageAudio.tsx index 29805e7afb..eb4c52a407 100644 --- a/client/src/components/Chat/Messages/MessageAudio.tsx +++ b/client/src/components/Chat/Messages/MessageAudio.tsx @@ -14,6 +14,9 @@ function MessageAudio(props: TMessageAudio) { }; const SelectedTTS = TTSComponents[engineTTS]; + if (!SelectedTTS) { + return null; + } return ; } From 3c9357580ef800eb0018c4504d1ef155d9785bde Mon Sep 17 00:00:00 2001 From: Rakshit Tiwari Date: Wed, 18 Jun 2025 06:42:15 +0530 Subject: [PATCH 05/16] =?UTF-8?q?=F0=9F=96=BC=EF=B8=8F=20feat:=20Add=20sup?= =?UTF-8?q?port=20for=20HEIC=20image=20format=20(#7914)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: Add HEIC image format support with client-side conversion - Add HEIC/HEIF mime types to supported image formats - Install heic-to library for client-side HEIC to JPEG conversion - Create heicConverter utility with detection and conversion functions - Integrate HEIC processing into file upload flow - Add error handling and localization for HEIC conversion failures - Maintain backward compatibility with existing image formats - Resolves #5570 * feat: Add UI feedback during HEIC conversion - Show file thumbnail * Addressing eslint errors * Addressing the vite bundler issue --- client/package.json | 1 + client/src/hooks/Files/useFileHandling.ts | 132 +++++++++++++++++----- client/src/locales/en/translation.json | 2 + client/src/utils/heicConverter.ts | 79 +++++++++++++ client/vite.config.ts | 11 +- package-lock.json | 7 ++ packages/data-provider/src/file-config.ts | 10 +- 7 files changed, 204 insertions(+), 38 deletions(-) create mode 100644 client/src/utils/heicConverter.ts diff --git a/client/package.json b/client/package.json index 7cb983d218..5218ebdc6b 100644 --- a/client/package.json +++ b/client/package.json @@ -65,6 +65,7 @@ "export-from-json": "^1.7.2", "filenamify": "^6.0.0", "framer-motion": "^11.5.4", + 
"heic-to": "^1.1.14", "html-to-image": "^1.11.11", "i18next": "^24.2.2", "i18next-browser-languagedetector": "^8.0.3", diff --git a/client/src/hooks/Files/useFileHandling.ts b/client/src/hooks/Files/useFileHandling.ts index 69a99fef33..9e03f29334 100644 --- a/client/src/hooks/Files/useFileHandling.ts +++ b/client/src/hooks/Files/useFileHandling.ts @@ -1,24 +1,25 @@ -import { v4 } from 'uuid'; -import debounce from 'lodash/debounce'; import { useQueryClient } from '@tanstack/react-query'; -import React, { useState, useEffect, useCallback, useRef, useMemo } from 'react'; +import type { TEndpointsConfig, TError } from 'librechat-data-provider'; import { - QueryKeys, - EModelEndpoint, - mergeFileConfig, - isAgentsEndpoint, - isAssistantsEndpoint, defaultAssistantsVersion, fileConfig as defaultFileConfig, + EModelEndpoint, + isAgentsEndpoint, + isAssistantsEndpoint, + mergeFileConfig, + QueryKeys, } from 'librechat-data-provider'; -import type { TEndpointsConfig, TError } from 'librechat-data-provider'; +import debounce from 'lodash/debounce'; +import React, { useCallback, useEffect, useMemo, useRef, useState } from 'react'; +import { v4 } from 'uuid'; import type { ExtendedFile, FileSetter } from '~/common'; -import { useUploadFileMutation, useGetFileConfig } from '~/data-provider'; +import { useGetFileConfig, useUploadFileMutation } from '~/data-provider'; import useLocalize, { TranslationKeys } from '~/hooks/useLocalize'; -import { useDelayedUploadToast } from './useDelayedUploadToast'; -import { useToastContext } from '~/Providers/ToastContext'; import { useChatContext } from '~/Providers/ChatContext'; +import { useToastContext } from '~/Providers/ToastContext'; import { logger, validateFiles } from '~/utils'; +import { processFileForUpload } from '~/utils/heicConverter'; +import { useDelayedUploadToast } from './useDelayedUploadToast'; import useUpdateFiles from './useUpdateFiles'; type UseFileHandling = { @@ -262,41 +263,110 @@ const useFileHandling = (params?: UseFileHandling) => { for (const originalFile of fileList) { const file_id = v4(); try { - const preview = URL.createObjectURL(originalFile); - const extendedFile: ExtendedFile = { + // Create initial preview with original file + const initialPreview = URL.createObjectURL(originalFile); + + // Create initial ExtendedFile to show immediately + const initialExtendedFile: ExtendedFile = { file_id, file: originalFile, type: originalFile.type, - preview, - progress: 0.2, + preview: initialPreview, + progress: 0.1, // Show as processing size: originalFile.size, }; if (_toolResource != null && _toolResource !== '') { - extendedFile.tool_resource = _toolResource; + initialExtendedFile.tool_resource = _toolResource; } - const isImage = originalFile.type.split('/')[0] === 'image'; - const tool_resource = - extendedFile.tool_resource ?? 
params?.additionalMetadata?.tool_resource; - if (isAgentsEndpoint(endpoint) && !isImage && tool_resource == null) { - /** Note: this needs to be removed when we can support files to providers */ - setError('com_error_files_unsupported_capability'); - continue; + // Add file immediately to show in UI + addFile(initialExtendedFile); + + // Check if HEIC conversion is needed and show toast + const isHEIC = + originalFile.type === 'image/heic' || + originalFile.type === 'image/heif' || + originalFile.name.toLowerCase().match(/\.(heic|heif)$/); + + if (isHEIC) { + showToast({ + message: localize('com_info_heic_converting'), + status: 'info', + duration: 3000, + }); } - addFile(extendedFile); + // Process file for HEIC conversion if needed + const processedFile = await processFileForUpload( + originalFile, + 0.9, + (conversionProgress) => { + // Update progress during HEIC conversion (0.1 to 0.5 range for conversion) + const adjustedProgress = 0.1 + conversionProgress * 0.4; + replaceFile({ + ...initialExtendedFile, + progress: adjustedProgress, + }); + }, + ); - if (isImage) { - loadImage(extendedFile, preview); - continue; + // If file was converted, update with new file and preview + if (processedFile !== originalFile) { + URL.revokeObjectURL(initialPreview); // Clean up original preview + const newPreview = URL.createObjectURL(processedFile); + + const updatedExtendedFile: ExtendedFile = { + ...initialExtendedFile, + file: processedFile, + type: processedFile.type, + preview: newPreview, + progress: 0.5, // Conversion complete, ready for upload + size: processedFile.size, + }; + + replaceFile(updatedExtendedFile); + + const isImage = processedFile.type.split('/')[0] === 'image'; + if (isImage) { + loadImage(updatedExtendedFile, newPreview); + continue; + } + + await startUpload(updatedExtendedFile); + } else { + // File wasn't converted, proceed with original + const isImage = originalFile.type.split('/')[0] === 'image'; + const tool_resource = + initialExtendedFile.tool_resource ?? params?.additionalMetadata?.tool_resource; + if (isAgentsEndpoint(endpoint) && !isImage && tool_resource == null) { + /** Note: this needs to be removed when we can support files to providers */ + setError('com_error_files_unsupported_capability'); + continue; + } + + // Update progress to show ready for upload + const readyExtendedFile = { + ...initialExtendedFile, + progress: 0.2, + }; + replaceFile(readyExtendedFile); + + if (isImage) { + loadImage(readyExtendedFile, initialPreview); + continue; + } + + await startUpload(readyExtendedFile); } - - await startUpload(extendedFile); } catch (error) { deleteFileById(file_id); console.log('file handling error', error); - setError('com_error_files_process'); + if (error instanceof Error && error.message.includes('HEIC')) { + setError('com_error_heic_conversion'); + } else { + setError('com_error_files_process'); + } } } }; diff --git a/client/src/locales/en/translation.json b/client/src/locales/en/translation.json index d7171a032f..c314329edf 100644 --- a/client/src/locales/en/translation.json +++ b/client/src/locales/en/translation.json @@ -276,6 +276,8 @@ "com_error_files_upload": "An error occurred while uploading the file.", "com_error_files_upload_canceled": "The file upload request was canceled. Note: the file upload may still be processing and will need to be manually deleted.", "com_error_files_validation": "An error occurred while validating the file.", + "com_error_heic_conversion": "Failed to convert HEIC image to JPEG. 
Please try converting the image manually or use a different format.", + "com_info_heic_converting": "Converting HEIC image to JPEG...", "com_error_input_length": "The latest message token count is too long, exceeding the token limit, or your token limit parameters are misconfigured, adversely affecting the context window. More info: {{0}}. Please shorten your message, adjust the max context size from the conversation parameters, or fork the conversation to continue.", "com_error_invalid_agent_provider": "The \"{{0}}\" provider is not available for use with Agents. Please go to your agent's settings and select a currently available provider.", "com_error_invalid_user_key": "Invalid key provided. Please provide a valid key and try again.", diff --git a/client/src/utils/heicConverter.ts b/client/src/utils/heicConverter.ts new file mode 100644 index 0000000000..a14e09a050 --- /dev/null +++ b/client/src/utils/heicConverter.ts @@ -0,0 +1,79 @@ +import { heicTo, isHeic } from 'heic-to'; + +/** + * Check if a file is in HEIC format + * @param file - The file to check + * @returns Promise - True if the file is HEIC + */ +export const isHEICFile = async (file: File): Promise => { + try { + return await isHeic(file); + } catch (error) { + console.warn('Error checking if file is HEIC:', error); + // Fallback to mime type check + return file.type === 'image/heic' || file.type === 'image/heif'; + } +}; + +/** + * Convert HEIC file to JPEG + * @param file - The HEIC file to convert + * @param quality - JPEG quality (0-1), default is 0.9 + * @param onProgress - Optional callback to track conversion progress + * @returns Promise - The converted JPEG file + */ +export const convertHEICToJPEG = async ( + file: File, + quality: number = 0.9, + onProgress?: (progress: number) => void, +): Promise => { + try { + // Report conversion start + onProgress?.(0.3); + + const convertedBlob = await heicTo({ + blob: file, + type: 'image/jpeg', + quality, + }); + + // Report conversion completion + onProgress?.(0.8); + + // Create a new File object with the converted blob + const convertedFile = new File([convertedBlob], file.name.replace(/\.(heic|heif)$/i, '.jpg'), { + type: 'image/jpeg', + lastModified: file.lastModified, + }); + + // Report file creation completion + onProgress?.(1.0); + + return convertedFile; + } catch (error) { + console.error('Error converting HEIC to JPEG:', error); + throw new Error('Failed to convert HEIC image to JPEG'); + } +}; + +/** + * Process a file, converting it from HEIC to JPEG if necessary + * @param file - The file to process + * @param quality - JPEG quality for conversion (0-1), default is 0.9 + * @param onProgress - Optional callback to track conversion progress + * @returns Promise - The processed file (converted if it was HEIC, original otherwise) + */ +export const processFileForUpload = async ( + file: File, + quality: number = 0.9, + onProgress?: (progress: number) => void, +): Promise => { + const isHEIC = await isHEICFile(file); + + if (isHEIC) { + console.log('HEIC file detected, converting to JPEG...'); + return convertHEICToJPEG(file, quality, onProgress); + } + + return file; +}; diff --git a/client/vite.config.ts b/client/vite.config.ts index 98451b6c06..4ce4fc3b86 100644 --- a/client/vite.config.ts +++ b/client/vite.config.ts @@ -1,10 +1,10 @@ -import path from 'path'; -import { defineConfig } from 'vite'; import react from '@vitejs/plugin-react'; -import { VitePWA } from 'vite-plugin-pwa'; +import path from 'path'; +import type { Plugin } from 'vite'; +import { 
defineConfig } from 'vite'; import { compression } from 'vite-plugin-compression2'; import { nodePolyfills } from 'vite-plugin-node-polyfills'; -import type { Plugin } from 'vite'; +import { VitePWA } from 'vite-plugin-pwa'; // https://vitejs.dev/config/ export default defineConfig(({ command }) => ({ @@ -169,6 +169,9 @@ export default defineConfig(({ command }) => ({ if (id.includes('react-select') || id.includes('downshift')) { return 'advanced-inputs'; } + if (id.includes('heic-to')) { + return 'heic-converter'; + } // Existing chunks if (id.includes('@radix-ui')) { diff --git a/package-lock.json b/package-lock.json index ed1ef74865..d9dc4cc2ed 100644 --- a/package-lock.json +++ b/package-lock.json @@ -2464,6 +2464,7 @@ "export-from-json": "^1.7.2", "filenamify": "^6.0.0", "framer-motion": "^11.5.4", + "heic-to": "^1.1.14", "html-to-image": "^1.11.11", "i18next": "^24.2.2", "i18next-browser-languagedetector": "^8.0.3", @@ -32074,6 +32075,12 @@ "url": "https://opencollective.com/unified" } }, + "node_modules/heic-to": { + "version": "1.1.14", + "resolved": "https://registry.npmjs.org/heic-to/-/heic-to-1.1.14.tgz", + "integrity": "sha512-CxJE27BF6JcQvrL1giK478iSZr7EJNTnAN2Th1rAJiN1BSMYZxDLm4PL/p/ha3aSqVHvCo+YNk++5tIj0JVxLQ==", + "license": "LGPL-3.0" + }, "node_modules/highlight.js": { "version": "11.8.0", "resolved": "https://registry.npmjs.org/highlight.js/-/highlight.js-11.8.0.tgz", diff --git a/packages/data-provider/src/file-config.ts b/packages/data-provider/src/file-config.ts index 050fb2541d..80521b4c47 100644 --- a/packages/data-provider/src/file-config.ts +++ b/packages/data-provider/src/file-config.ts @@ -1,6 +1,6 @@ import { z } from 'zod'; import { EModelEndpoint } from './schemas'; -import type { FileConfig, EndpointFileConfig } from './types/files'; +import type { EndpointFileConfig, FileConfig } from './types/files'; export const supportsFiles = { [EModelEndpoint.openAI]: true, @@ -49,6 +49,8 @@ export const fullMimeTypesList = [ 'text/javascript', 'image/gif', 'image/png', + 'image/heic', + 'image/heif', 'application/x-tar', 'application/typescript', 'application/xml', @@ -80,6 +82,8 @@ export const codeInterpreterMimeTypesList = [ 'text/javascript', 'image/gif', 'image/png', + 'image/heic', + 'image/heif', 'application/x-tar', 'application/typescript', 'application/xml', @@ -105,7 +109,7 @@ export const retrievalMimeTypesList = [ 'text/plain', ]; -export const imageExtRegex = /\.(jpg|jpeg|png|gif|webp)$/i; +export const imageExtRegex = /\.(jpg|jpeg|png|gif|webp|heic|heif)$/i; export const excelMimeTypes = /^application\/(vnd\.ms-excel|msexcel|x-msexcel|x-ms-excel|x-excel|x-dos_ms_excel|xls|x-xls|vnd\.openxmlformats-officedocument\.spreadsheetml\.sheet)$/; @@ -116,7 +120,7 @@ export const textMimeTypes = export const applicationMimeTypes = /^(application\/(epub\+zip|csv|json|pdf|x-tar|typescript|vnd\.openxmlformats-officedocument\.(wordprocessingml\.document|presentationml\.presentation|spreadsheetml\.sheet)|xml|zip))$/; -export const imageMimeTypes = /^image\/(jpeg|gif|png|webp)$/; +export const imageMimeTypes = /^image\/(jpeg|gif|png|webp|heic|heif)$/; export const supportedMimeTypes = [ textMimeTypes, From dba0ec4320fcf7e5c8c5ec69abc914fb5d5ffadb Mon Sep 17 00:00:00 2001 From: Robin Anderson Date: Wed, 18 Jun 2025 02:27:31 +0100 Subject: [PATCH 06/16] =?UTF-8?q?=F0=9F=94=A7=20chore:=20update=20pricing?= =?UTF-8?q?=20for=20OpenAI=20o3=20(#7948)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit `o3` is now 80% cheaper, at $2/Mt 
input and $8/Mt output. https://openai.com/api/pricing/ --- api/models/tx.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/models/tx.js b/api/models/tx.js index ddd098b80f..f3ba38652d 100644 --- a/api/models/tx.js +++ b/api/models/tx.js @@ -78,7 +78,7 @@ const tokenValues = Object.assign( 'gpt-3.5-turbo-1106': { prompt: 1, completion: 2 }, 'o4-mini': { prompt: 1.1, completion: 4.4 }, 'o3-mini': { prompt: 1.1, completion: 4.4 }, - o3: { prompt: 10, completion: 40 }, + o3: { prompt: 2, completion: 8 }, 'o1-mini': { prompt: 1.1, completion: 4.4 }, 'o1-preview': { prompt: 15, completion: 60 }, o1: { prompt: 15, completion: 60 }, From c7e4523d7c725c0a7fa0d364954241263eea73cb Mon Sep 17 00:00:00 2001 From: Danny Avila Date: Wed, 18 Jun 2025 00:58:51 -0400 Subject: [PATCH 07/16] =?UTF-8?q?=F0=9F=8E=AF=20refactor:=20LaTeX=20and=20?= =?UTF-8?q?Math=20Rendering=20(#7952)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * refactor: Markdown LaTeX processing - Added micromark-extension-llm-math as a dependency in package.json and package-lock.json. - Updated Vite configuration to alias micromark-extension-math. - Modified Markdown components to use singleDollarTextMath: false for improved LaTeX rendering. - Refactored latex utility functions to enhance LaTeX processing and escaping mechanisms. * chore: linting of `EditTextPart` * fix: handle key up to initiate edit of latest user message by adding id prop to Edit Message HoverButton * chore: linting in Artifact component * refactor: enhance LaTeX preprocessing functionality - Updated `preprocessLaTeX` to improve handling of currency and LaTeX expressions. - Introduced optimized regex patterns for better performance. - Added support for escaping mhchem commands and handling code blocks. - Enhanced tests for various LaTeX scenarios, including currency and special characters. - Refactored existing tests to align with new preprocessing logic. * chore: filter out false positives in unused packages workflow - Added a grep command to exclude the micromark-extension-llm-math package from the list of unused dependencies in the GitHub Actions workflow. 
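For illustration, a minimal sketch of the kind of currency escaping the new preprocessing targets (assumed helper name, not the actual `preprocessLaTeX` implementation):

    // Escape "$" when it directly precedes a digit (e.g. "$5", "$1,000"),
    // leaving "$$ ... $$" display-math delimiters untouched.
    export function escapeCurrencyDollars(text: string): string {
      return text.replace(/(?<!\$)\$(?=\d)/g, '\\$');
    }

With this kind of escaping, a message such as "That costs $5 or $10" keeps its literal dollar signs instead of being parsed as an inline formula.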
--- .github/workflows/unused-packages.yml | 2 + client/package.json | 1 + client/src/components/Artifacts/Artifact.tsx | 4 +- .../Chat/Messages/Content/Markdown.tsx | 2 +- .../Chat/Messages/Content/MarkdownLite.tsx | 2 +- .../Messages/Content/Parts/EditTextPart.tsx | 6 +- .../components/Chat/Messages/HoverButtons.tsx | 4 + .../Prompts/Groups/VariableForm.tsx | 2 +- .../src/components/Prompts/PromptDetails.tsx | 2 +- .../src/components/Prompts/PromptEditor.tsx | 2 +- client/src/utils/latex.spec.ts | 313 +++++++++--------- client/src/utils/latex.ts | 231 ++++++++----- client/vite.config.ts | 1 + package-lock.json | 20 ++ 14 files changed, 341 insertions(+), 251 deletions(-) diff --git a/.github/workflows/unused-packages.yml b/.github/workflows/unused-packages.yml index 442e70e52c..dc6ce3ba56 100644 --- a/.github/workflows/unused-packages.yml +++ b/.github/workflows/unused-packages.yml @@ -98,6 +98,8 @@ jobs: cd client UNUSED=$(depcheck --json | jq -r '.dependencies | join("\n")' || echo "") UNUSED=$(comm -23 <(echo "$UNUSED" | sort) <(cat ../client_used_deps.txt ../client_used_code.txt | sort) || echo "") + # Filter out false positives + UNUSED=$(echo "$UNUSED" | grep -v "^micromark-extension-llm-math$" || echo "") echo "CLIENT_UNUSED<> $GITHUB_ENV echo "$UNUSED" >> $GITHUB_ENV echo "EOF" >> $GITHUB_ENV diff --git a/client/package.json b/client/package.json index 5218ebdc6b..67cbec2820 100644 --- a/client/package.json +++ b/client/package.json @@ -75,6 +75,7 @@ "lodash": "^4.17.21", "lucide-react": "^0.394.0", "match-sorter": "^6.3.4", + "micromark-extension-llm-math": "^3.1.0", "qrcode.react": "^4.2.0", "rc-input-number": "^7.4.2", "react": "^18.2.0", diff --git a/client/src/components/Artifacts/Artifact.tsx b/client/src/components/Artifacts/Artifact.tsx index 2b06a2ccc0..902ac9191a 100644 --- a/client/src/components/Artifacts/Artifact.tsx +++ b/client/src/components/Artifacts/Artifact.tsx @@ -40,7 +40,7 @@ const defaultType = 'unknown'; const defaultIdentifier = 'lc-no-identifier'; export function Artifact({ - node, + node: _node, ...props }: Artifact & { children: React.ReactNode | { props: { children: React.ReactNode } }; @@ -95,7 +95,7 @@ export function Artifact({ setArtifacts((prevArtifacts) => { if ( prevArtifacts?.[artifactKey] != null && - prevArtifacts[artifactKey].content === content + prevArtifacts[artifactKey]?.content === content ) { return prevArtifacts; } diff --git a/client/src/components/Chat/Messages/Content/Markdown.tsx b/client/src/components/Chat/Messages/Content/Markdown.tsx index 740bf66670..7bd6511cfa 100644 --- a/client/src/components/Chat/Messages/Content/Markdown.tsx +++ b/client/src/components/Chat/Messages/Content/Markdown.tsx @@ -204,7 +204,7 @@ const Markdown = memo(({ content = '', isLatestMessage }: TContentProps) => { remarkGfm, remarkDirective, artifactPlugin, - [remarkMath, { singleDollarTextMath: true }], + [remarkMath, { singleDollarTextMath: false }], unicodeCitation, ]; diff --git a/client/src/components/Chat/Messages/Content/MarkdownLite.tsx b/client/src/components/Chat/Messages/Content/MarkdownLite.tsx index 972395c425..019783607c 100644 --- a/client/src/components/Chat/Messages/Content/MarkdownLite.tsx +++ b/client/src/components/Chat/Messages/Content/MarkdownLite.tsx @@ -32,7 +32,7 @@ const MarkdownLite = memo( /** @ts-ignore */ supersub, remarkGfm, - [remarkMath, { singleDollarTextMath: true }], + [remarkMath, { singleDollarTextMath: false }], ]} /** @ts-ignore */ rehypePlugins={rehypePlugins} diff --git 
a/client/src/components/Chat/Messages/Content/Parts/EditTextPart.tsx b/client/src/components/Chat/Messages/Content/Parts/EditTextPart.tsx index e6736b192e..1ce207fe1c 100644 --- a/client/src/components/Chat/Messages/Content/Parts/EditTextPart.tsx +++ b/client/src/components/Chat/Messages/Content/Parts/EditTextPart.tsx @@ -117,9 +117,9 @@ const EditTextPart = ({ messages.map((msg) => msg.messageId === messageId ? { - ...msg, - content: updatedContent, - } + ...msg, + content: updatedContent, + } : msg, ), ); diff --git a/client/src/components/Chat/Messages/HoverButtons.tsx b/client/src/components/Chat/Messages/HoverButtons.tsx index 644852c0b4..a13266f04c 100644 --- a/client/src/components/Chat/Messages/HoverButtons.tsx +++ b/client/src/components/Chat/Messages/HoverButtons.tsx @@ -25,6 +25,7 @@ type THoverButtons = { }; type HoverButtonProps = { + id?: string; onClick: (e?: React.MouseEvent) => void; title: string; icon: React.ReactNode; @@ -67,6 +68,7 @@ const extractMessageContent = (message: TMessage): string => { const HoverButton = memo( ({ + id, onClick, title, icon, @@ -89,6 +91,7 @@ const HoverButton = memo( return ( - )} - + + + {localize('com_ui_delete_tool_confirm')} + + } + selection={{ + selectHandler: () => removeTool(currentTool.tool_id), + selectClasses: + 'bg-red-700 dark:bg-red-600 hover:bg-red-800 dark:hover:bg-red-800 transition-color duration-200 text-white', + selectText: localize('com_ui_delete'), + }} + /> + + ); + } + + // Group tool with accordion + return ( + + + +
setIsHovering(true)} + onMouseLeave={() => setIsHovering(false)} + onFocus={() => setIsFocused(true)} + onBlur={(e) => { + // Check if focus is moving to a child element + if (!e.currentTarget.contains(e.relatedTarget)) { + setIsFocused(false); + } + }} + > + + + + +
+ + + + + + + + +
+ {currentTool.tools?.map((subTool) => ( + + ))} +
+
+
+
} selection={{ - selectHandler: () => removeTool(currentTool.pluginKey), + selectHandler: () => removeTool(currentTool.tool_id), selectClasses: 'bg-red-700 dark:bg-red-600 hover:bg-red-800 dark:hover:bg-red-800 transition-color duration-200 text-white', selectText: localize('com_ui_delete'), diff --git a/client/src/components/Tools/ToolItem.tsx b/client/src/components/Tools/ToolItem.tsx index cfb318a898..0b16b0ba42 100644 --- a/client/src/components/Tools/ToolItem.tsx +++ b/client/src/components/Tools/ToolItem.tsx @@ -1,9 +1,9 @@ -import { TPlugin } from 'librechat-data-provider'; import { XCircle, PlusCircleIcon, Wrench } from 'lucide-react'; +import { AgentToolType } from 'librechat-data-provider'; import { useLocalize } from '~/hooks'; type ToolItemProps = { - tool: TPlugin; + tool: AgentToolType; onAddTool: () => void; onRemoveTool: () => void; isInstalled?: boolean; @@ -19,15 +19,19 @@ function ToolItem({ tool, onAddTool, onRemoveTool, isInstalled = false }: ToolIt } }; + const name = tool.metadata?.name || tool.tool_id; + const description = tool.metadata?.description || ''; + const icon = tool.metadata?.icon; + return (
- {tool.icon != null && tool.icon ? ( + {icon ? ( {localize('com_ui_logo', ) : ( @@ -40,12 +44,12 @@ function ToolItem({ tool, onAddTool, onRemoveTool, isInstalled = false }: ToolIt
- {tool.name} + {name}
{!isInstalled ? (
-
{tool.description}
+
{description}
); } diff --git a/client/src/components/Tools/ToolSelectDialog.tsx b/client/src/components/Tools/ToolSelectDialog.tsx index e5c18eda47..b3bc558405 100644 --- a/client/src/components/Tools/ToolSelectDialog.tsx +++ b/client/src/components/Tools/ToolSelectDialog.tsx @@ -1,17 +1,19 @@ import { useEffect } from 'react'; import { Search, X } from 'lucide-react'; -import { Dialog, DialogPanel, DialogTitle, Description } from '@headlessui/react'; import { useFormContext } from 'react-hook-form'; import { isAgentsEndpoint } from 'librechat-data-provider'; +import { Dialog, DialogPanel, DialogTitle, Description } from '@headlessui/react'; import { useUpdateUserPluginsMutation } from 'librechat-data-provider/react-query'; import type { AssistantsEndpoint, EModelEndpoint, TPluginAction, + AgentToolType, TError, } from 'librechat-data-provider'; -import type { TPluginStoreDialogProps } from '~/common/types'; +import type { AgentForm, TPluginStoreDialogProps } from '~/common'; import { PluginPagination, PluginAuthForm } from '~/components/Plugins/Store'; +import { useAgentPanelContext } from '~/Providers/AgentPanelContext'; import { useLocalize, usePluginDialogHelpers } from '~/hooks'; import { useAvailableToolsQuery } from '~/data-provider'; import ToolItem from './ToolItem'; @@ -20,14 +22,13 @@ function ToolSelectDialog({ isOpen, endpoint, setIsOpen, - toolsFormKey, }: TPluginStoreDialogProps & { - toolsFormKey: string; endpoint: AssistantsEndpoint | EModelEndpoint.agents; }) { const localize = useLocalize(); - const { getValues, setValue } = useFormContext(); + const { getValues, setValue } = useFormContext(); const { data: tools } = useAvailableToolsQuery(endpoint); + const { groupedTools } = useAgentPanelContext(); const isAgentTools = isAgentsEndpoint(endpoint); const { @@ -66,11 +67,23 @@ function ToolSelectDialog({ }, 5000); }; + const toolsFormKey = 'tools'; const handleInstall = (pluginAction: TPluginAction) => { const addFunction = () => { - const fns = getValues(toolsFormKey).slice(); - fns.push(pluginAction.pluginKey); - setValue(toolsFormKey, fns); + const installedToolIds: string[] = getValues(toolsFormKey) || []; + // Add the parent + installedToolIds.push(pluginAction.pluginKey); + + // If this tool is a group, add subtools too + const groupObj = groupedTools[pluginAction.pluginKey]; + if (groupObj?.tools && groupObj.tools.length > 0) { + for (const sub of groupObj.tools) { + if (!installedToolIds.includes(sub.tool_id)) { + installedToolIds.push(sub.tool_id); + } + } + } + setValue(toolsFormKey, Array.from(new Set(installedToolIds))); // no duplicates just in case }; if (!pluginAction.auth) { @@ -87,17 +100,21 @@ function ToolSelectDialog({ setShowPluginAuthForm(false); }; - const onRemoveTool = (tool: string) => { - setShowPluginAuthForm(false); + const onRemoveTool = (toolId: string) => { + const groupObj = groupedTools[toolId]; + const toolIdsToRemove = [toolId]; + if (groupObj?.tools && groupObj.tools.length > 0) { + toolIdsToRemove.push(...groupObj.tools.map((sub) => sub.tool_id)); + } + // Remove these from the formTools updateUserPlugins.mutate( - { pluginKey: tool, action: 'uninstall', auth: null, isEntityTool: true }, + { pluginKey: toolId, action: 'uninstall', auth: {}, isEntityTool: true }, { - onError: (error: unknown) => { - handleInstallError(error as TError); - }, + onError: (error: unknown) => handleInstallError(error as TError), onSuccess: () => { - const fns = getValues(toolsFormKey).filter((fn: string) => fn !== tool); - setValue(toolsFormKey, fns); + const 
remainingToolIds = + getValues(toolsFormKey)?.filter((toolId) => !toolIdsToRemove.includes(toolId)) || []; + setValue(toolsFormKey, remainingToolIds); }, }, ); @@ -113,17 +130,33 @@ function ToolSelectDialog({ if (authConfig && authConfig.length > 0 && !authenticated) { setShowPluginAuthForm(true); } else { - handleInstall({ pluginKey, action: 'install', auth: null }); + handleInstall({ + pluginKey, + action: 'install', + auth: {}, + }); } }; - const filteredTools = tools?.filter((tool) => - tool.name.toLowerCase().includes(searchValue.toLowerCase()), + const filteredTools = Object.values(groupedTools || {}).filter( + (tool: AgentToolType & { tools?: AgentToolType[] }) => { + // Check if the parent tool matches + if (tool.metadata?.name?.toLowerCase().includes(searchValue.toLowerCase())) { + return true; + } + // Check if any child tools match + if (tool.tools) { + return tool.tools.some((childTool) => + childTool.metadata?.name?.toLowerCase().includes(searchValue.toLowerCase()), + ); + } + return false; + }, ); useEffect(() => { if (filteredTools) { - setMaxPage(Math.ceil(filteredTools.length / itemsPerPage)); + setMaxPage(Math.ceil(Object.keys(filteredTools || {}).length / itemsPerPage)); if (searchChanged) { setCurrentPage(1); setSearchChanged(false); @@ -155,7 +188,7 @@ function ToolSelectDialog({ {/* Full-screen container to center the panel */}
@@ -228,9 +261,9 @@ function ToolSelectDialog({ onAddTool(tool.pluginKey)} - onRemoveTool={() => onRemoveTool(tool.pluginKey)} + isInstalled={getValues(toolsFormKey)?.includes(tool.tool_id) || false} + onAddTool={() => onAddTool(tool.tool_id)} + onRemoveTool={() => onRemoveTool(tool.tool_id)} /> ))}
diff --git a/client/src/locales/en/translation.json b/client/src/locales/en/translation.json index c314329edf..0d6d40f398 100644 --- a/client/src/locales/en/translation.json +++ b/client/src/locales/en/translation.json @@ -1040,5 +1040,8 @@ "com_ui_trust_app": "I trust this application", "com_agents_mcp_trust_subtext": "Custom connectors are not verified by LibreChat", "com_ui_icon": "Icon", - "com_agents_mcp_icon_size": "Minimum size 128 x 128 px" + "com_agents_mcp_icon_size": "Minimum size 128 x 128 px", + "com_ui_tool_collection_prefix": "A collection of tools from", + "com_ui_tool_info": "Tool Information", + "com_ui_tool_more_info": "More information about this tool" } diff --git a/packages/data-provider/src/actions.ts b/packages/data-provider/src/actions.ts index e0e9b10d7b..cc768154e5 100644 --- a/packages/data-provider/src/actions.ts +++ b/packages/data-provider/src/actions.ts @@ -3,15 +3,11 @@ import _axios from 'axios'; import { URL } from 'url'; import crypto from 'crypto'; import { load } from 'js-yaml'; -import type { - FunctionTool, - Schema, - Reference, - ActionMetadata, - ActionMetadataRuntime, -} from './types/assistants'; +import type { ActionMetadata, ActionMetadataRuntime } from './types/agents'; +import type { FunctionTool, Schema, Reference } from './types/assistants'; +import { AuthTypeEnum, AuthorizationTypeEnum } from './types/agents'; import type { OpenAPIV3 } from 'openapi-types'; -import { Tools, AuthTypeEnum, AuthorizationTypeEnum } from './types/assistants'; +import { Tools } from './types/assistants'; export type ParametersSchema = { type: string; diff --git a/packages/data-provider/src/data-service.ts b/packages/data-provider/src/data-service.ts index c78553f3a3..b956364835 100644 --- a/packages/data-provider/src/data-service.ts +++ b/packages/data-provider/src/data-service.ts @@ -2,6 +2,7 @@ import type { AxiosResponse } from 'axios'; import type * as t from './types'; import * as endpoints from './api-endpoints'; import * as a from './types/assistants'; +import * as ag from './types/agents'; import * as m from './types/mutations'; import * as q from './types/queries'; import * as f from './types/files'; @@ -351,7 +352,7 @@ export const updateAction = (data: m.UpdateActionVariables): Promise { +export function getActions(): Promise { return request.get( endpoints.agents({ path: 'actions', @@ -407,7 +408,7 @@ export const updateAgent = ({ export const duplicateAgent = ({ agent_id, -}: m.DuplicateAgentBody): Promise<{ agent: a.Agent; actions: a.Action[] }> => { +}: m.DuplicateAgentBody): Promise<{ agent: a.Agent; actions: ag.Action[] }> => { return request.post( endpoints.agents({ path: `${agent_id}/duplicate`, diff --git a/packages/data-provider/src/mcp.ts b/packages/data-provider/src/mcp.ts index ffcb65cda1..990b46e511 100644 --- a/packages/data-provider/src/mcp.ts +++ b/packages/data-provider/src/mcp.ts @@ -1,7 +1,7 @@ import { z } from 'zod'; import type { TUser } from './types'; import { extractEnvVariable } from './utils'; -import { TokenExchangeMethodEnum } from './types/assistants'; +import { TokenExchangeMethodEnum } from './types/agents'; const BaseOptionsSchema = z.object({ iconPath: z.string().optional(), diff --git a/packages/data-provider/src/types/agents.ts b/packages/data-provider/src/types/agents.ts index d4218440c6..ff286c21f4 100644 --- a/packages/data-provider/src/types/agents.ts +++ b/packages/data-provider/src/types/agents.ts @@ -1,7 +1,7 @@ /* eslint-disable @typescript-eslint/no-namespace */ import { StepTypes, ContentTypes, 
ToolCallTypes } from './runs'; +import type { TAttachment, TPlugin } from 'src/schemas'; import type { FunctionToolCall } from './assistants'; -import type { TAttachment } from 'src/schemas'; export namespace Agents { export type MessageType = 'human' | 'ai' | 'generic' | 'system' | 'function' | 'tool' | 'remove'; @@ -279,3 +279,79 @@ export type ToolCallResult = { conversationId: string; attachments?: TAttachment[]; }; + +export enum AuthTypeEnum { + ServiceHttp = 'service_http', + OAuth = 'oauth', + None = 'none', +} + +export enum AuthorizationTypeEnum { + Bearer = 'bearer', + Basic = 'basic', + Custom = 'custom', +} + +export enum TokenExchangeMethodEnum { + DefaultPost = 'default_post', + BasicAuthHeader = 'basic_auth_header', +} + +export type Action = { + action_id: string; + type?: string; + settings?: Record; + metadata: ActionMetadata; + version: number | string; +} & ({ assistant_id: string; agent_id?: never } | { assistant_id?: never; agent_id: string }); + +export type ActionMetadata = { + api_key?: string; + auth?: ActionAuth; + domain?: string; + privacy_policy_url?: string; + raw_spec?: string; + oauth_client_id?: string; + oauth_client_secret?: string; +}; + +export type ActionAuth = { + authorization_type?: AuthorizationTypeEnum; + custom_auth_header?: string; + type?: AuthTypeEnum; + authorization_content_type?: string; + authorization_url?: string; + client_url?: string; + scope?: string; + token_exchange_method?: TokenExchangeMethodEnum; +}; + +export type ActionMetadataRuntime = ActionMetadata & { + oauth_access_token?: string; + oauth_refresh_token?: string; + oauth_token_expires_at?: Date; +}; + +export type MCP = { + mcp_id: string; + metadata: MCPMetadata; +} & ({ assistant_id: string; agent_id?: never } | { assistant_id?: never; agent_id: string }); + +export type MCPMetadata = Omit & { + name?: string; + description?: string; + url?: string; + tools?: string[]; + auth?: MCPAuth; + icon?: string; + trust?: boolean; +}; + +export type MCPAuth = ActionAuth; + +export type AgentToolType = { + tool_id: string; + metadata: ToolMetadata; +} & ({ assistant_id: string; agent_id?: never } | { assistant_id?: never; agent_id: string }); + +export type ToolMetadata = TPlugin; diff --git a/packages/data-provider/src/types/assistants.ts b/packages/data-provider/src/types/assistants.ts index c1e6f16965..c5dadc382b 100644 --- a/packages/data-provider/src/types/assistants.ts +++ b/packages/data-provider/src/types/assistants.ts @@ -487,77 +487,6 @@ export const actionDomainSeparator = '---'; export const hostImageIdSuffix = '_host_copy'; export const hostImageNamePrefix = 'host_copy_'; -export enum AuthTypeEnum { - ServiceHttp = 'service_http', - OAuth = 'oauth', - None = 'none', -} - -export enum AuthorizationTypeEnum { - Bearer = 'bearer', - Basic = 'basic', - Custom = 'custom', -} - -export enum TokenExchangeMethodEnum { - DefaultPost = 'default_post', - BasicAuthHeader = 'basic_auth_header', -} - -export type ActionAuth = { - authorization_type?: AuthorizationTypeEnum; - custom_auth_header?: string; - type?: AuthTypeEnum; - authorization_content_type?: string; - authorization_url?: string; - client_url?: string; - scope?: string; - token_exchange_method?: TokenExchangeMethodEnum; -}; - -export type MCPAuth = ActionAuth; - -export type ActionMetadata = { - api_key?: string; - auth?: ActionAuth; - domain?: string; - privacy_policy_url?: string; - raw_spec?: string; - oauth_client_id?: string; - oauth_client_secret?: string; -}; - -export type MCPMetadata = Omit & { - name?: 
string; - description?: string; - url?: string; - tools?: string[]; - auth?: MCPAuth; - icon?: string; - trust?: boolean; -}; - -export type ActionMetadataRuntime = ActionMetadata & { - oauth_access_token?: string; - oauth_refresh_token?: string; - oauth_token_expires_at?: Date; -}; - -/* Assistant types */ - -export type Action = { - action_id: string; - type?: string; - settings?: Record; - metadata: ActionMetadata; - version: number | string; -} & ({ assistant_id: string; agent_id?: never } | { assistant_id?: never; agent_id: string }); - -export type MCP = { - mcp_id: string; - metadata: MCPMetadata; -} & ({ assistant_id: string; agent_id?: never } | { assistant_id?: never; agent_id: string }); - export type AssistantAvatar = { filepath: string; source: string; diff --git a/packages/data-provider/src/types/mutations.ts b/packages/data-provider/src/types/mutations.ts index 4e1f01debe..cd6cae75c8 100644 --- a/packages/data-provider/src/types/mutations.ts +++ b/packages/data-provider/src/types/mutations.ts @@ -6,14 +6,13 @@ import { Assistant, AssistantCreateParams, AssistantUpdateParams, - ActionMetadata, FunctionTool, AssistantDocument, - Action, Agent, AgentCreateParams, AgentUpdateParams, } from './assistants'; +import { Action, ActionMetadata } from './agents'; export type MutationOptions< Response, From 3e4b01de82e30897fc01cdd9327ae5823fde9023 Mon Sep 17 00:00:00 2001 From: Danny Avila Date: Thu, 19 Jun 2025 18:27:55 -0400 Subject: [PATCH 09/16] =?UTF-8?q?=F0=9F=97=9D=EF=B8=8F=20feat:=20User=20Pr?= =?UTF-8?q?ovided=20Credentials=20for=20MCP=20Servers=20(#7980)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * 🗝️ feat: Per-User Credentials for MCP Servers chore: add aider to gitignore feat: fill custom variables to MCP server feat: replace placeholders with custom user MCP variables feat: handle MCP install/uninstall (uses pluginauths) feat: add MCP custom variables dialog to MCPSelect feat: add MCP custom variables dialog to the side panel feat: do not require to fill MCP credentials for in tools dialog feat: add translations keys (en+cs) for custom MCP variables fix: handle LIBRECHAT_USER_ID correctly during MCP var replacement style: remove unused MCP translation keys style: fix eslint for MCP custom vars chore: move aider gitignore to AI section * feat: Add Plugin Authentication Methods to data-schemas * refactor: Replace PluginAuth model methods with new utility functions for improved code organization and maintainability * refactor: Move IPluginAuth interface to types directory for better organization and update pluginAuth schema to use the new import * refactor: Remove unused getUsersPluginsAuthValuesMap function and streamline PluginService.js; add new getPluginAuthMap function for improved plugin authentication handling * chore: fix typing for optional tools property with GenericTool[] type * chore: update librechat-data-provider version to 0.7.88 * refactor: optimize getUserMCPAuthMap function by reducing variable usage and improving server key collection logic * refactor: streamline MCP tool creation by removing customUserVars parameter and enhancing user-specific authentication handling to avoid closure encapsulation * refactor: extract processSingleValue function to streamline MCP environment variable processing and enhance readability * refactor: enhance MCP tool processing logic by simplifying conditions and improving authentication handling for custom user variables * ci: fix action tests * chore: fix imports, remove 
comments * chore: remove non-english translations * fix: remove newline at end of translation.json file --------- Co-authored-by: Aleš Kůtek --- .gitignore | 1 + api/app/clients/tools/util/handleTools.js | 8 +- api/server/controllers/PluginController.js | 57 +++- api/server/controllers/UserController.js | 61 ++++- api/server/controllers/agents/client.js | 24 +- api/server/routes/config.js | 17 +- api/server/services/Config/getCustomConfig.js | 48 +++- api/server/services/MCP.js | 6 +- api/server/services/PluginService.js | 56 ++-- .../src/components/Chat/Input/MCPSelect.tsx | 207 ++++++++++++-- .../src/components/SidePanel/MCP/MCPPanel.tsx | 253 ++++++++++++++++++ .../SidePanel/MCP/MCPPanelSkeleton.tsx | 21 ++ .../src/components/Tools/ToolSelectDialog.tsx | 25 +- client/src/components/ui/MCPConfigDialog.tsx | 122 +++++++++ client/src/components/ui/MultiSelect.tsx | 51 ++-- client/src/hooks/Nav/useSideNavLinks.ts | 20 ++ client/src/locales/en/translation.json | 13 + package-lock.json | 2 +- packages/api/src/agents/auth.ts | 93 +++++++ packages/api/src/agents/run.ts | 10 +- packages/api/src/index.ts | 1 + packages/api/src/mcp/auth.ts | 58 ++++ packages/api/src/mcp/manager.ts | 37 ++- packages/api/src/mcp/types/index.ts | 10 +- packages/api/src/mcp/utils.ts | 3 + packages/data-provider/package.json | 2 +- packages/data-provider/specs/actions.spec.ts | 10 +- packages/data-provider/specs/mcp.spec.ts | 183 +++++++++++++ packages/data-provider/src/config.ts | 15 +- packages/data-provider/src/data-service.ts | 6 +- packages/data-provider/src/mcp.ts | 82 ++++-- packages/data-schemas/src/methods/index.ts | 7 +- .../data-schemas/src/methods/pluginAuth.ts | 140 ++++++++++ .../data-schemas/src/schema/pluginAuth.ts | 12 +- packages/data-schemas/src/types/index.ts | 1 + packages/data-schemas/src/types/pluginAuth.ts | 40 +++ 36 files changed, 1536 insertions(+), 166 deletions(-) create mode 100644 client/src/components/SidePanel/MCP/MCPPanel.tsx create mode 100644 client/src/components/SidePanel/MCP/MCPPanelSkeleton.tsx create mode 100644 client/src/components/ui/MCPConfigDialog.tsx create mode 100644 packages/api/src/agents/auth.ts create mode 100644 packages/api/src/mcp/auth.ts create mode 100644 packages/data-schemas/src/methods/pluginAuth.ts create mode 100644 packages/data-schemas/src/types/pluginAuth.ts diff --git a/.gitignore b/.gitignore index f49594afdf..c9658f17e6 100644 --- a/.gitignore +++ b/.gitignore @@ -55,6 +55,7 @@ bower_components/ # AI .clineignore .cursor +.aider* # Floobits .floo diff --git a/api/app/clients/tools/util/handleTools.js b/api/app/clients/tools/util/handleTools.js index b5a40fc4a3..c233c0f762 100644 --- a/api/app/clients/tools/util/handleTools.js +++ b/api/app/clients/tools/util/handleTools.js @@ -1,15 +1,14 @@ +const { mcpToolPattern } = require('@librechat/api'); const { logger } = require('@librechat/data-schemas'); const { SerpAPI } = require('@langchain/community/tools/serpapi'); const { Calculator } = require('@langchain/community/tools/calculator'); const { EnvVar, createCodeExecutionTool, createSearchTool } = require('@librechat/agents'); const { Tools, - Constants, EToolResources, loadWebSearchAuth, replaceSpecialVars, } = require('librechat-data-provider'); -const { getUserPluginAuthValue } = require('~/server/services/PluginService'); const { availableTools, manifestToolMap, @@ -29,12 +28,11 @@ const { } = require('../'); const { primeFiles: primeCodeFiles } = require('~/server/services/Files/Code/process'); const { createFileSearchTool, primeFiles: 
primeSearchFiles } = require('./fileSearch'); +const { getUserPluginAuthValue } = require('~/server/services/PluginService'); const { loadAuthValues } = require('~/server/services/Tools/credentials'); const { getCachedTools } = require('~/server/services/Config'); const { createMCPTool } = require('~/server/services/MCP'); -const mcpToolPattern = new RegExp(`^.+${Constants.mcp_delimiter}.+$`); - /** * Validates the availability and authentication of tools for a user based on environment variables or user-specific plugin authentication values. * Tools without required authentication or with valid authentication are considered valid. @@ -94,7 +92,7 @@ const validateTools = async (user, tools = []) => { return Array.from(validToolsSet.values()); } catch (err) { logger.error('[validateTools] There was a problem validating tools', err); - throw new Error('There was a problem validating tools'); + throw new Error(err); } }; diff --git a/api/server/controllers/PluginController.js b/api/server/controllers/PluginController.js index 98e9cbfc45..f7aad84aeb 100644 --- a/api/server/controllers/PluginController.js +++ b/api/server/controllers/PluginController.js @@ -5,6 +5,7 @@ const { getToolkitKey } = require('~/server/services/ToolService'); const { getMCPManager, getFlowStateManager } = require('~/config'); const { availableTools } = require('~/app/clients/tools'); const { getLogStores } = require('~/cache'); +const { Constants } = require('librechat-data-provider'); /** * Filters out duplicate plugins from the list of plugins. @@ -173,16 +174,56 @@ const getAvailableTools = async (req, res) => { }); const toolDefinitions = await getCachedTools({ includeGlobal: true }); - const tools = authenticatedPlugins.filter( - (plugin) => - toolDefinitions[plugin.pluginKey] !== undefined || - (plugin.toolkit === true && - Object.keys(toolDefinitions).some((key) => getToolkitKey(key) === plugin.pluginKey)), - ); - await cache.set(CacheKeys.TOOLS, tools); - res.status(200).json(tools); + const toolsOutput = []; + for (const plugin of authenticatedPlugins) { + const isToolDefined = toolDefinitions[plugin.pluginKey] !== undefined; + const isToolkit = + plugin.toolkit === true && + Object.keys(toolDefinitions).some((key) => getToolkitKey(key) === plugin.pluginKey); + + if (!isToolDefined && !isToolkit) { + continue; + } + + const toolToAdd = { ...plugin }; + + if (!plugin.pluginKey.includes(Constants.mcp_delimiter)) { + toolsOutput.push(toolToAdd); + continue; + } + + const parts = plugin.pluginKey.split(Constants.mcp_delimiter); + const serverName = parts[parts.length - 1]; + const serverConfig = customConfig?.mcpServers?.[serverName]; + + if (!serverConfig?.customUserVars) { + toolsOutput.push(toolToAdd); + continue; + } + + const customVarKeys = Object.keys(serverConfig.customUserVars); + + if (customVarKeys.length === 0) { + toolToAdd.authConfig = []; + toolToAdd.authenticated = true; + } else { + toolToAdd.authConfig = Object.entries(serverConfig.customUserVars).map(([key, value]) => ({ + authField: key, + label: value.title || key, + description: value.description || '', + })); + toolToAdd.authenticated = false; + } + + toolsOutput.push(toolToAdd); + } + + const finalTools = filterUniquePlugins(toolsOutput); + await cache.set(CacheKeys.TOOLS, finalTools); + res.status(200).json(finalTools); } catch (error) { + logger.error('[getAvailableTools]', error); res.status(500).json({ message: error.message }); } }; diff --git a/api/server/controllers/UserController.js b/api/server/controllers/UserController.js index 
bcffb2189c..69791dd7a5 100644 --- a/api/server/controllers/UserController.js +++ b/api/server/controllers/UserController.js @@ -1,5 +1,6 @@ const { Tools, + Constants, FileSources, webSearchKeys, extractWebSearchEnvVars, @@ -23,6 +24,7 @@ const { processDeleteRequest } = require('~/server/services/Files/process'); const { Transaction, Balance, User } = require('~/db/models'); const { deleteToolCalls } = require('~/models/ToolCall'); const { deleteAllSharedLinks } = require('~/models'); +const { getMCPManager } = require('~/config'); const getUserController = async (req, res) => { /** @type {MongoUser} */ @@ -102,10 +104,22 @@ const updateUserPluginsController = async (req, res) => { } let keys = Object.keys(auth); - if (keys.length === 0 && pluginKey !== Tools.web_search) { + const values = Object.values(auth); // Used in 'install' block + + const isMCPTool = pluginKey.startsWith('mcp_') || pluginKey.includes(Constants.mcp_delimiter); + + // Early exit condition: + // If keys are empty (meaning auth: {} was likely sent for uninstall, or auth was empty for install) + // AND it's not web_search (which has special key handling to populate `keys` for uninstall) + // AND it's NOT (an uninstall action FOR an MCP tool - we need to proceed for this case to clear all its auth) + // THEN return. + if ( + keys.length === 0 && + pluginKey !== Tools.web_search && + !(action === 'uninstall' && isMCPTool) + ) { return res.status(200).send(); } - const values = Object.values(auth); /** @type {number} */ let status = 200; @@ -132,16 +146,53 @@ const updateUserPluginsController = async (req, res) => { } } } else if (action === 'uninstall') { - for (let i = 0; i < keys.length; i++) { - authService = await deleteUserPluginAuth(user.id, keys[i]); + // const isMCPTool was defined earlier + if (isMCPTool && keys.length === 0) { + // This handles the case where auth: {} is sent for an MCP tool uninstall. + // It means "delete all credentials associated with this MCP pluginKey". + authService = await deleteUserPluginAuth(user.id, null, true, pluginKey); if (authService instanceof Error) { - logger.error('[authService]', authService); + logger.error( + `[authService] Error deleting all auth for MCP tool ${pluginKey}:`, + authService, + ); ({ status, message } = authService); } + } else { + // This handles: + // 1. Web_search uninstall (keys will be populated with all webSearchKeys if auth was {}). + // 2. Other tools uninstall (if keys were provided). + // 3. MCP tool uninstall if specific keys were provided in `auth` (not current frontend behavior). + // If keys is empty for non-MCP tools (and not web_search), this loop won't run, and nothing is deleted. 
+ for (let i = 0; i < keys.length; i++) { + authService = await deleteUserPluginAuth(user.id, keys[i]); // Deletes by authField name + if (authService instanceof Error) { + logger.error('[authService] Error deleting specific auth key:', authService); + ({ status, message } = authService); + } + } } } if (status === 200) { + // If auth was updated successfully, disconnect MCP sessions as they might use these credentials + if (pluginKey.startsWith(Constants.mcp_prefix)) { + try { + const mcpManager = getMCPManager(user.id); + if (mcpManager) { + logger.info( + `[updateUserPluginsController] Disconnecting MCP connections for user ${user.id} after plugin auth update for ${pluginKey}.`, + ); + await mcpManager.disconnectUserConnections(user.id); + } + } catch (disconnectError) { + logger.error( + `[updateUserPluginsController] Error disconnecting MCP connections for user ${user.id} after plugin auth update:`, + disconnectError, + ); + // Do not fail the request for this, but log it. + } + } return res.status(status).send(); } diff --git a/api/server/controllers/agents/client.js b/api/server/controllers/agents/client.js index 41e457e5b8..6769348d95 100644 --- a/api/server/controllers/agents/client.js +++ b/api/server/controllers/agents/client.js @@ -31,11 +31,15 @@ const { } = require('librechat-data-provider'); const { DynamicStructuredTool } = require('@langchain/core/tools'); const { getBufferString, HumanMessage } = require('@langchain/core/messages'); -const { getCustomEndpointConfig, checkCapability } = require('~/server/services/Config'); +const { + getCustomEndpointConfig, + createGetMCPAuthMap, + checkCapability, +} = require('~/server/services/Config'); const { addCacheControl, createContextHandlers } = require('~/app/clients/prompts'); const { initializeAgent } = require('~/server/services/Endpoints/agents/agent'); const { spendTokens, spendStructuredTokens } = require('~/models/spendTokens'); -const { setMemory, deleteMemory, getFormattedMemories } = require('~/models'); +const { getFormattedMemories, deleteMemory, setMemory } = require('~/models'); const { encodeAndFormat } = require('~/server/services/Files/images/encode'); const initOpenAI = require('~/server/services/Endpoints/openAI/initialize'); const { checkAccess } = require('~/server/middleware/roles/access'); @@ -679,6 +683,8 @@ class AgentClient extends BaseClient { version: 'v2', }; + const getUserMCPAuthMap = await createGetMCPAuthMap(); + const toolSet = new Set((this.options.agent.tools ?? 
[]).map((tool) => tool && tool.name)); let { messages: initialMessages, indexTokenCountMap } = formatAgentMessages( payload, @@ -798,6 +804,20 @@ class AgentClient extends BaseClient { run.Graph.contentData = contentData; } + try { + if (getUserMCPAuthMap) { + config.configurable.userMCPAuthMap = await getUserMCPAuthMap({ + tools: agent.tools, + userId: this.options.req.user.id, + }); + } + } catch (err) { + logger.error( + `[api/server/controllers/agents/client.js #chatCompletion] Error getting custom user vars for agent ${agent.id}`, + err, + ); + } + await run.processStream({ messages }, config, { keepContent: i !== 0, tokenCounter: createTokenCounter(this.getEncoding()), diff --git a/api/server/routes/config.js b/api/server/routes/config.js index a53a636d05..e50fb9f452 100644 --- a/api/server/routes/config.js +++ b/api/server/routes/config.js @@ -1,10 +1,11 @@ const express = require('express'); +const { logger } = require('@librechat/data-schemas'); const { CacheKeys, defaultSocialLogins, Constants } = require('librechat-data-provider'); +const { getCustomConfig } = require('~/server/services/Config/getCustomConfig'); const { getLdapConfig } = require('~/server/services/Config/ldap'); const { getProjectByName } = require('~/models/Project'); const { isEnabled } = require('~/server/utils'); const { getLogStores } = require('~/cache'); -const { logger } = require('~/config'); const router = express.Router(); const emailLoginEnabled = @@ -21,12 +22,15 @@ const publicSharedLinksEnabled = router.get('/', async function (req, res) { const cache = getLogStores(CacheKeys.CONFIG_STORE); + const cachedStartupConfig = await cache.get(CacheKeys.STARTUP_CONFIG); if (cachedStartupConfig) { res.send(cachedStartupConfig); return; } + const config = await getCustomConfig(); + const isBirthday = () => { const today = new Date(); return today.getMonth() === 1 && today.getDate() === 11; @@ -96,6 +100,17 @@ router.get('/', async function (req, res) { bundlerURL: process.env.SANDPACK_BUNDLER_URL, staticBundlerURL: process.env.SANDPACK_STATIC_BUNDLER_URL, }; + + payload.mcpServers = {}; + if (config.mcpServers) { + for (const serverName in config.mcpServers) { + const serverConfig = config.mcpServers[serverName]; + payload.mcpServers[serverName] = { + customUserVars: serverConfig?.customUserVars || {}, + }; + } + } + /** @type {TCustomConfig['webSearch']} */ const webSearchConfig = req.app.locals.webSearch; if ( diff --git a/api/server/services/Config/getCustomConfig.js b/api/server/services/Config/getCustomConfig.js index 74828789fc..0851b89a46 100644 --- a/api/server/services/Config/getCustomConfig.js +++ b/api/server/services/Config/getCustomConfig.js @@ -1,6 +1,10 @@ +const { logger } = require('@librechat/data-schemas'); +const { getUserMCPAuthMap } = require('@librechat/api'); const { CacheKeys, EModelEndpoint } = require('librechat-data-provider'); const { normalizeEndpointName, isEnabled } = require('~/server/utils'); const loadCustomConfig = require('./loadCustomConfig'); +const { getCachedTools } = require('./getCachedTools'); +const { findPluginAuthsByKeys } = require('~/models'); const getLogStores = require('~/cache/getLogStores'); /** @@ -50,4 +54,46 @@ const getCustomEndpointConfig = async (endpoint) => { ); }; -module.exports = { getCustomConfig, getBalanceConfig, getCustomEndpointConfig }; +async function createGetMCPAuthMap() { + const customConfig = await getCustomConfig(); + const mcpServers = customConfig?.mcpServers; + const hasCustomUserVars = Object.values(mcpServers).some((server) 
=> server.customUserVars); + if (!hasCustomUserVars) { + return; + } + + /** + * @param {Object} params + * @param {GenericTool[]} [params.tools] + * @param {string} params.userId + * @returns {Promise> | undefined>} + */ + return async function ({ tools, userId }) { + try { + if (!tools || tools.length === 0) { + return; + } + const appTools = await getCachedTools({ + userId, + }); + return await getUserMCPAuthMap({ + tools, + userId, + appTools, + findPluginAuthsByKeys, + }); + } catch (err) { + logger.error( + `[api/server/controllers/agents/client.js #chatCompletion] Error getting custom user vars for agent`, + err, + ); + } + }; +} + +module.exports = { + getCustomConfig, + getBalanceConfig, + createGetMCPAuthMap, + getCustomEndpointConfig, +}; diff --git a/api/server/services/MCP.js b/api/server/services/MCP.js index 9720305668..527fe2d514 100644 --- a/api/server/services/MCP.js +++ b/api/server/services/MCP.js @@ -168,6 +168,9 @@ async function createMCPTool({ req, res, toolKey, provider: _provider }) { derivedSignal.addEventListener('abort', abortHandler, { once: true }); } + const customUserVars = + config?.configurable?.userMCPAuthMap?.[`${Constants.mcp_prefix}${serverName}`]; + const result = await mcpManager.callTool({ serverName, toolName, @@ -175,8 +178,9 @@ async function createMCPTool({ req, res, toolKey, provider: _provider }) { toolArguments, options: { signal: derivedSignal, - user: config?.configurable?.user, }, + user: config?.configurable?.user, + customUserVars, flowManager, tokenMethods: { findToken, diff --git a/api/server/services/PluginService.js b/api/server/services/PluginService.js index 04c5abb32b..af42e0471c 100644 --- a/api/server/services/PluginService.js +++ b/api/server/services/PluginService.js @@ -1,6 +1,6 @@ const { logger } = require('@librechat/data-schemas'); const { encrypt, decrypt } = require('@librechat/api'); -const { PluginAuth } = require('~/db/models'); +const { findOnePluginAuth, updatePluginAuth, deletePluginAuth } = require('~/models'); /** * Asynchronously retrieves and decrypts the authentication value for a user's plugin, based on a specified authentication field. 
@@ -25,7 +25,7 @@ const { PluginAuth } = require('~/db/models'); */ const getUserPluginAuthValue = async (userId, authField, throwError = true) => { try { - const pluginAuth = await PluginAuth.findOne({ userId, authField }).lean(); + const pluginAuth = await findOnePluginAuth({ userId, authField }); if (!pluginAuth) { throw new Error(`No plugin auth ${authField} found for user ${userId}`); } @@ -79,23 +79,12 @@ const getUserPluginAuthValue = async (userId, authField, throwError = true) => { const updateUserPluginAuth = async (userId, authField, pluginKey, value) => { try { const encryptedValue = await encrypt(value); - const pluginAuth = await PluginAuth.findOne({ userId, authField }).lean(); - if (pluginAuth) { - return await PluginAuth.findOneAndUpdate( - { userId, authField }, - { $set: { value: encryptedValue } }, - { new: true, upsert: true }, - ).lean(); - } else { - const newPluginAuth = await new PluginAuth({ - userId, - authField, - value: encryptedValue, - pluginKey, - }); - await newPluginAuth.save(); - return newPluginAuth.toObject(); - } + return await updatePluginAuth({ + userId, + authField, + pluginKey, + value: encryptedValue, + }); } catch (err) { logger.error('[updateUserPluginAuth]', err); return err; @@ -105,26 +94,25 @@ const updateUserPluginAuth = async (userId, authField, pluginKey, value) => { /** * @async * @param {string} userId - * @param {string} authField - * @param {boolean} [all] + * @param {string | null} authField - The specific authField to delete, or null if `all` is true. + * @param {boolean} [all=false] - Whether to delete all auths for the user (or for a specific pluginKey if provided). + * @param {string} [pluginKey] - Optional. If `all` is true and `pluginKey` is provided, delete all auths for this user and pluginKey. * @returns {Promise} * @throws {Error} */ -const deleteUserPluginAuth = async (userId, authField, all = false) => { - if (all) { - try { - const response = await PluginAuth.deleteMany({ userId }); - return response; - } catch (err) { - logger.error('[deleteUserPluginAuth]', err); - return err; - } - } - +const deleteUserPluginAuth = async (userId, authField, all = false, pluginKey) => { try { - return await PluginAuth.deleteOne({ userId, authField }); + return await deletePluginAuth({ + userId, + authField, + pluginKey, + all, + }); } catch (err) { - logger.error('[deleteUserPluginAuth]', err); + logger.error( + `[deleteUserPluginAuth] Error deleting ${all ? 'all' : 'single'} auth(s) for userId: ${userId}${pluginKey ? 
` and pluginKey: ${pluginKey}` : ''}`, + err, + ); return err; } }; diff --git a/client/src/components/Chat/Input/MCPSelect.tsx b/client/src/components/Chat/Input/MCPSelect.tsx index 0cb0206bcd..ebe56c8024 100644 --- a/client/src/components/Chat/Input/MCPSelect.tsx +++ b/client/src/components/Chat/Input/MCPSelect.tsx @@ -1,13 +1,31 @@ -import React, { memo, useRef, useMemo, useEffect, useCallback } from 'react'; +import React, { memo, useRef, useMemo, useEffect, useCallback, useState } from 'react'; import { useRecoilState } from 'recoil'; +import { Settings2 } from 'lucide-react'; +import { useUpdateUserPluginsMutation } from 'librechat-data-provider/react-query'; import { Constants, EModelEndpoint, LocalStorageKeys } from 'librechat-data-provider'; +import type { TPlugin, TPluginAuthConfig, TUpdateUserPlugins } from 'librechat-data-provider'; +import MCPConfigDialog, { type ConfigFieldDetail } from '~/components/ui/MCPConfigDialog'; import { useAvailableToolsQuery } from '~/data-provider'; import useLocalStorage from '~/hooks/useLocalStorageAlt'; import MultiSelect from '~/components/ui/MultiSelect'; import { ephemeralAgentByConvoId } from '~/store'; +import { useToastContext } from '~/Providers'; import MCPIcon from '~/components/ui/MCPIcon'; import { useLocalize } from '~/hooks'; +interface McpServerInfo { + name: string; + pluginKey: string; + authConfig?: TPluginAuthConfig[]; + authenticated?: boolean; +} + +// Helper function to extract mcp_serverName from a full pluginKey like action_mcp_serverName +const getBaseMCPPluginKey = (fullPluginKey: string): string => { + const parts = fullPluginKey.split(Constants.mcp_delimiter); + return Constants.mcp_prefix + parts[parts.length - 1]; +}; + const storageCondition = (value: unknown, rawCurrentValue?: string | null) => { if (rawCurrentValue) { try { @@ -24,20 +42,45 @@ const storageCondition = (value: unknown, rawCurrentValue?: string | null) => { function MCPSelect({ conversationId }: { conversationId?: string | null }) { const localize = useLocalize(); + const { showToast } = useToastContext(); const key = conversationId ?? 
Constants.NEW_CONVO; const hasSetFetched = useRef(null); + const [isConfigModalOpen, setIsConfigModalOpen] = useState(false); + const [selectedToolForConfig, setSelectedToolForConfig] = useState(null); - const { data: mcpServerSet, isFetched } = useAvailableToolsQuery(EModelEndpoint.agents, { - select: (data) => { - const serverNames = new Set(); + const { data: mcpToolDetails, isFetched } = useAvailableToolsQuery(EModelEndpoint.agents, { + select: (data: TPlugin[]) => { + const mcpToolsMap = new Map(); data.forEach((tool) => { const isMCP = tool.pluginKey.includes(Constants.mcp_delimiter); if (isMCP && tool.chatMenu !== false) { const parts = tool.pluginKey.split(Constants.mcp_delimiter); - serverNames.add(parts[parts.length - 1]); + const serverName = parts[parts.length - 1]; + if (!mcpToolsMap.has(serverName)) { + mcpToolsMap.set(serverName, { + name: serverName, + pluginKey: tool.pluginKey, + authConfig: tool.authConfig, + authenticated: tool.authenticated, + }); + } } }); - return serverNames; + return Array.from(mcpToolsMap.values()); + }, + }); + + const updateUserPluginsMutation = useUpdateUserPluginsMutation({ + onSuccess: () => { + setIsConfigModalOpen(false); + showToast({ message: localize('com_nav_mcp_vars_updated'), status: 'success' }); + }, + onError: (error: unknown) => { + console.error('Error updating MCP auth:', error); + showToast({ + message: localize('com_nav_mcp_vars_update_error'), + status: 'error', + }); }, }); @@ -76,12 +119,12 @@ function MCPSelect({ conversationId }: { conversationId?: string | null }) { return; } hasSetFetched.current = key; - if ((mcpServerSet?.size ?? 0) > 0) { - setMCPValues(mcpValues.filter((mcp) => mcpServerSet?.has(mcp))); + if ((mcpToolDetails?.length ?? 0) > 0) { + setMCPValues(mcpValues.filter((mcp) => mcpToolDetails?.some((tool) => tool.name === mcp))); return; } setMCPValues([]); - }, [isFetched, setMCPValues, mcpServerSet, key, mcpValues]); + }, [isFetched, setMCPValues, mcpToolDetails, key, mcpValues]); const renderSelectedValues = useCallback( (values: string[], placeholder?: string) => { @@ -96,28 +139,140 @@ function MCPSelect({ conversationId }: { conversationId?: string | null }) { [localize], ); - const mcpServers = useMemo(() => { - return Array.from(mcpServerSet ?? []); - }, [mcpServerSet]); + const mcpServerNames = useMemo(() => { + return (mcpToolDetails ?? 
[]).map((tool) => tool.name); + }, [mcpToolDetails]); - if (!mcpServerSet || mcpServerSet.size === 0) { + const handleConfigSave = useCallback( + (targetName: string, authData: Record) => { + if (selectedToolForConfig && selectedToolForConfig.name === targetName) { + const basePluginKey = getBaseMCPPluginKey(selectedToolForConfig.pluginKey); + + const payload: TUpdateUserPlugins = { + pluginKey: basePluginKey, + action: 'install', + auth: authData, + }; + updateUserPluginsMutation.mutate(payload); + } + }, + [selectedToolForConfig, updateUserPluginsMutation], + ); + + const handleConfigRevoke = useCallback( + (targetName: string) => { + if (selectedToolForConfig && selectedToolForConfig.name === targetName) { + const basePluginKey = getBaseMCPPluginKey(selectedToolForConfig.pluginKey); + + const payload: TUpdateUserPlugins = { + pluginKey: basePluginKey, + action: 'uninstall', + auth: {}, + }; + updateUserPluginsMutation.mutate(payload); + } + }, + [selectedToolForConfig, updateUserPluginsMutation], + ); + + const renderItemContent = useCallback( + (serverName: string, defaultContent: React.ReactNode) => { + const tool = mcpToolDetails?.find((t) => t.name === serverName); + const hasAuthConfig = tool?.authConfig && tool.authConfig.length > 0; + + // Common wrapper for the main content (check mark + text) + // Ensures Check & Text are adjacent and the group takes available space. + const mainContentWrapper = ( +
{defaultContent}
+ ); + + if (tool && hasAuthConfig) { + return ( +
+ {mainContentWrapper} + +
+ ); + } + // For items without a settings icon, return the consistently wrapped main content. + return mainContentWrapper; + }, + [mcpToolDetails, setSelectedToolForConfig, setIsConfigModalOpen], + ); + + if (!mcpToolDetails || mcpToolDetails.length === 0) { return null; } return ( - } - selectItemsClassName="border border-blue-600/50 bg-blue-500/10 hover:bg-blue-700/10" - selectClassName="group relative inline-flex items-center justify-center md:justify-start gap-1.5 rounded-full border border-border-medium text-sm font-medium transition-all md:w-full size-9 p-2 md:p-3 bg-transparent shadow-sm hover:bg-surface-hover hover:shadow-md active:shadow-inner" - /> + <> + } + selectItemsClassName="border border-blue-600/50 bg-blue-500/10 hover:bg-blue-700/10" + selectClassName="group relative inline-flex items-center justify-center md:justify-start gap-1.5 rounded-full border border-border-medium text-sm font-medium transition-all md:w-full size-9 p-2 md:p-3 bg-transparent shadow-sm hover:bg-surface-hover hover:shadow-md active:shadow-inner" + /> + {selectedToolForConfig && ( + { + const schema: Record = {}; + if (selectedToolForConfig?.authConfig) { + selectedToolForConfig.authConfig.forEach((field) => { + schema[field.authField] = { + title: field.label, + description: field.description, + }; + }); + } + return schema; + })()} + initialValues={(() => { + const initial: Record = {}; + // Note: Actual initial values might need to be fetched if they are stored user-specifically + if (selectedToolForConfig?.authConfig) { + selectedToolForConfig.authConfig.forEach((field) => { + initial[field.authField] = ''; // Or fetched value + }); + } + return initial; + })()} + onSave={(authData) => { + if (selectedToolForConfig) { + handleConfigSave(selectedToolForConfig.name, authData); + } + }} + onRevoke={() => { + if (selectedToolForConfig) { + handleConfigRevoke(selectedToolForConfig.name); + } + }} + isSubmitting={updateUserPluginsMutation.isLoading} + /> + )} + ); } diff --git a/client/src/components/SidePanel/MCP/MCPPanel.tsx b/client/src/components/SidePanel/MCP/MCPPanel.tsx new file mode 100644 index 0000000000..aa2bf72112 --- /dev/null +++ b/client/src/components/SidePanel/MCP/MCPPanel.tsx @@ -0,0 +1,253 @@ +import React, { useState, useCallback, useMemo, useEffect } from 'react'; +import { ChevronLeft } from 'lucide-react'; +import { Constants } from 'librechat-data-provider'; +import { useForm, Controller } from 'react-hook-form'; +import { useUpdateUserPluginsMutation } from 'librechat-data-provider/react-query'; +import type { TUpdateUserPlugins } from 'librechat-data-provider'; +import { Button, Input, Label } from '~/components/ui'; +import { useGetStartupConfig } from '~/data-provider'; +import MCPPanelSkeleton from './MCPPanelSkeleton'; +import { useToastContext } from '~/Providers'; +import { useLocalize } from '~/hooks'; + +interface ServerConfigWithVars { + serverName: string; + config: { + customUserVars: Record; + }; +} + +export default function MCPPanel() { + const localize = useLocalize(); + const { showToast } = useToastContext(); + const { data: startupConfig, isLoading: startupConfigLoading } = useGetStartupConfig(); + const [selectedServerNameForEditing, setSelectedServerNameForEditing] = useState( + null, + ); + + const mcpServerDefinitions = useMemo(() => { + if (!startupConfig?.mcpServers) { + return []; + } + return Object.entries(startupConfig.mcpServers) + .filter( + ([, serverConfig]) => + serverConfig.customUserVars && Object.keys(serverConfig.customUserVars).length > 0, 
+ ) + .map(([serverName, config]) => ({ + serverName, + iconPath: null, + config: { + ...config, + customUserVars: config.customUserVars ?? {}, + }, + })); + }, [startupConfig?.mcpServers]); + + const updateUserPluginsMutation = useUpdateUserPluginsMutation({ + onSuccess: () => { + showToast({ message: localize('com_nav_mcp_vars_updated'), status: 'success' }); + }, + onError: (error) => { + console.error('Error updating MCP custom user variables:', error); + showToast({ + message: localize('com_nav_mcp_vars_update_error'), + status: 'error', + }); + }, + }); + + const handleSaveServerVars = useCallback( + (serverName: string, updatedValues: Record) => { + const payload: TUpdateUserPlugins = { + pluginKey: `${Constants.mcp_prefix}${serverName}`, + action: 'install', // 'install' action is used to set/update credentials/variables + auth: updatedValues, + }; + updateUserPluginsMutation.mutate(payload); + }, + [updateUserPluginsMutation], + ); + + const handleRevokeServerVars = useCallback( + (serverName: string) => { + const payload: TUpdateUserPlugins = { + pluginKey: `${Constants.mcp_prefix}${serverName}`, + action: 'uninstall', // 'uninstall' action clears the variables + auth: {}, // Empty auth for uninstall + }; + updateUserPluginsMutation.mutate(payload); + }, + [updateUserPluginsMutation], + ); + + const handleServerClickToEdit = (serverName: string) => { + setSelectedServerNameForEditing(serverName); + }; + + const handleGoBackToList = () => { + setSelectedServerNameForEditing(null); + }; + + if (startupConfigLoading) { + return ; + } + + if (mcpServerDefinitions.length === 0) { + return ( +
+ {localize('com_sidepanel_mcp_no_servers_with_vars')} +
+ ); + } + + if (selectedServerNameForEditing) { + // Editing View + const serverBeingEdited = mcpServerDefinitions.find( + (s) => s.serverName === selectedServerNameForEditing, + ); + + if (!serverBeingEdited) { + // Fallback to list view if server not found + setSelectedServerNameForEditing(null); + return ( +
+ {localize('com_ui_error')}: {localize('com_ui_mcp_server_not_found')} +
+ ); + } + + return ( +
+ +

+ {localize('com_sidepanel_mcp_variables_for', { '0': serverBeingEdited.serverName })} +

+ +
+ ); + } else { + // Server List View + return ( +
+
+ {mcpServerDefinitions.map((server) => ( + + ))} +
+
+ ); + } +} + +// Inner component for the form - remains the same +interface MCPVariableEditorProps { + server: ServerConfigWithVars; + onSave: (serverName: string, updatedValues: Record) => void; + onRevoke: (serverName: string) => void; + isSubmitting: boolean; +} + +function MCPVariableEditor({ server, onSave, onRevoke, isSubmitting }: MCPVariableEditorProps) { + const localize = useLocalize(); + + const { + control, + handleSubmit, + reset, + formState: { errors, isDirty }, + } = useForm>({ + defaultValues: {}, // Initialize empty, will be reset by useEffect + }); + + useEffect(() => { + // Always initialize with empty strings based on the schema + const initialFormValues = Object.keys(server.config.customUserVars).reduce( + (acc, key) => { + acc[key] = ''; + return acc; + }, + {} as Record, + ); + reset(initialFormValues); + }, [reset, server.config.customUserVars]); + + const onFormSubmit = (data: Record) => { + onSave(server.serverName, data); + }; + + const handleRevokeClick = () => { + onRevoke(server.serverName); + }; + + return ( +
+ {Object.entries(server.config.customUserVars).map(([key, details]) => ( +
+ + ( + + )} + /> + {details.description && ( +

+ )} + {errors[key] &&

{errors[key]?.message}

} +
+ ))} +
+ {Object.keys(server.config.customUserVars).length > 0 && ( + + )} + +
+
+ ); +} diff --git a/client/src/components/SidePanel/MCP/MCPPanelSkeleton.tsx b/client/src/components/SidePanel/MCP/MCPPanelSkeleton.tsx new file mode 100644 index 0000000000..61afbfcc2f --- /dev/null +++ b/client/src/components/SidePanel/MCP/MCPPanelSkeleton.tsx @@ -0,0 +1,21 @@ +import React from 'react'; +import { Skeleton } from '~/components/ui'; + +export default function MCPPanelSkeleton() { + return ( +
+ {[1, 2].map((serverIdx) => ( +
+ {/* Server Name */} + {[1, 2].map((varIdx) => ( +
+ {/* Variable Title */} + {/* Input Field */} + {/* Description */} +
+ ))} +
+ ))} +
+ ); +} diff --git a/client/src/components/Tools/ToolSelectDialog.tsx b/client/src/components/Tools/ToolSelectDialog.tsx index b3bc558405..cf8c958921 100644 --- a/client/src/components/Tools/ToolSelectDialog.tsx +++ b/client/src/components/Tools/ToolSelectDialog.tsx @@ -1,7 +1,7 @@ import { useEffect } from 'react'; import { Search, X } from 'lucide-react'; import { useFormContext } from 'react-hook-form'; -import { isAgentsEndpoint } from 'librechat-data-provider'; +import { Constants, isAgentsEndpoint } from 'librechat-data-provider'; import { Dialog, DialogPanel, DialogTitle, Description } from '@headlessui/react'; import { useUpdateUserPluginsMutation } from 'librechat-data-provider/react-query'; import type { @@ -125,16 +125,23 @@ function ToolSelectDialog({ const getAvailablePluginFromKey = tools?.find((p) => p.pluginKey === pluginKey); setSelectedPlugin(getAvailablePluginFromKey); - const { authConfig, authenticated = false } = getAvailablePluginFromKey ?? {}; + const isMCPTool = pluginKey.includes(Constants.mcp_delimiter); - if (authConfig && authConfig.length > 0 && !authenticated) { - setShowPluginAuthForm(true); + if (isMCPTool) { + // MCP tools have their variables configured elsewhere (e.g., MCPPanel or MCPSelect), + // so we directly proceed to install without showing the auth form. + handleInstall({ pluginKey, action: 'install', auth: {} }); } else { - handleInstall({ - pluginKey, - action: 'install', - auth: {}, - }); + const { authConfig, authenticated = false } = getAvailablePluginFromKey ?? {}; + if (authConfig && authConfig.length > 0 && !authenticated) { + setShowPluginAuthForm(true); + } else { + handleInstall({ + pluginKey, + action: 'install', + auth: {}, + }); + } } }; diff --git a/client/src/components/ui/MCPConfigDialog.tsx b/client/src/components/ui/MCPConfigDialog.tsx new file mode 100644 index 0000000000..d1a53bd902 --- /dev/null +++ b/client/src/components/ui/MCPConfigDialog.tsx @@ -0,0 +1,122 @@ +import React, { useEffect } from 'react'; +import { useForm, Controller } from 'react-hook-form'; +import { Input, Label, OGDialog, Button } from '~/components/ui'; +import OGDialogTemplate from '~/components/ui/OGDialogTemplate'; +import { useLocalize } from '~/hooks'; + +export interface ConfigFieldDetail { + title: string; + description: string; +} + +interface MCPConfigDialogProps { + isOpen: boolean; + onOpenChange: (isOpen: boolean) => void; + fieldsSchema: Record; + initialValues: Record; + onSave: (updatedValues: Record) => void; + isSubmitting?: boolean; + onRevoke?: () => void; + serverName: string; +} + +export default function MCPConfigDialog({ + isOpen, + onOpenChange, + fieldsSchema, + initialValues, + onSave, + isSubmitting = false, + onRevoke, + serverName, +}: MCPConfigDialogProps) { + const localize = useLocalize(); + const { + control, + handleSubmit, + reset, + formState: { errors, _ }, + } = useForm>({ + defaultValues: initialValues, + }); + + useEffect(() => { + if (isOpen) { + reset(initialValues); + } + }, [isOpen, initialValues, reset]); + + const onFormSubmit = (data: Record) => { + onSave(data); + }; + + const handleRevoke = () => { + if (onRevoke) { + onRevoke(); + } + }; + + const dialogTitle = localize('com_ui_configure_mcp_variables_for', { 0: serverName }); + const dialogDescription = localize('com_ui_mcp_dialog_desc'); + + return ( + + + {Object.entries(fieldsSchema).map(([key, details]) => ( +
+ + ( + + )} + /> + {details.description && ( +

+ )} + {errors[key] &&

{errors[key]?.message}

} +
+ ))} + + } + selection={{ + selectHandler: handleSubmit(onFormSubmit), + selectClasses: 'bg-green-500 hover:bg-green-600 text-white', + selectText: isSubmitting ? localize('com_ui_saving') : localize('com_ui_save'), + }} + buttons={ + onRevoke && ( + + ) + } + footerClassName="flex justify-end gap-2 px-6 pb-6 pt-2" + showCancelButton={true} + /> +
+ ); +} diff --git a/client/src/components/ui/MultiSelect.tsx b/client/src/components/ui/MultiSelect.tsx index ddbd5c90a4..e0b74a4577 100644 --- a/client/src/components/ui/MultiSelect.tsx +++ b/client/src/components/ui/MultiSelect.tsx @@ -26,6 +26,11 @@ interface MultiSelectProps { selectItemsClassName?: string; selectedValues: T[]; setSelectedValues: (values: T[]) => void; + renderItemContent?: ( + value: T, + defaultContent: React.ReactNode, + isSelected: boolean, + ) => React.ReactNode; } function defaultRender(values: T[], placeholder?: string) { @@ -54,9 +59,9 @@ export default function MultiSelect({ selectItemsClassName, selectedValues = [], setSelectedValues, + renderItemContent, }: MultiSelectProps) { const selectRef = useRef(null); - // const [selectedValues, setSelectedValues] = React.useState(defaultSelectedValues); const handleValueChange = (values: T[]) => { setSelectedValues(values); @@ -105,23 +110,33 @@ export default function MultiSelect({ popoverClassName, )} > - {items.map((value) => ( - - - {value} - - ))} + {items.map((value) => { + const defaultContent = ( + <> + + {value} + + ); + const isCurrentItemSelected = selectedValues.includes(value); + return ( + + {renderItemContent + ? renderItemContent(value, defaultContent, isCurrentItemSelected) + : defaultContent} + + ); + })}
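As an aside on the `renderItemContent` render prop introduced in the MultiSelect change above: a minimal consumer sketch is shown below. It is illustrative rather than code from this patch; the import paths, the `placeholder` prop, and the `configurable` set are assumptions, while `items`, `selectedValues`, `setSelectedValues`, and `renderItemContent` follow the prop names visible in the diff.

```tsx
import React, { useState } from 'react';
import { Settings } from 'lucide-react';
import MultiSelect from '~/components/ui/MultiSelect';

/** Illustrative sketch: appends a settings icon to items that expose configurable variables. */
export default function ExampleServerSelect({
  servers,
  configurable,
}: {
  servers: string[];
  configurable: Set<string>;
}) {
  const [selected, setSelected] = useState<string[]>([]);
  return (
    <MultiSelect
      items={servers}
      selectedValues={selected}
      setSelectedValues={setSelected}
      placeholder="MCP Servers"
      renderItemContent={(value, defaultContent) => (
        <span className="flex w-full items-center justify-between">
          {defaultContent}
          {configurable.has(value) ? <Settings className="h-4 w-4" /> : null}
        </span>
      )}
    />
  );
}
```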
diff --git a/client/src/hooks/Nav/useSideNavLinks.ts b/client/src/hooks/Nav/useSideNavLinks.ts index 822bde3ace..abc4688f73 100644 --- a/client/src/hooks/Nav/useSideNavLinks.ts +++ b/client/src/hooks/Nav/useSideNavLinks.ts @@ -17,7 +17,10 @@ import PanelSwitch from '~/components/SidePanel/Builder/PanelSwitch'; import PromptsAccordion from '~/components/Prompts/PromptsAccordion'; import Parameters from '~/components/SidePanel/Parameters/Panel'; import FilesPanel from '~/components/SidePanel/Files/Panel'; +import MCPPanel from '~/components/SidePanel/MCP/MCPPanel'; import { Blocks, AttachmentIcon } from '~/components/svg'; +import { useGetStartupConfig } from '~/data-provider'; +import MCPIcon from '~/components/ui/MCPIcon'; import { useHasAccess } from '~/hooks'; export default function useSideNavLinks({ @@ -59,6 +62,7 @@ export default function useSideNavLinks({ permissionType: PermissionTypes.AGENTS, permission: Permissions.CREATE, }); + const { data: startupConfig } = useGetStartupConfig(); const Links = useMemo(() => { const links: NavLink[] = []; @@ -149,6 +153,21 @@ export default function useSideNavLinks({ }); } + if ( + startupConfig?.mcpServers && + Object.values(startupConfig.mcpServers).some( + (server) => server.customUserVars && Object.keys(server.customUserVars).length > 0, + ) + ) { + links.push({ + title: 'com_nav_setting_mcp', + label: '', + icon: MCPIcon, + id: 'mcp-settings', + Component: MCPPanel, + }); + } + links.push({ title: 'com_sidepanel_hide_panel', label: '', @@ -171,6 +190,7 @@ export default function useSideNavLinks({ hasAccessToBookmarks, hasAccessToCreateAgents, hidePanel, + startupConfig, ]); return Links; diff --git a/client/src/locales/en/translation.json b/client/src/locales/en/translation.json index 0d6d40f398..5bc7a38f6b 100644 --- a/client/src/locales/en/translation.json +++ b/client/src/locales/en/translation.json @@ -423,6 +423,8 @@ "com_nav_log_out": "Log out", "com_nav_long_audio_warning": "Longer texts will take longer to process.", "com_nav_maximize_chat_space": "Maximize chat space", + "com_nav_mcp_vars_update_error": "Error updating MCP custom user variables: {{0}}", + "com_nav_mcp_vars_updated": "MCP custom user variables updated successfully.", "com_nav_modular_chat": "Enable switching Endpoints mid-conversation", "com_nav_my_files": "My Files", "com_nav_not_supported": "Not Supported", @@ -447,6 +449,7 @@ "com_nav_setting_chat": "Chat", "com_nav_setting_data": "Data controls", "com_nav_setting_general": "General", + "com_nav_setting_mcp": "MCP Settings", "com_nav_setting_personalization": "Personalization", "com_nav_setting_speech": "Speech", "com_nav_settings": "Settings", @@ -480,8 +483,15 @@ "com_sidepanel_conversation_tags": "Bookmarks", "com_sidepanel_hide_panel": "Hide Panel", "com_sidepanel_manage_files": "Manage Files", + "com_sidepanel_mcp_enter_value": "Enter value for {{0}}", + "com_sidepanel_mcp_no_servers_with_vars": "No MCP servers with configurable variables.", + "com_sidepanel_mcp_variables_for": "MCP Variables for {{0}}", "com_sidepanel_parameters": "Parameters", "com_sources_image_alt": "Search result image", + "com_ui_configure_mcp_variables_for": "Configure Variables for {{0}}", + "com_ui_mcp_dialog_desc": "Please enter the necessary information below.", + "com_ui_mcp_enter_var": "Enter value for {{0}}", + "com_ui_saving": "Saving...", "com_sources_more_sources": "+{{count}} sources", "com_sources_tab_all": "All", "com_sources_tab_images": "Images", @@ -570,6 +580,7 @@ "com_ui_authentication_type": "Authentication Type", 
"com_ui_avatar": "Avatar", "com_ui_azure": "Azure", + "com_ui_back": "Back", "com_ui_back_to_chat": "Back to Chat", "com_ui_back_to_prompts": "Back to Prompts", "com_ui_backup_codes": "Backup Codes", @@ -795,6 +806,7 @@ "com_ui_manage": "Manage", "com_ui_max_tags": "Maximum number allowed is {{0}}, using latest values.", "com_ui_mcp_servers": "MCP Servers", + "com_ui_mcp_server_not_found": "Server not found.", "com_ui_memories": "Memories", "com_ui_memories_allow_create": "Allow creating Memories", "com_ui_memories_allow_opt_out": "Allow users to opt out of Memories", @@ -1020,6 +1032,7 @@ "com_user_message": "You", "com_warning_resubmit_unsupported": "Resubmitting the AI message is not supported for this endpoint.", "com_ui_add_mcp": "Add MCP", + "com_ui_add_mcp": "Add MCP", "com_ui_add_mcp_server": "Add MCP Server", "com_ui_edit_mcp_server": "Edit MCP Server", "com_agents_mcps_disabled": "You need to create an agent before adding MCPs.", diff --git a/package-lock.json b/package-lock.json index 75ce0ea9fd..a7384d0f90 100644 --- a/package-lock.json +++ b/package-lock.json @@ -46322,7 +46322,7 @@ }, "packages/data-provider": { "name": "librechat-data-provider", - "version": "0.7.87", + "version": "0.7.88", "license": "ISC", "dependencies": { "axios": "^1.8.2", diff --git a/packages/api/src/agents/auth.ts b/packages/api/src/agents/auth.ts new file mode 100644 index 0000000000..564ef84b5a --- /dev/null +++ b/packages/api/src/agents/auth.ts @@ -0,0 +1,93 @@ +import { logger } from '@librechat/data-schemas'; +import type { IPluginAuth, PluginAuthMethods } from '@librechat/data-schemas'; +import { decrypt } from '../crypto/encryption'; + +export interface GetPluginAuthMapParams { + userId: string; + pluginKeys: string[]; + throwError?: boolean; + findPluginAuthsByKeys: PluginAuthMethods['findPluginAuthsByKeys']; +} + +export type PluginAuthMap = Record>; + +/** + * Retrieves and decrypts authentication values for multiple plugins + * @returns A map where keys are pluginKeys and values are objects of authField:decryptedValue pairs + */ +export async function getPluginAuthMap({ + userId, + pluginKeys, + throwError = true, + findPluginAuthsByKeys, +}: GetPluginAuthMapParams): Promise { + try { + /** Early return for empty plugin keys */ + if (!pluginKeys?.length) { + return {}; + } + + /** All plugin auths for current user query */ + const pluginAuths = await findPluginAuthsByKeys({ userId, pluginKeys }); + + /** Group auth records by pluginKey for efficient lookup */ + const authsByPlugin = new Map(); + for (const auth of pluginAuths) { + if (!auth.pluginKey) { + logger.warn(`[getPluginAuthMap] Missing pluginKey for userId ${userId}`); + continue; + } + const existing = authsByPlugin.get(auth.pluginKey) || []; + existing.push(auth); + authsByPlugin.set(auth.pluginKey, existing); + } + + const authMap: PluginAuthMap = {}; + const decryptionPromises: Promise[] = []; + + /** Single loop through requested pluginKeys */ + for (const pluginKey of pluginKeys) { + authMap[pluginKey] = {}; + const auths = authsByPlugin.get(pluginKey) || []; + + for (const auth of auths) { + decryptionPromises.push( + (async () => { + try { + const decryptedValue = await decrypt(auth.value); + authMap[pluginKey][auth.authField] = decryptedValue; + } catch (error) { + const message = error instanceof Error ? 
error.message : 'Unknown error'; + logger.error( + `[getPluginAuthMap] Decryption failed for userId ${userId}, plugin ${pluginKey}, field ${auth.authField}: ${message}`, + ); + + if (throwError) { + throw new Error( + `Decryption failed for plugin ${pluginKey}, field ${auth.authField}: ${message}`, + ); + } + } + })(), + ); + } + } + + await Promise.all(decryptionPromises); + return authMap; + } catch (error) { + if (!throwError) { + /** Empty objects for each plugin key on error */ + return pluginKeys.reduce((acc, key) => { + acc[key] = {}; + return acc; + }, {} as PluginAuthMap); + } + + const message = error instanceof Error ? error.message : 'Unknown error'; + logger.error( + `[getPluginAuthMap] Failed to fetch auth values for userId ${userId}, plugins: ${pluginKeys.join(', ')}: ${message}`, + ); + throw error; + } +} diff --git a/packages/api/src/agents/run.ts b/packages/api/src/agents/run.ts index 22f8c4ae93..41ec02d9b9 100644 --- a/packages/api/src/agents/run.ts +++ b/packages/api/src/agents/run.ts @@ -1,6 +1,12 @@ import { Run, Providers } from '@librechat/agents'; import { providerEndpointMap, KnownEndpoints } from 'librechat-data-provider'; -import type { StandardGraphConfig, EventHandler, GraphEvents, IState } from '@librechat/agents'; +import type { + StandardGraphConfig, + EventHandler, + GenericTool, + GraphEvents, + IState, +} from '@librechat/agents'; import type { Agent } from 'librechat-data-provider'; import type * as t from '~/types'; @@ -32,7 +38,7 @@ export async function createRun({ streaming = true, streamUsage = true, }: { - agent: Agent; + agent: Omit & { tools?: GenericTool[] }; signal: AbortSignal; runId?: string; streaming?: boolean; diff --git a/packages/api/src/index.ts b/packages/api/src/index.ts index b7859e7ca4..0341de44b0 100644 --- a/packages/api/src/index.ts +++ b/packages/api/src/index.ts @@ -1,6 +1,7 @@ /* MCP */ export * from './mcp/manager'; export * from './mcp/oauth'; +export * from './mcp/auth'; /* Utilities */ export * from './mcp/utils'; export * from './utils'; diff --git a/packages/api/src/mcp/auth.ts b/packages/api/src/mcp/auth.ts new file mode 100644 index 0000000000..7f6f6001fa --- /dev/null +++ b/packages/api/src/mcp/auth.ts @@ -0,0 +1,58 @@ +import { logger } from '@librechat/data-schemas'; +import { Constants } from 'librechat-data-provider'; +import type { PluginAuthMethods } from '@librechat/data-schemas'; +import type { GenericTool } from '@librechat/agents'; +import { getPluginAuthMap } from '~/agents/auth'; +import { mcpToolPattern } from './utils'; + +export async function getUserMCPAuthMap({ + userId, + tools, + appTools, + findPluginAuthsByKeys, +}: { + userId: string; + tools: GenericTool[] | undefined; + appTools: Record; + findPluginAuthsByKeys: PluginAuthMethods['findPluginAuthsByKeys']; +}) { + if (!tools || tools.length === 0) { + return {}; + } + + const uniqueMcpServers = new Set(); + + for (const tool of tools) { + const toolKey = tool.name; + if (toolKey && appTools[toolKey] && mcpToolPattern.test(toolKey)) { + const parts = toolKey.split(Constants.mcp_delimiter); + const serverName = parts[parts.length - 1]; + uniqueMcpServers.add(`${Constants.mcp_prefix}${serverName}`); + } + } + + if (uniqueMcpServers.size === 0) { + return {}; + } + + const mcpPluginKeysToFetch = Array.from(uniqueMcpServers); + + let allMcpCustomUserVars: Record> = {}; + try { + allMcpCustomUserVars = await getPluginAuthMap({ + userId, + pluginKeys: mcpPluginKeysToFetch, + throwError: false, + findPluginAuthsByKeys, + }); + } catch (err) { + 
logger.error( + `[handleTools] Error batch fetching customUserVars for MCP tools (keys: ${mcpPluginKeysToFetch.join( + ', ', + )}), user ${userId}: ${err instanceof Error ? err.message : 'Unknown error'}`, + err, + ); + } + + return allMcpCustomUserVars; +} diff --git a/packages/api/src/mcp/manager.ts b/packages/api/src/mcp/manager.ts index 0a60784457..19d4d4e72b 100644 --- a/packages/api/src/mcp/manager.ts +++ b/packages/api/src/mcp/manager.ts @@ -14,10 +14,6 @@ import { MCPTokenStorage } from './oauth/tokens'; import { formatToolContent } from './parsers'; import { MCPConnection } from './connection'; -export interface CallToolOptions extends RequestOptions { - user?: TUser; -} - export class MCPManager { private static instance: MCPManager | null = null; /** App-level connections initialized at startup */ @@ -28,7 +24,11 @@ export class MCPManager { private userLastActivity: Map = new Map(); private readonly USER_CONNECTION_IDLE_TIMEOUT = 15 * 60 * 1000; // 15 minutes (TODO: make configurable) private mcpConfigs: t.MCPServers = {}; - private processMCPEnv?: (obj: MCPOptions, user?: TUser) => MCPOptions; // Store the processing function + private processMCPEnv?: ( + obj: MCPOptions, + user?: TUser, + customUserVars?: Record, + ) => MCPOptions; // Store the processing function /** Store MCP server instructions */ private serverInstructions: Map = new Map(); @@ -63,7 +63,6 @@ export class MCPManager { if (!tokenMethods) { logger.info('[MCP] No token methods provided, token persistence will not be available'); } - const entries = Object.entries(mcpServers); const initializedServers = new Set(); const connectionResults = await Promise.allSettled( @@ -382,6 +381,7 @@ export class MCPManager { user, serverName, flowManager, + customUserVars, tokenMethods, oauthStart, oauthEnd, @@ -390,6 +390,7 @@ export class MCPManager { user: TUser; serverName: string; flowManager: FlowStateManager; + customUserVars?: Record; tokenMethods?: TokenMethods; oauthStart?: (authURL: string) => Promise; oauthEnd?: () => Promise; @@ -444,9 +445,8 @@ export class MCPManager { } if (this.processMCPEnv) { - config = { ...(this.processMCPEnv(config, user) ?? {}) }; + config = { ...(this.processMCPEnv(config, user, customUserVars) ?? {}) }; } - /** If no in-memory tokens, tokens from persistent storage */ let tokens: MCPOAuthTokens | null = null; if (tokenMethods?.findToken) { @@ -752,7 +752,6 @@ export class MCPManager { getServerTools?: (serverName: string) => Promise; }): Promise { const mcpTools: t.LCManifestTool[] = []; - for (const [serverName, connection] of this.connections.entries()) { try { /** Attempt to ensure connection is active, with reconnection if needed */ @@ -784,13 +783,21 @@ export class MCPManager { const serverTools: t.LCManifestTool[] = []; for (const tool of tools) { const pluginKey = `${tool.name}${CONSTANTS.mcp_delimiter}${serverName}`; + + const config = this.mcpConfigs[serverName]; const manifestTool: t.LCManifestTool = { name: tool.name, pluginKey, description: tool.description ?? '', icon: connection.iconPath, + authConfig: config?.customUserVars + ? Object.entries(config.customUserVars).map(([key, value]) => ({ + authField: key, + label: value.title || key, + description: value.description || '', + })) + : undefined, }; - const config = this.mcpConfigs[serverName]; if (config?.chatMenu === false) { manifestTool.chatMenu = false; } @@ -814,6 +821,7 @@ export class MCPManager { * for user-specific connections upon successful call initiation. 
*/ async callTool({ + user, serverName, toolName, provider, @@ -823,20 +831,22 @@ export class MCPManager { flowManager, oauthStart, oauthEnd, + customUserVars, }: { + user?: TUser; serverName: string; toolName: string; provider: t.Provider; toolArguments?: Record; - options?: CallToolOptions; + options?: RequestOptions; tokenMethods?: TokenMethods; + customUserVars?: Record; flowManager: FlowStateManager; oauthStart?: (authURL: string) => Promise; oauthEnd?: () => Promise; }): Promise { /** User-specific connection */ let connection: MCPConnection | undefined; - const { user, ...callOptions } = options ?? {}; const userId = user?.id; const logPrefix = userId ? `[MCP][User: ${userId}][${serverName}]` : `[MCP][${serverName}]`; @@ -852,6 +862,7 @@ export class MCPManager { oauthStart, oauthEnd, signal: options?.signal, + customUserVars, }); } else { /** App-level connection */ @@ -883,7 +894,7 @@ export class MCPManager { CallToolResultSchema, { timeout: connection.timeout, - ...callOptions, + ...options, }, ); if (userId) { diff --git a/packages/api/src/mcp/types/index.ts b/packages/api/src/mcp/types/index.ts index bfd73633eb..d95251eecc 100644 --- a/packages/api/src/mcp/types/index.ts +++ b/packages/api/src/mcp/types/index.ts @@ -14,7 +14,15 @@ export type StdioOptions = z.infer; export type WebSocketOptions = z.infer; export type SSEOptions = z.infer; export type StreamableHTTPOptions = z.infer; -export type MCPOptions = z.infer; +export type MCPOptions = z.infer & { + customUserVars?: Record< + string, + { + title: string; + description: string; + } + >; +}; export type MCPServers = z.infer; export interface MCPResource { uri: string; diff --git a/packages/api/src/mcp/utils.ts b/packages/api/src/mcp/utils.ts index f315976fcf..631ce5c210 100644 --- a/packages/api/src/mcp/utils.ts +++ b/packages/api/src/mcp/utils.ts @@ -1,3 +1,6 @@ +import { Constants } from 'librechat-data-provider'; + +export const mcpToolPattern = new RegExp(`^.+${Constants.mcp_delimiter}.+$`); /** * Normalizes a server name to match the pattern ^[a-zA-Z0-9_.-]+$ * This is required for Azure OpenAI models with Tool Calling diff --git a/packages/data-provider/package.json b/packages/data-provider/package.json index 146121cb44..d46bfcf716 100644 --- a/packages/data-provider/package.json +++ b/packages/data-provider/package.json @@ -1,6 +1,6 @@ { "name": "librechat-data-provider", - "version": "0.7.87", + "version": "0.7.88", "description": "data services for librechat apps", "main": "dist/index.js", "module": "dist/index.es.js", diff --git a/packages/data-provider/specs/actions.spec.ts b/packages/data-provider/specs/actions.spec.ts index 6d84c79373..818c72c832 100644 --- a/packages/data-provider/specs/actions.spec.ts +++ b/packages/data-provider/specs/actions.spec.ts @@ -1,6 +1,8 @@ -import axios from 'axios'; import { z } from 'zod'; -import { OpenAPIV3 } from 'openapi-types'; +import axios from 'axios'; +import type { OpenAPIV3 } from 'openapi-types'; +import type { ParametersSchema } from '../src/actions'; +import type { FlowchartSchema } from './openapiSpecs'; import { createURL, resolveRef, @@ -15,9 +17,7 @@ import { scholarAIOpenapiSpec, swapidev, } from './openapiSpecs'; -import { AuthorizationTypeEnum, AuthTypeEnum } from '../src/types/assistants'; -import type { FlowchartSchema } from './openapiSpecs'; -import type { ParametersSchema } from '../src/actions'; +import { AuthorizationTypeEnum, AuthTypeEnum } from '../src/types/agents'; jest.mock('axios'); const mockedAxios = axios as jest.Mocked; diff --git 
a/packages/data-provider/specs/mcp.spec.ts b/packages/data-provider/specs/mcp.spec.ts index f2b62c0f89..37493f1bbc 100644 --- a/packages/data-provider/specs/mcp.spec.ts +++ b/packages/data-provider/specs/mcp.spec.ts @@ -525,5 +525,188 @@ describe('Environment Variable Extraction (MCP)', () => { const result3 = processMCPEnv(obj3, userWithBoth); expect('headers' in result3 && result3.headers?.['User-Id']).toBe('user-789'); }); + + it('should process customUserVars in env field', () => { + const user = createTestUser(); + const customUserVars = { + CUSTOM_VAR_1: 'custom-value-1', + CUSTOM_VAR_2: 'custom-value-2', + }; + const obj: MCPOptions = { + command: 'node', + args: ['server.js'], + env: { + VAR_A: '{{CUSTOM_VAR_1}}', + VAR_B: 'Value with {{CUSTOM_VAR_2}}', + VAR_C: '${TEST_API_KEY}', + VAR_D: '{{LIBRECHAT_USER_EMAIL}}', + }, + }; + + const result = processMCPEnv(obj, user, customUserVars); + + expect('env' in result && result.env).toEqual({ + VAR_A: 'custom-value-1', + VAR_B: 'Value with custom-value-2', + VAR_C: 'test-api-key-value', + VAR_D: 'test@example.com', + }); + }); + + it('should process customUserVars in headers field', () => { + const user = createTestUser(); + const customUserVars = { + USER_TOKEN: 'user-specific-token', + REGION: 'us-west-1', + }; + const obj: MCPOptions = { + type: 'sse', + url: 'https://example.com/api', + headers: { + Authorization: 'Bearer {{USER_TOKEN}}', + 'X-Region': '{{REGION}}', + 'X-System-Key': '${TEST_API_KEY}', + 'X-User-Id': '{{LIBRECHAT_USER_ID}}', + }, + }; + + const result = processMCPEnv(obj, user, customUserVars); + + expect('headers' in result && result.headers).toEqual({ + Authorization: 'Bearer user-specific-token', + 'X-Region': 'us-west-1', + 'X-System-Key': 'test-api-key-value', + 'X-User-Id': 'test-user-id', + }); + }); + + it('should process customUserVars in URL field', () => { + const user = createTestUser(); + const customUserVars = { + API_VERSION: 'v2', + TENANT_ID: 'tenant123', + }; + const obj: MCPOptions = { + type: 'websocket', + url: 'wss://example.com/{{TENANT_ID}}/api/{{API_VERSION}}?user={{LIBRECHAT_USER_ID}}&key=${TEST_API_KEY}', + }; + + const result = processMCPEnv(obj, user, customUserVars); + + expect('url' in result && result.url).toBe( + 'wss://example.com/tenant123/api/v2?user=test-user-id&key=test-api-key-value', + ); + }); + + it('should prioritize customUserVars over user fields and system env vars if placeholders are the same (though not recommended)', () => { + // This tests the order of operations: customUserVars -> userFields -> systemEnv + // BUt it's generally not recommended to have overlapping placeholder names. 
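The comment above spells out the substitution order these tests rely on: custom user variables first, then LibreChat user fields, then system environment variables. A minimal sketch of that lookup for a single placeholder, using a hypothetical helper name and simplified shapes, would be:

```ts
/**
 * Illustrative precedence for one `{{PLACEHOLDER}}`, mirroring the tests:
 * custom user variables win over LibreChat user fields, which win over process.env.
 */
function resolvePlaceholder(
  name: string,
  customUserVars: Record<string, string>,
  userFields: Record<string, string | undefined>,
): string | undefined {
  if (customUserVars[name] !== undefined) {
    return customUserVars[name];
  }
  if (userFields[name] !== undefined) {
    return userFields[name];
  }
  return process.env[name];
}

// resolvePlaceholder(
//   'LIBRECHAT_USER_EMAIL',
//   { LIBRECHAT_USER_EMAIL: 'custom-email-wins' },
//   { LIBRECHAT_USER_EMAIL: 'user-email-should-be-overridden' },
// ) === 'custom-email-wins', matching the expectation in the test below.
```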
+ process.env.LIBRECHAT_USER_EMAIL = 'system-email-should-be-overridden'; + const user = createTestUser({ email: 'user-email-should-be-overridden' }); + const customUserVars = { + LIBRECHAT_USER_EMAIL: 'custom-email-wins', + }; + const obj: MCPOptions = { + type: 'sse', + url: 'https://example.com/api', + headers: { + 'Test-Email': '{{LIBRECHAT_USER_EMAIL}}', // Placeholder that could match custom, user, or system + }, + }; + + const result = processMCPEnv(obj, user, customUserVars); + expect('headers' in result && result.headers?.['Test-Email']).toBe('custom-email-wins'); + + // Clean up env var + delete process.env.LIBRECHAT_USER_EMAIL; + }); + + it('should handle customUserVars with no matching placeholders', () => { + const user = createTestUser(); + const customUserVars = { + UNUSED_VAR: 'unused-value', + }; + const obj: MCPOptions = { + command: 'node', + args: ['server.js'], + env: { + API_KEY: '${TEST_API_KEY}', + }, + }; + + const result = processMCPEnv(obj, user, customUserVars); + expect('env' in result && result.env).toEqual({ + API_KEY: 'test-api-key-value', + }); + }); + + it('should handle placeholders with no matching customUserVars (falling back to user/system vars)', () => { + const user = createTestUser({ email: 'user-provided-email@example.com' }); + // No customUserVars provided or customUserVars is empty + const customUserVars = {}; + const obj: MCPOptions = { + type: 'sse', + url: 'https://example.com/api', + headers: { + 'User-Email-Header': '{{LIBRECHAT_USER_EMAIL}}', // Should use user.email + 'System-Key-Header': '${TEST_API_KEY}', // Should use process.env.TEST_API_KEY + 'Non-Existent-Custom': '{{NON_EXISTENT_CUSTOM_VAR}}', // Should remain as placeholder + }, + }; + + const result = processMCPEnv(obj, user, customUserVars); + expect('headers' in result && result.headers).toEqual({ + 'User-Email-Header': 'user-provided-email@example.com', + 'System-Key-Header': 'test-api-key-value', + 'Non-Existent-Custom': '{{NON_EXISTENT_CUSTOM_VAR}}', + }); + }); + + it('should correctly process a mix of all variable types', () => { + const user = createTestUser({ id: 'userXYZ', username: 'john.doe' }); + const customUserVars = { + CUSTOM_ENDPOINT_ID: 'ep123', + ANOTHER_CUSTOM: 'another_val', + }; + + const obj = { + type: 'streamable-http' as const, + url: 'https://{{CUSTOM_ENDPOINT_ID}}.example.com/users/{{LIBRECHAT_USER_USERNAME}}', + headers: { + 'X-Auth-Token': '{{CUSTOM_TOKEN_FROM_USER_SETTINGS}}', // Assuming this would be a custom var + 'X-User-ID': '{{LIBRECHAT_USER_ID}}', + 'X-System-Test-Key': '${TEST_API_KEY}', // Using existing env var from beforeEach + }, + env: { + PROCESS_MODE: '{{PROCESS_MODE_CUSTOM}}', // Another custom var + USER_HOME_DIR: '/home/{{LIBRECHAT_USER_USERNAME}}', + SYSTEM_PATH: '${PATH}', // Example of a system env var + }, + }; + + // Simulate customUserVars that would be passed, including those for headers and env + const allCustomVarsForCall = { + ...customUserVars, + CUSTOM_TOKEN_FROM_USER_SETTINGS: 'secretToken123!', + PROCESS_MODE_CUSTOM: 'production', + }; + + // Cast obj to MCPOptions when calling processMCPEnv. + // This acknowledges the object might not strictly conform to one schema in the union, + // but we are testing the function's ability to handle these properties if present. 
+ const result = processMCPEnv(obj as MCPOptions, user, allCustomVarsForCall); + + expect('url' in result && result.url).toBe('https://ep123.example.com/users/john.doe'); + expect('headers' in result && result.headers).toEqual({ + 'X-Auth-Token': 'secretToken123!', + 'X-User-ID': 'userXYZ', + 'X-System-Test-Key': 'test-api-key-value', // Expecting value of TEST_API_KEY + }); + expect('env' in result && result.env).toEqual({ + PROCESS_MODE: 'production', + USER_HOME_DIR: '/home/john.doe', + SYSTEM_PATH: process.env.PATH, // Actual value of PATH from the test environment + }); + }); }); }); diff --git a/packages/data-provider/src/config.ts b/packages/data-provider/src/config.ts index 145487d7ef..4d1c95b69f 100644 --- a/packages/data-provider/src/config.ts +++ b/packages/data-provider/src/config.ts @@ -588,6 +588,18 @@ export type TStartupConfig = { scraperType?: ScraperTypes; rerankerType?: RerankerTypes; }; + mcpServers?: Record< + string, + { + customUserVars: Record< + string, + { + title: string; + description: string; + } + >; + } + >; }; export enum OCRStrategy { @@ -885,7 +897,6 @@ export const defaultModels = { [EModelEndpoint.assistants]: [...sharedOpenAIModels, 'chatgpt-4o-latest'], [EModelEndpoint.agents]: sharedOpenAIModels, // TODO: Add agent models (agentsModels) [EModelEndpoint.google]: [ - // Shared Google Models between Vertex AI & Gen AI // Gemini 2.0 Models 'gemini-2.0-flash-001', 'gemini-2.0-flash-exp', @@ -1395,6 +1406,8 @@ export enum Constants { GLOBAL_PROJECT_NAME = 'instance', /** Delimiter for MCP tools */ mcp_delimiter = '_mcp_', + /** Prefix for MCP plugins */ + mcp_prefix = 'mcp_', /** Placeholder Agent ID for Ephemeral Agents */ EPHEMERAL_AGENT_ID = 'ephemeral', } diff --git a/packages/data-provider/src/data-service.ts b/packages/data-provider/src/data-service.ts index b956364835..08a666dd78 100644 --- a/packages/data-provider/src/data-service.ts +++ b/packages/data-provider/src/data-service.ts @@ -151,7 +151,11 @@ export const updateUserPlugins = (payload: t.TUpdateUserPlugins) => { /* Config */ -export const getStartupConfig = (): Promise => { +export const getStartupConfig = (): Promise< + config.TStartupConfig & { + mcpCustomUserVars?: Record; + } +> => { return request.get(endpoints.config()); }; diff --git a/packages/data-provider/src/mcp.ts b/packages/data-provider/src/mcp.ts index 990b46e511..05b37115fc 100644 --- a/packages/data-provider/src/mcp.ts +++ b/packages/data-provider/src/mcp.ts @@ -39,6 +39,15 @@ const BaseOptionsSchema = z.object({ token_exchange_method: z.nativeEnum(TokenExchangeMethodEnum).optional(), }) .optional(), + customUserVars: z + .record( + z.string(), + z.object({ + title: z.string(), + description: z.string(), + }), + ) + .optional(), }); export const StdioOptionsSchema = BaseOptionsSchema.extend({ @@ -191,13 +200,55 @@ function processUserPlaceholders(value: string, user?: TUser): string { return value; } +function processSingleValue({ + originalValue, + customUserVars, + user, +}: { + originalValue: string; + customUserVars?: Record; + user?: TUser; +}): string { + let value = originalValue; + + // 1. Replace custom user variables + if (customUserVars) { + for (const [varName, varVal] of Object.entries(customUserVars)) { + /** Escaped varName for use in regex to avoid issues with special characters */ + const escapedVarName = varName.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); + const placeholderRegex = new RegExp(`\\{\\{${escapedVarName}\\}\\}`, 'g'); + value = value.replace(placeholderRegex, varVal); + } + } + + // 2.A. 
Special handling for LIBRECHAT_USER_ID placeholder + // This ensures {{LIBRECHAT_USER_ID}} is replaced only if user.id is available. + // If user.id is null/undefined, the placeholder remains + if (user && user.id != null && value.includes('{{LIBRECHAT_USER_ID}}')) { + value = value.replace(/\{\{LIBRECHAT_USER_ID\}\}/g, String(user.id)); + } + + // 2.B. Replace other standard user field placeholders (e.g., {{LIBRECHAT_USER_EMAIL}}) + value = processUserPlaceholders(value, user); + + // 3. Replace system environment variables + value = extractEnvVariable(value); + + return value; +} + /** * Recursively processes an object to replace environment variables in string values * @param obj - The object to process * @param user - The user object containing all user fields + * @param customUserVars - vars that user set in settings * @returns - The processed object with environment variables replaced */ -export function processMCPEnv(obj: Readonly, user?: TUser): MCPOptions { +export function processMCPEnv( + obj: Readonly, + user?: TUser, + customUserVars?: Record, +): MCPOptions { if (obj === null || obj === undefined) { return obj; } @@ -206,32 +257,25 @@ export function processMCPEnv(obj: Readonly, user?: TUser): MCPOptio if ('env' in newObj && newObj.env) { const processedEnv: Record = {}; - for (const [key, value] of Object.entries(newObj.env)) { - let processedValue = extractEnvVariable(value); - processedValue = processUserPlaceholders(processedValue, user); - processedEnv[key] = processedValue; + for (const [key, originalValue] of Object.entries(newObj.env)) { + processedEnv[key] = processSingleValue({ originalValue, customUserVars, user }); } newObj.env = processedEnv; - } else if ('headers' in newObj && newObj.headers) { - const processedHeaders: Record = {}; - for (const [key, value] of Object.entries(newObj.headers)) { - const userId = user?.id; - if (value === '{{LIBRECHAT_USER_ID}}' && userId != null) { - processedHeaders[key] = String(userId); - continue; - } + } - let processedValue = extractEnvVariable(value); - processedValue = processUserPlaceholders(processedValue, user); - processedHeaders[key] = processedValue; + // Process headers if they exist (for WebSocket, SSE, StreamableHTTP types) + // Note: `env` and `headers` are on different branches of the MCPOptions union type. 
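To make the note above concrete, a small usage sketch follows; it is not part of this patch. The option shapes mirror the specs earlier in this diff, while the import specifiers from `librechat-data-provider` are assumptions about the package's public exports.

```ts
import type { MCPOptions, TUser } from 'librechat-data-provider';
import { processMCPEnv } from 'librechat-data-provider';

const user = { id: 'user-1', email: 'user@example.com' } as TUser;
const customUserVars = { MY_TOKEN: 'abc123' };

// stdio branch of the union: placeholders are resolved inside `env`
const stdioOptions: MCPOptions = {
  command: 'node',
  args: ['server.js'],
  env: { API_TOKEN: '{{MY_TOKEN}}', OWNER: '{{LIBRECHAT_USER_EMAIL}}' },
};

// sse branch of the union: placeholders are resolved inside `headers` and `url`
const sseOptions: MCPOptions = {
  type: 'sse',
  url: 'https://example.com/sse?user={{LIBRECHAT_USER_ID}}',
  headers: { Authorization: 'Bearer {{MY_TOKEN}}' },
};

const a = processMCPEnv(stdioOptions, user, customUserVars);
// 'env' in a && a.env?.API_TOKEN === 'abc123' && a.env?.OWNER === 'user@example.com'
const b = processMCPEnv(sseOptions, user, customUserVars);
// 'headers' in b && b.headers?.Authorization === 'Bearer abc123'
// 'url' in b && b.url === 'https://example.com/sse?user=user-1'
```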
+ if ('headers' in newObj && newObj.headers) { + const processedHeaders: Record = {}; + for (const [key, originalValue] of Object.entries(newObj.headers)) { + processedHeaders[key] = processSingleValue({ originalValue, customUserVars, user }); } newObj.headers = processedHeaders; } + // Process URL if it exists (for WebSocket, SSE, StreamableHTTP types) if ('url' in newObj && newObj.url) { - let processedUrl = extractEnvVariable(newObj.url); - processedUrl = processUserPlaceholders(processedUrl, user); - newObj.url = processedUrl; + newObj.url = processSingleValue({ originalValue: newObj.url, customUserVars, user }); } return newObj; diff --git a/packages/data-schemas/src/methods/index.ts b/packages/data-schemas/src/methods/index.ts index be4308ecd9..57f0fc2f2c 100644 --- a/packages/data-schemas/src/methods/index.ts +++ b/packages/data-schemas/src/methods/index.ts @@ -5,6 +5,7 @@ import { createRoleMethods, type RoleMethods } from './role'; /* Memories */ import { createMemoryMethods, type MemoryMethods } from './memory'; import { createShareMethods, type ShareMethods } from './share'; +import { createPluginAuthMethods, type PluginAuthMethods } from './pluginAuth'; /** * Creates all database methods for all collections @@ -17,13 +18,15 @@ export function createMethods(mongoose: typeof import('mongoose')) { ...createRoleMethods(mongoose), ...createMemoryMethods(mongoose), ...createShareMethods(mongoose), + ...createPluginAuthMethods(mongoose), }; } -export type { MemoryMethods, ShareMethods, TokenMethods }; +export type { MemoryMethods, ShareMethods, TokenMethods, PluginAuthMethods }; export type AllMethods = UserMethods & SessionMethods & TokenMethods & RoleMethods & MemoryMethods & - ShareMethods; + ShareMethods & + PluginAuthMethods; diff --git a/packages/data-schemas/src/methods/pluginAuth.ts b/packages/data-schemas/src/methods/pluginAuth.ts new file mode 100644 index 0000000000..f0256f859f --- /dev/null +++ b/packages/data-schemas/src/methods/pluginAuth.ts @@ -0,0 +1,140 @@ +import type { DeleteResult, Model } from 'mongoose'; +import type { IPluginAuth } from '~/schema/pluginAuth'; +import type { + FindPluginAuthsByKeysParams, + UpdatePluginAuthParams, + DeletePluginAuthParams, + FindPluginAuthParams, +} from '~/types'; + +// Factory function that takes mongoose instance and returns the methods +export function createPluginAuthMethods(mongoose: typeof import('mongoose')) { + const PluginAuth: Model = mongoose.models.PluginAuth; + + /** + * Finds a single plugin auth entry by userId and authField + */ + async function findOnePluginAuth({ + userId, + authField, + }: FindPluginAuthParams): Promise { + try { + return await PluginAuth.findOne({ userId, authField }).lean(); + } catch (error) { + throw new Error( + `Failed to find plugin auth: ${error instanceof Error ? error.message : 'Unknown error'}`, + ); + } + } + + /** + * Finds multiple plugin auth entries by userId and pluginKeys + */ + async function findPluginAuthsByKeys({ + userId, + pluginKeys, + }: FindPluginAuthsByKeysParams): Promise { + try { + if (!pluginKeys || pluginKeys.length === 0) { + return []; + } + + return await PluginAuth.find({ + userId, + pluginKey: { $in: pluginKeys }, + }).lean(); + } catch (error) { + throw new Error( + `Failed to find plugin auths: ${error instanceof Error ? 
error.message : 'Unknown error'}`, + ); + } + } + + /** + * Updates or creates a plugin auth entry + */ + async function updatePluginAuth({ + userId, + authField, + pluginKey, + value, + }: UpdatePluginAuthParams): Promise { + try { + const existingAuth = await PluginAuth.findOne({ userId, pluginKey, authField }).lean(); + + if (existingAuth) { + return await PluginAuth.findOneAndUpdate( + { userId, pluginKey, authField }, + { $set: { value } }, + { new: true, upsert: true }, + ).lean(); + } else { + const newPluginAuth = await new PluginAuth({ + userId, + authField, + value, + pluginKey, + }); + await newPluginAuth.save(); + return newPluginAuth.toObject(); + } + } catch (error) { + throw new Error( + `Failed to update plugin auth: ${error instanceof Error ? error.message : 'Unknown error'}`, + ); + } + } + + /** + * Deletes plugin auth entries based on provided parameters + */ + async function deletePluginAuth({ + userId, + authField, + pluginKey, + all = false, + }: DeletePluginAuthParams): Promise { + try { + if (all) { + const filter: DeletePluginAuthParams = { userId }; + if (pluginKey) { + filter.pluginKey = pluginKey; + } + return await PluginAuth.deleteMany(filter); + } + + if (!authField) { + throw new Error('authField is required when all is false'); + } + + return await PluginAuth.deleteOne({ userId, authField }); + } catch (error) { + throw new Error( + `Failed to delete plugin auth: ${error instanceof Error ? error.message : 'Unknown error'}`, + ); + } + } + + /** + * Deletes all plugin auth entries for a user + */ + async function deleteAllUserPluginAuths(userId: string): Promise { + try { + return await PluginAuth.deleteMany({ userId }); + } catch (error) { + throw new Error( + `Failed to delete all user plugin auths: ${error instanceof Error ? 
error.message : 'Unknown error'}`, + ); + } + } + + return { + findOnePluginAuth, + findPluginAuthsByKeys, + updatePluginAuth, + deletePluginAuth, + deleteAllUserPluginAuths, + }; +} + +export type PluginAuthMethods = ReturnType; diff --git a/packages/data-schemas/src/schema/pluginAuth.ts b/packages/data-schemas/src/schema/pluginAuth.ts index 5c29024453..534c49d127 100644 --- a/packages/data-schemas/src/schema/pluginAuth.ts +++ b/packages/data-schemas/src/schema/pluginAuth.ts @@ -1,13 +1,5 @@ -import { Schema, Document } from 'mongoose'; - -export interface IPluginAuth extends Document { - authField: string; - value: string; - userId: string; - pluginKey?: string; - createdAt?: Date; - updatedAt?: Date; -} +import { Schema } from 'mongoose'; +import type { IPluginAuth } from '~/types'; const pluginAuthSchema: Schema = new Schema( { diff --git a/packages/data-schemas/src/types/index.ts b/packages/data-schemas/src/types/index.ts index 3dfe1334e5..f8a508a314 100644 --- a/packages/data-schemas/src/types/index.ts +++ b/packages/data-schemas/src/types/index.ts @@ -14,5 +14,6 @@ export * from './action'; export * from './assistant'; export * from './file'; export * from './share'; +export * from './pluginAuth'; /* Memories */ export * from './memory'; diff --git a/packages/data-schemas/src/types/pluginAuth.ts b/packages/data-schemas/src/types/pluginAuth.ts new file mode 100644 index 0000000000..421769eaa3 --- /dev/null +++ b/packages/data-schemas/src/types/pluginAuth.ts @@ -0,0 +1,40 @@ +import type { Document } from 'mongoose'; + +export interface IPluginAuth extends Document { + authField: string; + value: string; + userId: string; + pluginKey?: string; + createdAt?: Date; + updatedAt?: Date; +} + +export interface PluginAuthQuery { + userId: string; + authField?: string; + pluginKey?: string; +} + +export interface FindPluginAuthParams { + userId: string; + authField: string; +} + +export interface FindPluginAuthsByKeysParams { + userId: string; + pluginKeys: string[]; +} + +export interface UpdatePluginAuthParams { + userId: string; + authField: string; + pluginKey: string; + value: string; +} + +export interface DeletePluginAuthParams { + userId: string; + authField?: string; + pluginKey?: string; + all?: boolean; +} From a5e8d009a1e11aa7d3c347ba530c7b60156b123e Mon Sep 17 00:00:00 2001 From: Danny Avila Date: Thu, 19 Jun 2025 19:00:45 -0400 Subject: [PATCH 10/16] =?UTF-8?q?=F0=9F=94=A7=20fix:=20Logger=20Paths=20an?= =?UTF-8?q?d=20Exclude=20`index.html`=20from=20Service=20Worker=20Caching?= =?UTF-8?q?=20(#7982)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- packages/data-schemas/package.json | 2 +- packages/data-schemas/src/config/meiliLogger.ts | 2 +- packages/data-schemas/src/config/winston.ts | 5 +---- 3 files changed, 3 insertions(+), 6 deletions(-) diff --git a/packages/data-schemas/package.json b/packages/data-schemas/package.json index 9234db4032..cbe0e5578a 100644 --- a/packages/data-schemas/package.json +++ b/packages/data-schemas/package.json @@ -1,6 +1,6 @@ { "name": "@librechat/data-schemas", - "version": "0.0.9", + "version": "0.0.10", "description": "Mongoose schemas and models for LibreChat", "type": "module", "main": "dist/index.cjs", diff --git a/packages/data-schemas/src/config/meiliLogger.ts b/packages/data-schemas/src/config/meiliLogger.ts index 1341c328fd..0d4d39475b 100644 --- a/packages/data-schemas/src/config/meiliLogger.ts +++ b/packages/data-schemas/src/config/meiliLogger.ts @@ -2,7 +2,7 @@ import path from 'path'; 
import winston from 'winston'; import 'winston-daily-rotate-file'; -const logDir = path.join(__dirname, '..', 'logs'); +const logDir = path.join(__dirname, '..', '..', '..', 'api', 'logs'); const { NODE_ENV, DEBUG_LOGGING = 'false' } = process.env; diff --git a/packages/data-schemas/src/config/winston.ts b/packages/data-schemas/src/config/winston.ts index 598d967394..7e52872962 100644 --- a/packages/data-schemas/src/config/winston.ts +++ b/packages/data-schemas/src/config/winston.ts @@ -3,10 +3,8 @@ import winston from 'winston'; import 'winston-daily-rotate-file'; import { redactFormat, redactMessage, debugTraverse, jsonTruncateFormat } from './parsers'; -// Define log directory -const logDir = path.join(__dirname, '..', 'logs'); +const logDir = path.join(__dirname, '..', '..', '..', 'api', 'logs'); -// Type-safe environment variables const { NODE_ENV, DEBUG_LOGGING, CONSOLE_JSON, DEBUG_CONSOLE } = process.env; const useConsoleJson = typeof CONSOLE_JSON === 'string' && CONSOLE_JSON.toLowerCase() === 'true'; @@ -15,7 +13,6 @@ const useDebugConsole = typeof DEBUG_CONSOLE === 'string' && DEBUG_CONSOLE.toLow const useDebugLogging = typeof DEBUG_LOGGING === 'string' && DEBUG_LOGGING.toLowerCase() === 'true'; -// Define custom log levels const levels: winston.config.AbstractConfigSetLevels = { error: 0, warn: 1, From d53cd1f3912f4d73a781155e24add9208af5645a Mon Sep 17 00:00:00 2001 From: Danny Avila Date: Thu, 19 Jun 2025 19:21:38 -0400 Subject: [PATCH 11/16] =?UTF-8?q?=F0=9F=94=A7=20fix:=20Immutability=20Issu?= =?UTF-8?q?e=20in=20`useChatFunctions`=20(#7983)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: Use mutable version of conversation in useChatFunctions * fix: Use cloneDeep for conversation in useChatFunctions to ensure immutability --------- Co-authored-by: lucioperca --- client/src/hooks/Chat/useChatFunctions.ts | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/client/src/hooks/Chat/useChatFunctions.ts b/client/src/hooks/Chat/useChatFunctions.ts index eec8e03bd3..5a86eeb7c6 100644 --- a/client/src/hooks/Chat/useChatFunctions.ts +++ b/client/src/hooks/Chat/useChatFunctions.ts @@ -1,4 +1,5 @@ import { v4 } from 'uuid'; +import { cloneDeep } from 'lodash'; import { useQueryClient } from '@tanstack/react-query'; import { Constants, @@ -51,10 +52,10 @@ export default function useChatFunctions({ getMessages, setMessages, isSubmitting, - conversation, latestMessage, setSubmission, setLatestMessage, + conversation: immutableConversation, }: { index?: number; isSubmitting: boolean; @@ -77,8 +78,8 @@ export default function useChatFunctions({ const isTemporary = useRecoilValue(store.isTemporary); const codeArtifacts = useRecoilValue(store.codeArtifacts); const includeShadcnui = useRecoilValue(store.includeShadcnui); - const { getExpiry } = useUserKey(conversation?.endpoint ?? ''); const customPromptMode = useRecoilValue(store.customPromptMode); + const { getExpiry } = useUserKey(immutableConversation?.endpoint ?? 
''); const setShowStopButton = useSetRecoilState(store.showStopButtonByIndex(index)); const resetLatestMultiMessage = useResetRecoilState(store.latestMessageFamily(index + 1)); @@ -108,6 +109,8 @@ export default function useChatFunctions({ return; } + const conversation = cloneDeep(immutableConversation); + const endpoint = conversation?.endpoint; if (endpoint === null) { console.error('No endpoint available'); From 299c484c7ada1f81c8defdc6f4dc28e8fc587018 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Thu, 19 Jun 2025 20:45:05 -0400 Subject: [PATCH 12/16] =?UTF-8?q?=F0=9F=8C=8D=20i18n:=20Update=20translati?= =?UTF-8?q?on.json=20with=20latest=20translations=20(#7893)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- client/src/locales/en/translation.json | 83 +++++++++++++------------- 1 file changed, 41 insertions(+), 42 deletions(-) diff --git a/client/src/locales/en/translation.json b/client/src/locales/en/translation.json index 5bc7a38f6b..34ef2a7448 100644 --- a/client/src/locales/en/translation.json +++ b/client/src/locales/en/translation.json @@ -17,6 +17,12 @@ "com_agents_file_search_disabled": "Agent must be created before uploading files for File Search.", "com_agents_file_search_info": "When enabled, the agent will be informed of the exact filenames listed below, allowing it to retrieve relevant context from these files.", "com_agents_instructions_placeholder": "The system instructions that the agent uses", + "com_agents_mcp_description_placeholder": "Explain what it does in a few words", + "com_agents_mcp_icon_size": "Minimum size 128 x 128 px", + "com_agents_mcp_info": "Add MCP servers to your agent to enable it to perform tasks and interact with external services", + "com_agents_mcp_name_placeholder": "Custom Tool", + "com_agents_mcp_trust_subtext": "Custom connectors are not verified by LibreChat", + "com_agents_mcps_disabled": "You need to create an agent before adding MCPs.", "com_agents_missing_provider_model": "Please select a provider and model before creating an agent.", "com_agents_name_placeholder": "Optional: The name of the agent", "com_agents_no_access": "You don't have access to edit this agent.", @@ -277,7 +283,6 @@ "com_error_files_upload_canceled": "The file upload request was canceled. Note: the file upload may still be processing and will need to be manually deleted.", "com_error_files_validation": "An error occurred while validating the file.", "com_error_heic_conversion": "Failed to convert HEIC image to JPEG. Please try converting the image manually or use a different format.", - "com_info_heic_converting": "Converting HEIC image to JPEG...", "com_error_input_length": "The latest message token count is too long, exceeding the token limit, or your token limit parameters are misconfigured, adversely affecting the context window. More info: {{0}}. Please shorten your message, adjust the max context size from the conversation parameters, or fork the conversation to continue.", "com_error_invalid_agent_provider": "The \"{{0}}\" provider is not available for use with Agents. Please go to your agent's settings and select a currently available provider.", "com_error_invalid_user_key": "Invalid key provided. Please provide a valid key and try again.", @@ -290,6 +295,7 @@ "com_files_table": "something needs to go here. 
was empty", "com_generated_files": "Generated files:", "com_hide_examples": "Hide Examples", + "com_info_heic_converting": "Converting HEIC image to JPEG...", "com_nav_2fa": "Two-Factor Authentication (2FA)", "com_nav_account_settings": "Account Settings", "com_nav_always_make_prod": "Always make new versions production", @@ -488,10 +494,6 @@ "com_sidepanel_mcp_variables_for": "MCP Variables for {{0}}", "com_sidepanel_parameters": "Parameters", "com_sources_image_alt": "Search result image", - "com_ui_configure_mcp_variables_for": "Configure Variables for {{0}}", - "com_ui_mcp_dialog_desc": "Please enter the necessary information below.", - "com_ui_mcp_enter_var": "Enter value for {{0}}", - "com_ui_saving": "Saving...", "com_sources_more_sources": "+{{count}} sources", "com_sources_tab_all": "All", "com_sources_tab_images": "Images", @@ -510,6 +512,8 @@ "com_ui_accept": "I accept", "com_ui_action_button": "Action Button", "com_ui_add": "Add", + "com_ui_add_mcp": "Add MCP", + "com_ui_add_mcp_server": "Add MCP Server", "com_ui_add_model_preset": "Add a model or preset for an additional response", "com_ui_add_multi_conversation": "Add multi-conversation", "com_ui_adding_details": "Adding details", @@ -578,6 +582,7 @@ "com_ui_auth_url": "Authorization URL", "com_ui_authentication": "Authentication", "com_ui_authentication_type": "Authentication Type", + "com_ui_available_tools": "Available Tools", "com_ui_avatar": "Avatar", "com_ui_azure": "Azure", "com_ui_back": "Back", @@ -620,11 +625,13 @@ "com_ui_client_secret": "Client Secret", "com_ui_close": "Close", "com_ui_close_menu": "Close Menu", + "com_ui_close_window": "Close Window", "com_ui_code": "Code", "com_ui_collapse_chat": "Collapse Chat", "com_ui_command_placeholder": "Optional: Enter a command for the prompt or name will be used", "com_ui_command_usage_placeholder": "Select a Prompt by command or name", "com_ui_complete_setup": "Complete Setup", + "com_ui_configure_mcp_variables_for": "Configure Variables for {{0}}", "com_ui_confirm_action": "Confirm Action", "com_ui_confirm_admin_use_change": "Changing this setting will block access for admins, including yourself. 
Are you sure you want to proceed?", "com_ui_confirm_change": "Confirm Change", @@ -675,6 +682,10 @@ "com_ui_delete_confirm": "This will delete", "com_ui_delete_confirm_prompt_version_var": "This will delete the selected version for \"{{0}}.\" If no other versions exist, the prompt will be deleted.", "com_ui_delete_conversation": "Delete chat?", + "com_ui_delete_mcp": "Delete MCP", + "com_ui_delete_mcp_confirm": "Are you sure you want to delete this MCP server?", + "com_ui_delete_mcp_error": "Failed to delete MCP server", + "com_ui_delete_mcp_success": "MCP server deleted successfully", "com_ui_delete_memory": "Delete Memory", "com_ui_delete_prompt": "Delete Prompt?", "com_ui_delete_shared_link": "Delete shared link?", @@ -684,6 +695,7 @@ "com_ui_descending": "Desc", "com_ui_description": "Description", "com_ui_description_placeholder": "Optional: Enter a description to display for the prompt", + "com_ui_deselect_all": "Deselect All", "com_ui_disabling": "Disabling...", "com_ui_download": "Download", "com_ui_download_artifact": "Download Artifact", @@ -699,6 +711,7 @@ "com_ui_duplication_success": "Successfully duplicated conversation", "com_ui_edit": "Edit", "com_ui_edit_editing_image": "Editing image", + "com_ui_edit_mcp_server": "Edit MCP Server", "com_ui_edit_memory": "Edit Memory", "com_ui_empty_category": "-", "com_ui_endpoint": "Endpoint", @@ -778,6 +791,7 @@ "com_ui_hide_image_details": "Hide Image Details", "com_ui_hide_qr": "Hide QR Code", "com_ui_host": "Host", + "com_ui_icon": "Icon", "com_ui_idea": "Ideas", "com_ui_image_created": "Image created", "com_ui_image_details": "Image Details", @@ -805,8 +819,11 @@ "com_ui_logo": "{{0}} Logo", "com_ui_manage": "Manage", "com_ui_max_tags": "Maximum number allowed is {{0}}, using latest values.", - "com_ui_mcp_servers": "MCP Servers", + "com_ui_mcp_dialog_desc": "Please enter the necessary information below.", + "com_ui_mcp_enter_var": "Enter value for {{0}}", "com_ui_mcp_server_not_found": "Server not found.", + "com_ui_mcp_servers": "MCP Servers", + "com_ui_mcp_url": "MCP Server URL", "com_ui_memories": "Memories", "com_ui_memories_allow_create": "Allow creating Memories", "com_ui_memories_allow_opt_out": "Allow users to opt out of Memories", @@ -847,21 +864,20 @@ "com_ui_not_used": "Not Used", "com_ui_nothing_found": "Nothing found", "com_ui_oauth": "OAuth", - "com_ui_oauth_success_title": "Authentication Successful", - "com_ui_oauth_success_description": "Your authentication was successful. This window will close in", "com_ui_oauth_connected_to": "Connected to", - "com_ui_oauth_error_title": "Authentication Failed", - "com_ui_oauth_error_missing_code": "Authorization code is missing. Please try again.", - "com_ui_oauth_error_missing_state": "State parameter is missing. Please try again.", - "com_ui_oauth_error_invalid_state": "Invalid state parameter. Please try again.", "com_ui_oauth_error_callback_failed": "Authentication callback failed. Please try again.", "com_ui_oauth_error_generic": "Authentication failed. Please try again.", - "com_ui_close_window": "Close Window", - "com_ui_seconds": "seconds", + "com_ui_oauth_error_invalid_state": "Invalid state parameter. Please try again.", + "com_ui_oauth_error_missing_code": "Authorization code is missing. Please try again.", + "com_ui_oauth_error_missing_state": "State parameter is missing. Please try again.", + "com_ui_oauth_error_title": "Authentication Failed", + "com_ui_oauth_success_description": "Your authentication was successful. 
This window will close in", + "com_ui_oauth_success_title": "Authentication Successful", "com_ui_of": "of", "com_ui_off": "Off", "com_ui_on": "On", "com_ui_openai": "OpenAI", + "com_ui_optional": "(optional)", "com_ui_page": "Page", "com_ui_preferences_updated": "Preferences updated successfully", "com_ui_prev": "Prev", @@ -912,11 +928,14 @@ "com_ui_save_badge_changes": "Save badge changes?", "com_ui_save_submit": "Save & Submit", "com_ui_saved": "Saved!", + "com_ui_saving": "Saving...", "com_ui_schema": "Schema", "com_ui_scope": "Scope", "com_ui_search": "Search", + "com_ui_seconds": "seconds", "com_ui_secret_key": "Secret Key", "com_ui_select": "Select", + "com_ui_select_all": "Select All", "com_ui_select_file": "Select a file", "com_ui_select_model": "Select a model", "com_ui_select_provider": "Select a provider", @@ -968,13 +987,19 @@ "com_ui_token_exchange_method": "Token Exchange Method", "com_ui_token_url": "Token URL", "com_ui_tokens": "tokens", + "com_ui_tool_collection_prefix": "A collection of tools from", + "com_ui_tool_info": "Tool Information", + "com_ui_tool_more_info": "More information about this tool", "com_ui_tools": "Tools", "com_ui_travel": "Travel", + "com_ui_trust_app": "I trust this application", "com_ui_unarchive": "Unarchive", "com_ui_unarchive_error": "Failed to unarchive conversation", "com_ui_unknown": "Unknown", "com_ui_untitled": "Untitled", "com_ui_update": "Update", + "com_ui_update_mcp_error": "There was an error creating or updating the MCP.", + "com_ui_update_mcp_success": "Successfully created or updated MCP", "com_ui_upload": "Upload", "com_ui_upload_code_files": "Upload for Code Interpreter", "com_ui_upload_delay": "Uploading \"{{0}}\" is taking more time than anticipated. Please wait while the file finishes indexing for retrieval.", @@ -1030,31 +1055,5 @@ "com_ui_yes": "Yes", "com_ui_zoom": "Zoom", "com_user_message": "You", - "com_warning_resubmit_unsupported": "Resubmitting the AI message is not supported for this endpoint.", - "com_ui_add_mcp": "Add MCP", - "com_ui_add_mcp": "Add MCP", - "com_ui_add_mcp_server": "Add MCP Server", - "com_ui_edit_mcp_server": "Edit MCP Server", - "com_agents_mcps_disabled": "You need to create an agent before adding MCPs.", - "com_ui_delete_mcp": "Delete MCP", - "com_ui_delete_mcp_confirm": "Are you sure you want to delete this MCP server?", - "com_ui_delete_mcp_success": "MCP server deleted successfully", - "com_ui_delete_mcp_error": "Failed to delete MCP server", - "com_agents_mcp_info": "Add MCP servers to your agent to enable it to perform tasks and interact with external services", - "com_ui_update_mcp_error": "There was an error creating or updating the MCP.", - "com_ui_update_mcp_success": "Successfully created or updated MCP", - "com_ui_available_tools": "Available Tools", - "com_ui_select_all": "Select All", - "com_ui_deselect_all": "Deselect All", - "com_agents_mcp_name_placeholder": "Custom Tool", - "com_ui_optional": "(optional)", - "com_agents_mcp_description_placeholder": "Explain what it does in a few words", - "com_ui_mcp_url": "MCP Server URL", - "com_ui_trust_app": "I trust this application", - "com_agents_mcp_trust_subtext": "Custom connectors are not verified by LibreChat", - "com_ui_icon": "Icon", - "com_agents_mcp_icon_size": "Minimum size 128 x 128 px", - "com_ui_tool_collection_prefix": "A collection of tools from", - "com_ui_tool_info": "Tool Information", - "com_ui_tool_more_info": "More information about this tool" -} + "com_warning_resubmit_unsupported": "Resubmitting the AI message is 
not supported for this endpoint." +} \ No newline at end of file From 97085073d2766de5c639ed78721b10195f9e8d30 Mon Sep 17 00:00:00 2001 From: Danny Avila Date: Thu, 19 Jun 2025 22:54:49 -0400 Subject: [PATCH 13/16] =?UTF-8?q?=F0=9F=94=A7=20fix:=20Potential=20Null=20?= =?UTF-8?q?Values=20for=20Custom=20Config?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- api/server/routes/config.js | 5 ++--- api/server/services/Config/getCustomConfig.js | 2 +- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/api/server/routes/config.js b/api/server/routes/config.js index e50fb9f452..dd93037dd9 100644 --- a/api/server/routes/config.js +++ b/api/server/routes/config.js @@ -29,8 +29,6 @@ router.get('/', async function (req, res) { return; } - const config = await getCustomConfig(); - const isBirthday = () => { const today = new Date(); return today.getMonth() === 1 && today.getDate() === 11; @@ -102,7 +100,8 @@ router.get('/', async function (req, res) { }; payload.mcpServers = {}; - if (config.mcpServers) { + const config = await getCustomConfig(); + if (config?.mcpServers != null) { for (const serverName in config.mcpServers) { const serverConfig = config.mcpServers[serverName]; payload.mcpServers[serverName] = { diff --git a/api/server/services/Config/getCustomConfig.js b/api/server/services/Config/getCustomConfig.js index 0851b89a46..d1ee5c3278 100644 --- a/api/server/services/Config/getCustomConfig.js +++ b/api/server/services/Config/getCustomConfig.js @@ -57,7 +57,7 @@ const getCustomEndpointConfig = async (endpoint) => { async function createGetMCPAuthMap() { const customConfig = await getCustomConfig(); const mcpServers = customConfig?.mcpServers; - const hasCustomUserVars = Object.values(mcpServers).some((server) => server.customUserVars); + const hasCustomUserVars = Object.values(mcpServers ?? 
{}).some((server) => server.customUserVars); if (!hasCustomUserVars) { return; } From fa54c9ae9001fbad409797e5e1ae76d3c8638f0f Mon Sep 17 00:00:00 2001 From: Danny Avila Date: Fri, 20 Jun 2025 15:49:24 -0400 Subject: [PATCH 14/16] =?UTF-8?q?=F0=9F=93=A6=20chore:=20Bump=20Agents=20P?= =?UTF-8?q?ackages=20(#7992)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: update peer dependency for @librechat/agents to version 2.4.41 * 🔧 chore: proxy handling in OpenAI endpoint to use undici * 🔧 chore: update @anthropic-ai/sdk to version 0.52.0 and refactor proxy handling to use undici * 🔧 chore: update globIgnores in vite.config.ts to exclude index.html from caching * 🔧 ci: update proxy handling in getLLMConfig to use fetchOptions and ProxyAgent * 🔧 chore: refactor proxy handling in Anthropic and OpenAI clients to use fetchOptions * refactor: agent initialization to streamline model parameters and resendFiles handling * chore: update @google/generative-ai to version 0.24.0 --- api/app/clients/AnthropicClient.js | 3 +- api/app/clients/OpenAIClient.js | 5 +- api/app/clients/specs/AnthropicClient.test.js | 4 +- api/package.json | 14 +- api/server/services/Endpoints/agents/agent.js | 60 +- .../services/Endpoints/agents/initialize.js | 2 +- .../services/Endpoints/anthropic/llm.js | 7 +- .../services/Endpoints/anthropic/llm.spec.js | 8 +- client/vite.config.ts | 2 +- package-lock.json | 744 +++++++++++++----- packages/api/package.json | 3 +- packages/api/src/endpoints/openai/llm.ts | 8 +- 12 files changed, 598 insertions(+), 262 deletions(-) diff --git a/api/app/clients/AnthropicClient.js b/api/app/clients/AnthropicClient.js index 037f1e7c46..a3fba29d5c 100644 --- a/api/app/clients/AnthropicClient.js +++ b/api/app/clients/AnthropicClient.js @@ -190,10 +190,11 @@ class AnthropicClient extends BaseClient { reverseProxyUrl: this.options.reverseProxyUrl, }), apiKey: this.apiKey, + fetchOptions: {}, }; if (this.options.proxy) { - options.httpAgent = new HttpsProxyAgent(this.options.proxy); + options.fetchOptions.agent = new HttpsProxyAgent(this.options.proxy); } if (this.options.reverseProxyUrl) { diff --git a/api/app/clients/OpenAIClient.js b/api/app/clients/OpenAIClient.js index f3a7e67c12..2d4146bd9c 100644 --- a/api/app/clients/OpenAIClient.js +++ b/api/app/clients/OpenAIClient.js @@ -1159,6 +1159,7 @@ ${convo} logger.debug('[OpenAIClient] chatCompletion', { baseURL, modelOptions }); const opts = { baseURL, + fetchOptions: {}, }; if (this.useOpenRouter) { @@ -1177,7 +1178,7 @@ ${convo} } if (this.options.proxy) { - opts.httpAgent = new HttpsProxyAgent(this.options.proxy); + opts.fetchOptions.agent = new HttpsProxyAgent(this.options.proxy); } /** @type {TAzureConfig | undefined} */ @@ -1395,7 +1396,7 @@ ${convo} ...modelOptions, stream: true, }; - const stream = await openai.beta.chat.completions + const stream = await openai.chat.completions .stream(params) .on('abort', () => { /* Do nothing here */ diff --git a/api/app/clients/specs/AnthropicClient.test.js b/api/app/clients/specs/AnthropicClient.test.js index 9867859087..fbcd2b75e4 100644 --- a/api/app/clients/specs/AnthropicClient.test.js +++ b/api/app/clients/specs/AnthropicClient.test.js @@ -309,7 +309,7 @@ describe('AnthropicClient', () => { }; client.setOptions({ modelOptions, promptCache: true }); const anthropicClient = client.getClient(modelOptions); - expect(anthropicClient.defaultHeaders).not.toHaveProperty('anthropic-beta'); + expect(anthropicClient._options.defaultHeaders).toBeUndefined(); }); 
it('should not add beta header for other models', () => { @@ -320,7 +320,7 @@ describe('AnthropicClient', () => { }, }); const anthropicClient = client.getClient(); - expect(anthropicClient.defaultHeaders).not.toHaveProperty('anthropic-beta'); + expect(anthropicClient._options.defaultHeaders).toBeUndefined(); }); }); diff --git a/api/package.json b/api/package.json index 636e8cb8f3..6633a99c3f 100644 --- a/api/package.json +++ b/api/package.json @@ -34,21 +34,21 @@ }, "homepage": "https://librechat.ai", "dependencies": { - "@anthropic-ai/sdk": "^0.37.0", + "@anthropic-ai/sdk": "^0.52.0", "@aws-sdk/client-s3": "^3.758.0", "@aws-sdk/s3-request-presigner": "^3.758.0", "@azure/identity": "^4.7.0", "@azure/search-documents": "^12.0.0", "@azure/storage-blob": "^12.27.0", - "@google/generative-ai": "^0.23.0", + "@google/generative-ai": "^0.24.0", "@googleapis/youtube": "^20.0.0", "@keyv/redis": "^4.3.3", - "@langchain/community": "^0.3.44", - "@langchain/core": "^0.3.57", - "@langchain/google-genai": "^0.2.9", - "@langchain/google-vertexai": "^0.2.9", + "@langchain/community": "^0.3.47", + "@langchain/core": "^0.3.60", + "@langchain/google-genai": "^0.2.13", + "@langchain/google-vertexai": "^0.2.13", "@langchain/textsplitters": "^0.1.0", - "@librechat/agents": "^2.4.38", + "@librechat/agents": "^2.4.41", "@librechat/api": "*", "@librechat/data-schemas": "*", "@node-saml/passport-saml": "^5.0.0", diff --git a/api/server/services/Endpoints/agents/agent.js b/api/server/services/Endpoints/agents/agent.js index 13a42140db..e135401467 100644 --- a/api/server/services/Endpoints/agents/agent.js +++ b/api/server/services/Endpoints/agents/agent.js @@ -63,11 +63,17 @@ const initializeAgent = async ({ } let currentFiles; - if ( - isInitialAgent && - conversationId != null && - (agent.model_parameters?.resendFiles ?? true) === true - ) { + const _modelOptions = structuredClone( + Object.assign( + { model: agent.model }, + agent.model_parameters ?? { model: agent.model }, + isInitialAgent === true ? endpointOption?.model_parameters : {}, + ), + ); + + const { resendFiles = true, ...modelOptions } = _modelOptions; + + if (isInitialAgent && conversationId != null && resendFiles) { const fileIds = (await getConvoFiles(conversationId)) ?? []; /** @type {Set} */ const toolResourceSet = new Set(); @@ -117,15 +123,11 @@ const initializeAgent = async ({ getOptions = initCustom; agent.provider = Providers.OPENAI; } - const model_parameters = Object.assign( - {}, - agent.model_parameters ?? { model: agent.model }, - isInitialAgent === true ? endpointOption?.model_parameters : {}, - ); + const _endpointOption = isInitialAgent === true - ? Object.assign({}, endpointOption, { model_parameters }) - : { model_parameters }; + ? Object.assign({}, endpointOption, { model_parameters: modelOptions }) + : { model_parameters: modelOptions }; const options = await getOptions({ req, @@ -136,6 +138,20 @@ const initializeAgent = async ({ endpointOption: _endpointOption, }); + const tokensModel = + agent.provider === EModelEndpoint.azureOpenAI ? 
agent.model : modelOptions.model; + const maxTokens = optionalChainWithEmptyCheck( + modelOptions.maxOutputTokens, + modelOptions.maxTokens, + 0, + ); + const maxContextTokens = optionalChainWithEmptyCheck( + modelOptions.maxContextTokens, + modelOptions.max_context_tokens, + getModelMaxTokens(tokensModel, providerEndpointMap[provider]), + 4096, + ); + if ( agent.endpoint === EModelEndpoint.azureOpenAI && options.llmConfig?.azureOpenAIApiInstanceName == null @@ -148,15 +164,11 @@ const initializeAgent = async ({ } /** @type {import('@librechat/agents').ClientOptions} */ - agent.model_parameters = Object.assign(model_parameters, options.llmConfig); + agent.model_parameters = { ...options.llmConfig }; if (options.configOptions) { agent.model_parameters.configuration = options.configOptions; } - if (!agent.model_parameters.model) { - agent.model_parameters.model = agent.model; - } - if (agent.instructions && agent.instructions !== '') { agent.instructions = replaceSpecialVars({ text: agent.instructions, @@ -171,23 +183,11 @@ const initializeAgent = async ({ }); } - const tokensModel = - agent.provider === EModelEndpoint.azureOpenAI ? agent.model : agent.model_parameters.model; - const maxTokens = optionalChainWithEmptyCheck( - agent.model_parameters.maxOutputTokens, - agent.model_parameters.maxTokens, - 0, - ); - const maxContextTokens = optionalChainWithEmptyCheck( - agent.model_parameters.maxContextTokens, - agent.max_context_tokens, - getModelMaxTokens(tokensModel, providerEndpointMap[provider]), - 4096, - ); return { ...agent, tools, attachments, + resendFiles, toolContextMap, maxContextTokens: (maxContextTokens - maxTokens) * 0.9, }; diff --git a/api/server/services/Endpoints/agents/initialize.js b/api/server/services/Endpoints/agents/initialize.js index e3154fe13a..e4ffcf4730 100644 --- a/api/server/services/Endpoints/agents/initialize.js +++ b/api/server/services/Endpoints/agents/initialize.js @@ -130,8 +130,8 @@ const initializeClient = async ({ req, res, endpointOption }) => { iconURL: endpointOption.iconURL, attachments: primaryConfig.attachments, endpointType: endpointOption.endpointType, + resendFiles: primaryConfig.resendFiles ?? true, maxContextTokens: primaryConfig.maxContextTokens, - resendFiles: primaryConfig.model_parameters?.resendFiles ?? true, endpoint: primaryConfig.id === Constants.EPHEMERAL_AGENT_ID ? 
primaryConfig.endpoint diff --git a/api/server/services/Endpoints/anthropic/llm.js b/api/server/services/Endpoints/anthropic/llm.js index 9f20b8e61d..66496f00fd 100644 --- a/api/server/services/Endpoints/anthropic/llm.js +++ b/api/server/services/Endpoints/anthropic/llm.js @@ -1,4 +1,4 @@ -const { HttpsProxyAgent } = require('https-proxy-agent'); +const { ProxyAgent } = require('undici'); const { anthropicSettings, removeNullishValues } = require('librechat-data-provider'); const { checkPromptCacheSupport, getClaudeHeaders, configureReasoning } = require('./helpers'); @@ -67,7 +67,10 @@ function getLLMConfig(apiKey, options = {}) { } if (options.proxy) { - requestOptions.clientOptions.httpAgent = new HttpsProxyAgent(options.proxy); + const proxyAgent = new ProxyAgent(options.proxy); + requestOptions.clientOptions.fetchOptions = { + dispatcher: proxyAgent, + }; } if (options.reverseProxyUrl) { diff --git a/api/server/services/Endpoints/anthropic/llm.spec.js b/api/server/services/Endpoints/anthropic/llm.spec.js index 9c453efb92..f3f77ee897 100644 --- a/api/server/services/Endpoints/anthropic/llm.spec.js +++ b/api/server/services/Endpoints/anthropic/llm.spec.js @@ -21,8 +21,12 @@ describe('getLLMConfig', () => { proxy: 'http://proxy:8080', }); - expect(result.llmConfig.clientOptions).toHaveProperty('httpAgent'); - expect(result.llmConfig.clientOptions.httpAgent).toHaveProperty('proxy', 'http://proxy:8080'); + expect(result.llmConfig.clientOptions).toHaveProperty('fetchOptions'); + expect(result.llmConfig.clientOptions.fetchOptions).toHaveProperty('dispatcher'); + expect(result.llmConfig.clientOptions.fetchOptions.dispatcher).toBeDefined(); + expect(result.llmConfig.clientOptions.fetchOptions.dispatcher.constructor.name).toBe( + 'ProxyAgent', + ); }); it('should include reverse proxy URL when provided', () => { diff --git a/client/vite.config.ts b/client/vite.config.ts index 9a091d9f53..152cf4255e 100644 --- a/client/vite.config.ts +++ b/client/vite.config.ts @@ -46,7 +46,7 @@ export default defineConfig(({ command }) => ({ 'assets/maskable-icon.png', 'manifest.webmanifest', ], - globIgnores: ['images/**/*', '**/*.map'], + globIgnores: ['images/**/*', '**/*.map', 'index.html'], maximumFileSizeToCacheInBytes: 4 * 1024 * 1024, navigateFallbackDenylist: [/^\/oauth/, /^\/api/], }, diff --git a/package-lock.json b/package-lock.json index a7384d0f90..0301bdce1b 100644 --- a/package-lock.json +++ b/package-lock.json @@ -50,21 +50,21 @@ "version": "v0.7.8", "license": "ISC", "dependencies": { - "@anthropic-ai/sdk": "^0.37.0", + "@anthropic-ai/sdk": "^0.52.0", "@aws-sdk/client-s3": "^3.758.0", "@aws-sdk/s3-request-presigner": "^3.758.0", "@azure/identity": "^4.7.0", "@azure/search-documents": "^12.0.0", "@azure/storage-blob": "^12.27.0", - "@google/generative-ai": "^0.23.0", + "@google/generative-ai": "^0.24.0", "@googleapis/youtube": "^20.0.0", "@keyv/redis": "^4.3.3", - "@langchain/community": "^0.3.44", - "@langchain/core": "^0.3.57", - "@langchain/google-genai": "^0.2.9", - "@langchain/google-vertexai": "^0.2.9", + "@langchain/community": "^0.3.47", + "@langchain/core": "^0.3.60", + "@langchain/google-genai": "^0.2.13", + "@langchain/google-vertexai": "^0.2.13", "@langchain/textsplitters": "^0.1.0", - "@librechat/agents": "^2.4.38", + "@librechat/agents": "^2.4.41", "@librechat/api": "*", "@librechat/data-schemas": "*", "@node-saml/passport-saml": "^5.0.0", @@ -140,17 +140,12 @@ } }, "api/node_modules/@anthropic-ai/sdk": { - "version": "0.37.0", - "resolved": 
"https://registry.npmjs.org/@anthropic-ai/sdk/-/sdk-0.37.0.tgz", - "integrity": "sha512-tHjX2YbkUBwEgg0JZU3EFSSAQPoK4qQR/NFYa8Vtzd5UAyXzZksCw2In69Rml4R/TyHPBfRYaLK35XiOe33pjw==", - "dependencies": { - "@types/node": "^18.11.18", - "@types/node-fetch": "^2.6.4", - "abort-controller": "^3.0.0", - "agentkeepalive": "^4.2.1", - "form-data-encoder": "1.7.2", - "formdata-node": "^4.3.2", - "node-fetch": "^2.6.7" + "version": "0.52.0", + "resolved": "https://registry.npmjs.org/@anthropic-ai/sdk/-/sdk-0.52.0.tgz", + "integrity": "sha512-d4c+fg+xy9e46c8+YnrrgIQR45CZlAi7PwdzIfDXDM6ACxEZli1/fxhURsq30ZpMZy6LvSkr41jGq5aF5TD7rQ==", + "license": "MIT", + "bin": { + "anthropic-ai-sdk": "bin/cli" } }, "api/node_modules/@aws-sdk/client-sso": { @@ -781,14 +776,6 @@ } } }, - "api/node_modules/@google/generative-ai": { - "version": "0.23.0", - "resolved": "https://registry.npmjs.org/@google/generative-ai/-/generative-ai-0.23.0.tgz", - "integrity": "sha512-8vGoguvLG22U3t9XpXvbxjxy/yfBhE27pqptz5QVKXwBSh9sUXXmZWS8ZdGMqifK7kiuZQCG2cjGJNUdBsgX+g==", - "engines": { - "node": ">=18.0.0" - } - }, "api/node_modules/@keyv/redis": { "version": "4.3.3", "resolved": "https://registry.npmjs.org/@keyv/redis/-/redis-4.3.3.tgz", @@ -803,9 +790,9 @@ } }, "api/node_modules/@langchain/community": { - "version": "0.3.44", - "resolved": "https://registry.npmjs.org/@langchain/community/-/community-0.3.44.tgz", - "integrity": "sha512-lOA7rw0lC6WCRO/xoacx4Gpbx1ncscAilYn9LVjyiBxJw47d01iq8hdkGdBW5OFISub/wCK4FmHih2S4WJicAg==", + "version": "0.3.47", + "resolved": "https://registry.npmjs.org/@langchain/community/-/community-0.3.47.tgz", + "integrity": "sha512-Vo42kAfkXpTFSevhEkeqqE55az8NyQgDktCbitXYuhipNbFYx08XVvqEDkFkB20MM/Z7u+cvLb+DxCqnKuH0CQ==", "license": "MIT", "dependencies": { "@langchain/openai": ">=0.2.0 <0.6.0", @@ -815,10 +802,9 @@ "flat": "^5.0.2", "js-yaml": "^4.1.0", "langchain": ">=0.2.3 <0.3.0 || >=0.3.4 <0.4.0", - "langsmith": "^0.3.29", + "langsmith": "^0.3.33", "uuid": "^10.0.0", - "zod": "^3.22.3", - "zod-to-json-schema": "^3.22.5" + "zod": "^3.25.32" }, "engines": { "node": ">=18" @@ -851,11 +837,11 @@ "@google-ai/generativelanguage": "*", "@google-cloud/storage": "^6.10.1 || ^7.7.0", "@gradientai/nodejs-sdk": "^1.2.0", - "@huggingface/inference": "^2.6.4", - "@huggingface/transformers": "^3.2.3", + "@huggingface/inference": "^4.0.5", + "@huggingface/transformers": "^3.5.2", "@ibm-cloud/watsonx-ai": "*", "@lancedb/lancedb": "^0.12.0", - "@langchain/core": ">=0.2.21 <0.4.0", + "@langchain/core": ">=0.3.58 <0.4.0", "@layerup/layerup-security": "^1.5.12", "@libsql/client": "^0.14.0", "@mendable/firecrawl-js": "^1.4.3", @@ -925,7 +911,7 @@ "mammoth": "^1.6.0", "mariadb": "^3.4.0", "mem0ai": "^2.1.8", - "mongodb": ">=5.2.0", + "mongodb": "^6.17.0", "mysql2": "^3.9.8", "neo4j-driver": "*", "notion-to-md": "^3.1.0", @@ -1329,21 +1315,41 @@ } }, "api/node_modules/@langchain/openai": { - "version": "0.5.11", - "resolved": "https://registry.npmjs.org/@langchain/openai/-/openai-0.5.11.tgz", - "integrity": "sha512-DAp7x+NfjSqDvKVMle8yb85nzz+3ctP7zGJaeRS0vLmvkY9qf/jRkowsM0mcsIiEUKhG/AHzWqvxbhktb/jJ6Q==", + "version": "0.5.14", + "resolved": "https://registry.npmjs.org/@langchain/openai/-/openai-0.5.14.tgz", + "integrity": "sha512-0GEj5K/qi1MRuZ4nE7NvyI4jTG+RSewLZqsExUwRukWdeqmkPNHGrogTa5ZDt7eaJxAaY7EgLC5ZnvCM3L1oug==", "license": "MIT", "dependencies": { "js-tiktoken": "^1.0.12", - "openai": "^4.96.0", - "zod": "^3.22.4", - "zod-to-json-schema": "^3.22.3" + "openai": "^5.3.0", + "zod": "^3.25.32" }, "engines": { "node": ">=18" }, 
"peerDependencies": { - "@langchain/core": ">=0.3.48 <0.4.0" + "@langchain/core": ">=0.3.58 <0.4.0" + } + }, + "api/node_modules/@langchain/openai/node_modules/openai": { + "version": "5.5.1", + "resolved": "https://registry.npmjs.org/openai/-/openai-5.5.1.tgz", + "integrity": "sha512-5i19097mGotHA1eFsM6Tjd/tJ8uo9sa5Ysv4Q6bKJ2vtN6rc0MzMrUefXnLXYAJcmMQrC1Efhj0AvfIkXrQamw==", + "license": "Apache-2.0", + "bin": { + "openai": "bin/cli" + }, + "peerDependencies": { + "ws": "^8.18.0", + "zod": "^3.23.8" + }, + "peerDependenciesMeta": { + "ws": { + "optional": true + }, + "zod": { + "optional": true + } } }, "api/node_modules/@smithy/abort-controller": { @@ -2090,14 +2096,6 @@ "node": ">=18.0.0" } }, - "api/node_modules/@types/node": { - "version": "18.19.14", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.14.tgz", - "integrity": "sha512-EnQ4Us2rmOS64nHDWr0XqAD8DsO6f3XR6lf9UIIrZQpUzPVdN/oPuEzfDWNHSyXLvoGgjuEm/sPwFGSSs35Wtg==", - "dependencies": { - "undici-types": "~5.26.4" - } - }, "api/node_modules/agent-base": { "version": "7.1.3", "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.3.tgz", @@ -2142,6 +2140,104 @@ "node": ">= 0.6" } }, + "api/node_modules/gaxios": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-7.1.0.tgz", + "integrity": "sha512-y1Q0MX1Ba6eg67Zz92kW0MHHhdtWksYckQy1KJsI6P4UlDQ8cvdvpLEPslD/k7vFkdPppMESFGTvk7XpSiKj8g==", + "license": "Apache-2.0", + "optional": true, + "peer": true, + "dependencies": { + "extend": "^3.0.2", + "https-proxy-agent": "^7.0.1", + "node-fetch": "^3.3.2" + }, + "engines": { + "node": ">=18" + } + }, + "api/node_modules/gaxios/node_modules/node-fetch": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.3.2.tgz", + "integrity": "sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==", + "license": "MIT", + "optional": true, + "peer": true, + "dependencies": { + "data-uri-to-buffer": "^4.0.0", + "fetch-blob": "^3.1.4", + "formdata-polyfill": "^4.0.10" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/node-fetch" + } + }, + "api/node_modules/google-auth-library": { + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-10.1.0.tgz", + "integrity": "sha512-GspVjZj1RbyRWpQ9FbAXMKjFGzZwDKnUHi66JJ+tcjcu5/xYAP1pdlWotCuIkMwjfVsxxDvsGZXGLzRt72D0sQ==", + "license": "Apache-2.0", + "optional": true, + "peer": true, + "dependencies": { + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "gaxios": "^7.0.0", + "gcp-metadata": "^7.0.0", + "google-logging-utils": "^1.0.0", + "gtoken": "^8.0.0", + "jws": "^4.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "api/node_modules/google-auth-library/node_modules/gcp-metadata": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-7.0.0.tgz", + "integrity": "sha512-3PfRTzvT3Msu0Hy8Gf9ypxJvaClG2IB9pyH0r8QOmRBW5mUcrHgYpF4GYP+XulDbfhxEhBYtJtJJQb5S2wM+LA==", + "license": "Apache-2.0", + "optional": true, + "peer": true, + "dependencies": { + "gaxios": "^7.0.0", + "google-logging-utils": "^1.0.0", + "json-bigint": "^1.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "api/node_modules/google-logging-utils": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/google-logging-utils/-/google-logging-utils-1.1.1.tgz", + "integrity": 
"sha512-rcX58I7nqpu4mbKztFeOAObbomBbHU2oIb/d3tJfF3dizGSApqtSwYJigGCooHdnMyQBIw8BrWyK96w3YXgr6A==", + "license": "Apache-2.0", + "optional": true, + "peer": true, + "engines": { + "node": ">=14" + } + }, + "api/node_modules/gtoken": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-8.0.0.tgz", + "integrity": "sha512-+CqsMbHPiSTdtSO14O51eMNlrp9N79gmeqmXeouJOhfucAedHw9noVe/n5uJk3tbKE6a+6ZCQg3RPhVhHByAIw==", + "license": "MIT", + "optional": true, + "peer": true, + "dependencies": { + "gaxios": "^7.0.0", + "jws": "^4.0.0" + }, + "engines": { + "node": ">=18" + } + }, "api/node_modules/https-proxy-agent": { "version": "7.0.6", "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz", @@ -2243,14 +2339,6 @@ "node": ">=14" } }, - "api/node_modules/mongodb-connection-string-url/node_modules/webidl-conversions": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz", - "integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==", - "engines": { - "node": ">=12" - } - }, "api/node_modules/mongodb-connection-string-url/node_modules/whatwg-url": { "version": "13.0.0", "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-13.0.0.tgz", @@ -2383,15 +2471,6 @@ "@img/sharp-win32-x64": "0.33.5" } }, - "api/node_modules/undici": { - "version": "7.10.0", - "resolved": "https://registry.npmjs.org/undici/-/undici-7.10.0.tgz", - "integrity": "sha512-u5otvFBOBZvmdjWLVW+5DAc9Nkq8f24g0O9oY7qw2JVIF1VocIFoyz9JFkuVOS2j41AufeO0xnlweJ2RLT8nGw==", - "license": "MIT", - "engines": { - "node": ">=20.18.1" - } - }, "api/node_modules/uuid": { "version": "10.0.0", "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz", @@ -16752,6 +16831,7 @@ "version": "0.24.0", "resolved": "https://registry.npmjs.org/@google/generative-ai/-/generative-ai-0.24.0.tgz", "integrity": "sha512-fnEITCGEB7NdX0BhoYZ/cq/7WPZ1QS5IzJJfC3Tg/OwkvBetMiVJciyaan297OvE4B9Jg1xvo0zIazX/9sGu1Q==", + "license": "Apache-2.0", "engines": { "node": ">=18.0.0" } @@ -17857,65 +17937,46 @@ } }, "node_modules/@langchain/anthropic": { - "version": "0.3.21", - "resolved": "https://registry.npmjs.org/@langchain/anthropic/-/anthropic-0.3.21.tgz", - "integrity": "sha512-iyVZ9PHcNbABVzWFWtolcDUqHYCEkl1yypRYXE98tTPiNhGo6g/MgKky96TEcOnJ0VNHD6qlzo9LhQl87OplvA==", + "version": "0.3.23", + "resolved": "https://registry.npmjs.org/@langchain/anthropic/-/anthropic-0.3.23.tgz", + "integrity": "sha512-lwp43HUcCM0bJqJEwBwutskvV85G3R3rQDW5XNCntPDzelW+fCmlsm40P7dg7uG/3uOtDGhj4eDMapKpbPvtlA==", "license": "MIT", "dependencies": { - "@anthropic-ai/sdk": "^0.39.0", - "fast-xml-parser": "^4.4.1", - "zod": "^3.22.4", - "zod-to-json-schema": "^3.22.4" + "@anthropic-ai/sdk": "^0.52.0", + "fast-xml-parser": "^4.4.1" }, "engines": { "node": ">=18" }, "peerDependencies": { - "@langchain/core": ">=0.3.48 <0.4.0" + "@langchain/core": ">=0.3.58 <0.4.0" } }, "node_modules/@langchain/anthropic/node_modules/@anthropic-ai/sdk": { - "version": "0.39.0", - "resolved": "https://registry.npmjs.org/@anthropic-ai/sdk/-/sdk-0.39.0.tgz", - "integrity": "sha512-eMyDIPRZbt1CCLErRCi3exlAvNkBtRe+kW5vvJyef93PmNr/clstYgHhtvmkxN82nlKgzyGPCyGxrm0JQ1ZIdg==", + "version": "0.52.0", + "resolved": "https://registry.npmjs.org/@anthropic-ai/sdk/-/sdk-0.52.0.tgz", + "integrity": "sha512-d4c+fg+xy9e46c8+YnrrgIQR45CZlAi7PwdzIfDXDM6ACxEZli1/fxhURsq30ZpMZy6LvSkr41jGq5aF5TD7rQ==", "license": "MIT", - "dependencies": { - "@types/node": 
"^18.11.18", - "@types/node-fetch": "^2.6.4", - "abort-controller": "^3.0.0", - "agentkeepalive": "^4.2.1", - "form-data-encoder": "1.7.2", - "formdata-node": "^4.3.2", - "node-fetch": "^2.6.7" - } - }, - "node_modules/@langchain/anthropic/node_modules/@types/node": { - "version": "18.19.87", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.87.tgz", - "integrity": "sha512-OIAAu6ypnVZHmsHCeJ+7CCSub38QNBS9uceMQeg7K5Ur0Jr+wG9wEOEvvMbhp09pxD5czIUy/jND7s7Tb6Nw7A==", - "license": "MIT", - "dependencies": { - "undici-types": "~5.26.4" + "bin": { + "anthropic-ai-sdk": "bin/cli" } }, "node_modules/@langchain/aws": { - "version": "0.1.10", - "resolved": "https://registry.npmjs.org/@langchain/aws/-/aws-0.1.10.tgz", - "integrity": "sha512-PWA68aPBdLgmOvzsVgVpBec3sfwyCgsx/fpaTsf75k6TfHp4KBzqGGLGzgYo5/QBrInRkxVawJL1eKu4APy2nw==", + "version": "0.1.11", + "resolved": "https://registry.npmjs.org/@langchain/aws/-/aws-0.1.11.tgz", + "integrity": "sha512-JNnEmJaJB5TzcniPYGZi6dlpmZyzeyVsS+Za0Ye1DhCpcNmEiWRy514gVcTPQUEl5EcpIR51B/YyowI7zUzVvg==", "license": "MIT", "dependencies": { "@aws-sdk/client-bedrock-agent-runtime": "^3.755.0", "@aws-sdk/client-bedrock-runtime": "^3.755.0", "@aws-sdk/client-kendra": "^3.750.0", - "@aws-sdk/credential-provider-node": "^3.750.0", - "zod": "^3.23.8", - "zod-to-json-schema": "^3.22.5" + "@aws-sdk/credential-provider-node": "^3.750.0" }, "engines": { "node": ">=18" }, "peerDependencies": { - "@langchain/core": ">=0.3.48 <0.4.0" + "@langchain/core": ">=0.3.58 <0.4.0" } }, "node_modules/@langchain/aws/node_modules/@aws-sdk/client-sso": { @@ -18771,9 +18832,9 @@ } }, "node_modules/@langchain/core": { - "version": "0.3.57", - "resolved": "https://registry.npmjs.org/@langchain/core/-/core-0.3.57.tgz", - "integrity": "sha512-jz28qCTKJmi47b6jqhQ6vYRTG5jRpqhtPQjriRTB5wR8mgvzo6xKs0fG/kExS3ZvM79ytD1npBvgf8i19xOo9Q==", + "version": "0.3.60", + "resolved": "https://registry.npmjs.org/@langchain/core/-/core-0.3.60.tgz", + "integrity": "sha512-FlUjO7ovGnaKcb2JPmw5ajPaZj18LVjh/vAURtdzzFy4UsYBLv/5Y3HJQ2KgDdrl6sW/UyfG0zWdnhZQ1A5eJw==", "license": "MIT", "dependencies": { "@cfworker/json-schema": "^4.0.2", @@ -18781,12 +18842,12 @@ "camelcase": "6", "decamelize": "1.2.0", "js-tiktoken": "^1.0.12", - "langsmith": "^0.3.29", + "langsmith": "^0.3.33", "mustache": "^4.2.0", "p-queue": "^6.6.2", "p-retry": "4", "uuid": "^10.0.0", - "zod": "^3.22.4", + "zod": "^3.25.32", "zod-to-json-schema": "^3.22.3" }, "engines": { @@ -18818,34 +18879,71 @@ } }, "node_modules/@langchain/deepseek": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/@langchain/deepseek/-/deepseek-0.0.1.tgz", - "integrity": "sha512-jgrbitvV4p7Kqo/Fyni9coCliNXUrJ2XChdR8eHvQg3RL+w13DIQjJn2mrkCrb7v6Is1rI7It2x3yIbADL71Yg==", + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/@langchain/deepseek/-/deepseek-0.0.2.tgz", + "integrity": "sha512-u13KbPUXW7uhcybbRzYdRroBgqVUSgG0SJM15c7Etld2yjRQC2c4O/ga9eQZdLh/kaDlQfH/ZITFdjHe77RnGw==", + "license": "MIT", "dependencies": { - "@langchain/openai": "^0.4.2", - "zod": "^3.24.1" + "@langchain/openai": "^0.5.5" }, "engines": { "node": ">=18" }, "peerDependencies": { - "@langchain/core": ">=0.3.0 <0.4.0" + "@langchain/core": ">=0.3.58 <0.4.0" + } + }, + "node_modules/@langchain/deepseek/node_modules/@langchain/openai": { + "version": "0.5.14", + "resolved": "https://registry.npmjs.org/@langchain/openai/-/openai-0.5.14.tgz", + "integrity": "sha512-0GEj5K/qi1MRuZ4nE7NvyI4jTG+RSewLZqsExUwRukWdeqmkPNHGrogTa5ZDt7eaJxAaY7EgLC5ZnvCM3L1oug==", + "license": "MIT", 
+ "dependencies": { + "js-tiktoken": "^1.0.12", + "openai": "^5.3.0", + "zod": "^3.25.32" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@langchain/core": ">=0.3.58 <0.4.0" + } + }, + "node_modules/@langchain/deepseek/node_modules/openai": { + "version": "5.5.1", + "resolved": "https://registry.npmjs.org/openai/-/openai-5.5.1.tgz", + "integrity": "sha512-5i19097mGotHA1eFsM6Tjd/tJ8uo9sa5Ysv4Q6bKJ2vtN6rc0MzMrUefXnLXYAJcmMQrC1Efhj0AvfIkXrQamw==", + "license": "Apache-2.0", + "bin": { + "openai": "bin/cli" + }, + "peerDependencies": { + "ws": "^8.18.0", + "zod": "^3.23.8" + }, + "peerDependenciesMeta": { + "ws": { + "optional": true + }, + "zod": { + "optional": true + } } }, "node_modules/@langchain/google-common": { - "version": "0.2.9", - "resolved": "https://registry.npmjs.org/@langchain/google-common/-/google-common-0.2.9.tgz", - "integrity": "sha512-T6U6zK906ruitDWjrWLk542LotCkqZSENogNAzS+QvGKW1KpLtdgcIhvfKDFP8rCMj4X1QfIW6OXo2a6URnK0w==", + "version": "0.2.13", + "resolved": "https://registry.npmjs.org/@langchain/google-common/-/google-common-0.2.13.tgz", + "integrity": "sha512-Wd254vAajKxK3bIYPmuFRrk90oN3YIDzwwiO+3ojYKoWP+EBzW3eg3B4f8ofvGXUkJPxEwp/u8ymSsVUElUGlw==", "license": "MIT", "dependencies": { - "uuid": "^10.0.0", - "zod-to-json-schema": "^3.22.4" + "uuid": "^10.0.0" }, "engines": { "node": ">=18" }, "peerDependencies": { - "@langchain/core": ">=0.3.55 <0.4.0" + "@langchain/core": ">=0.3.58 <0.4.0" } }, "node_modules/@langchain/google-common/node_modules/uuid": { @@ -18862,36 +18960,143 @@ } }, "node_modules/@langchain/google-gauth": { - "version": "0.2.9", - "resolved": "https://registry.npmjs.org/@langchain/google-gauth/-/google-gauth-0.2.9.tgz", - "integrity": "sha512-x9SjEzIhPN7XzXhQ1Aj68OjsQcnTKa070UGs030rn3US2MUQWvqaxcKKnKhE7XiwdLsvuUXrsbqv09XUNzu3Fg==", + "version": "0.2.13", + "resolved": "https://registry.npmjs.org/@langchain/google-gauth/-/google-gauth-0.2.13.tgz", + "integrity": "sha512-JAIMtdmN+6/5aPRz3XUCFQ8+4TP272V8QCLhcyZ9LhDlnmY5DJv+LhzjMk9L5XZx9sRnKRvthVWiAY0Xbs3qAg==", "license": "MIT", "dependencies": { - "@langchain/google-common": "^0.2.9", - "google-auth-library": "^9.15.1" + "@langchain/google-common": "^0.2.13", + "google-auth-library": "^10.1.0" }, "engines": { "node": ">=18" }, "peerDependencies": { - "@langchain/core": ">=0.3.55 <0.4.0" + "@langchain/core": ">=0.3.58 <0.4.0" + } + }, + "node_modules/@langchain/google-gauth/node_modules/agent-base": { + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.3.tgz", + "integrity": "sha512-jRR5wdylq8CkOe6hei19GGZnxM6rBGwFl3Bg0YItGDimvjGtAvdZk4Pu6Cl4u4Igsws4a1fd1Vq3ezrhn4KmFw==", + "license": "MIT", + "engines": { + "node": ">= 14" + } + }, + "node_modules/@langchain/google-gauth/node_modules/gaxios": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-7.1.0.tgz", + "integrity": "sha512-y1Q0MX1Ba6eg67Zz92kW0MHHhdtWksYckQy1KJsI6P4UlDQ8cvdvpLEPslD/k7vFkdPppMESFGTvk7XpSiKj8g==", + "license": "Apache-2.0", + "dependencies": { + "extend": "^3.0.2", + "https-proxy-agent": "^7.0.1", + "node-fetch": "^3.3.2" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@langchain/google-gauth/node_modules/gcp-metadata": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-7.0.0.tgz", + "integrity": "sha512-3PfRTzvT3Msu0Hy8Gf9ypxJvaClG2IB9pyH0r8QOmRBW5mUcrHgYpF4GYP+XulDbfhxEhBYtJtJJQb5S2wM+LA==", + "license": "Apache-2.0", + "dependencies": { + "gaxios": "^7.0.0", + "google-logging-utils": 
"^1.0.0", + "json-bigint": "^1.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@langchain/google-gauth/node_modules/google-auth-library": { + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-10.1.0.tgz", + "integrity": "sha512-GspVjZj1RbyRWpQ9FbAXMKjFGzZwDKnUHi66JJ+tcjcu5/xYAP1pdlWotCuIkMwjfVsxxDvsGZXGLzRt72D0sQ==", + "license": "Apache-2.0", + "dependencies": { + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "gaxios": "^7.0.0", + "gcp-metadata": "^7.0.0", + "google-logging-utils": "^1.0.0", + "gtoken": "^8.0.0", + "jws": "^4.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@langchain/google-gauth/node_modules/google-logging-utils": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/google-logging-utils/-/google-logging-utils-1.1.1.tgz", + "integrity": "sha512-rcX58I7nqpu4mbKztFeOAObbomBbHU2oIb/d3tJfF3dizGSApqtSwYJigGCooHdnMyQBIw8BrWyK96w3YXgr6A==", + "license": "Apache-2.0", + "engines": { + "node": ">=14" + } + }, + "node_modules/@langchain/google-gauth/node_modules/gtoken": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-8.0.0.tgz", + "integrity": "sha512-+CqsMbHPiSTdtSO14O51eMNlrp9N79gmeqmXeouJOhfucAedHw9noVe/n5uJk3tbKE6a+6ZCQg3RPhVhHByAIw==", + "license": "MIT", + "dependencies": { + "gaxios": "^7.0.0", + "jws": "^4.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@langchain/google-gauth/node_modules/https-proxy-agent": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz", + "integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==", + "license": "MIT", + "dependencies": { + "agent-base": "^7.1.2", + "debug": "4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/@langchain/google-gauth/node_modules/node-fetch": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.3.2.tgz", + "integrity": "sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==", + "license": "MIT", + "dependencies": { + "data-uri-to-buffer": "^4.0.0", + "fetch-blob": "^3.1.4", + "formdata-polyfill": "^4.0.10" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/node-fetch" } }, "node_modules/@langchain/google-genai": { - "version": "0.2.9", - "resolved": "https://registry.npmjs.org/@langchain/google-genai/-/google-genai-0.2.9.tgz", - "integrity": "sha512-AcHAD0VX8Qci5ApLYd6ojMTJUk4ZTeMQrsOYsOVv09sy3FtrLHkRxZXkb0qgtDS9EJ+w40904B+s6s2H9+HZmQ==", + "version": "0.2.13", + "resolved": "https://registry.npmjs.org/@langchain/google-genai/-/google-genai-0.2.13.tgz", + "integrity": "sha512-ReZe4oNUhPNEijYo9CGA3/CJUwVPaaoYnyplZyYTbUNPAwwRH5aR1e6bppKFBb+ZZeTRCR25JFDIPnXJFfjaBg==", "license": "MIT", "dependencies": { "@google/generative-ai": "^0.24.0", - "uuid": "^11.1.0", - "zod-to-json-schema": "^3.22.4" + "uuid": "^11.1.0" }, "engines": { "node": ">=18" }, "peerDependencies": { - "@langchain/core": ">=0.3.55 <0.4.0" + "@langchain/core": ">=0.3.58 <0.4.0" } }, "node_modules/@langchain/google-genai/node_modules/uuid": { @@ -18907,36 +19112,36 @@ } }, "node_modules/@langchain/google-vertexai": { - "version": "0.2.9", - "resolved": "https://registry.npmjs.org/@langchain/google-vertexai/-/google-vertexai-0.2.9.tgz", - "integrity": 
"sha512-41LyACkSyQK5iKh58UBNVDkteeTqKapeu6SdYGfdqVP6erckciDClwmZUKO2UefDco9X0SCKphR+tPWpWFPoag==", + "version": "0.2.13", + "resolved": "https://registry.npmjs.org/@langchain/google-vertexai/-/google-vertexai-0.2.13.tgz", + "integrity": "sha512-Y97f0IBr4uWsyJTcDJROWXuu+qh4elSDLK1e6MD+mrxCx+UlgcXCReg4zvEFJzqpBKrfFt+lvXstJ6XTR6Zfyg==", "license": "MIT", "dependencies": { - "@langchain/google-gauth": "^0.2.9" + "@langchain/google-gauth": "^0.2.10" }, "engines": { "node": ">=18" }, "peerDependencies": { - "@langchain/core": ">=0.3.55 <0.4.0" + "@langchain/core": ">=0.3.58 <0.4.0" } }, "node_modules/@langchain/langgraph": { - "version": "0.2.74", - "resolved": "https://registry.npmjs.org/@langchain/langgraph/-/langgraph-0.2.74.tgz", - "integrity": "sha512-oHpEi5sTZTPaeZX1UnzfM2OAJ21QGQrwReTV6+QnX7h8nDCBzhtipAw1cK616S+X8zpcVOjgOtJuaJhXa4mN8w==", + "version": "0.3.4", + "resolved": "https://registry.npmjs.org/@langchain/langgraph/-/langgraph-0.3.4.tgz", + "integrity": "sha512-Vuja8Qtu3Zjx7k4fK7Cnw+p8gtvIRPciWp9btPhAs3aUo6aBgOJOZVcK5Ii3mHfEHK/aQmRElR0x/u/YwykOrg==", "license": "MIT", "dependencies": { - "@langchain/langgraph-checkpoint": "~0.0.17", + "@langchain/langgraph-checkpoint": "~0.0.18", "@langchain/langgraph-sdk": "~0.0.32", "uuid": "^10.0.0", - "zod": "^3.23.8" + "zod": "^3.25.32" }, "engines": { "node": ">=18" }, "peerDependencies": { - "@langchain/core": ">=0.2.36 <0.3.0 || >=0.3.40 < 0.4.0", + "@langchain/core": ">=0.3.58 < 0.4.0", "zod-to-json-schema": "^3.x" }, "peerDependenciesMeta": { @@ -18974,9 +19179,9 @@ } }, "node_modules/@langchain/langgraph-sdk": { - "version": "0.0.82", - "resolved": "https://registry.npmjs.org/@langchain/langgraph-sdk/-/langgraph-sdk-0.0.82.tgz", - "integrity": "sha512-QxhGtDArHkqsJAbO5RuZsCyvDmPWf4pUpkOpLDzPEQXCBuasrBRgB6pxQWof2l6kfMYCfrc6lp3jL6TAqapmjQ==", + "version": "0.0.84", + "resolved": "https://registry.npmjs.org/@langchain/langgraph-sdk/-/langgraph-sdk-0.0.84.tgz", + "integrity": "sha512-l0PFQyJ+6m6aclORNPPWlcRwgKcXVXsPaJCbCUYFABR3yf4cOpsjhUNR0cJ7+2cS400oieHjGRdGGyO/hbSjhg==", "license": "MIT", "dependencies": { "@types/json-schema": "^7.0.15", @@ -19011,20 +19216,19 @@ } }, "node_modules/@langchain/mistralai": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/@langchain/mistralai/-/mistralai-0.2.0.tgz", - "integrity": "sha512-VdfbKZopAuSXf/vlXbriGWLK3c7j5s47DoB3S31xpprY2BMSKZZiX9vE9TsgxMfAPuIDPIYcfgU7p1upvTYt8g==", + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/@langchain/mistralai/-/mistralai-0.2.1.tgz", + "integrity": "sha512-s91BlNcuxaaZGnVukyl81nwGrWpeE0EYiAdEFoBmZwlT4yLpx+QpPhRsGKrTg/Vm7Nscy6Wd8Xy2PJ93wftMdw==", + "license": "MIT", "dependencies": { "@mistralai/mistralai": "^1.3.1", - "uuid": "^10.0.0", - "zod": "^3.23.8", - "zod-to-json-schema": "^3.22.4" + "uuid": "^10.0.0" }, "engines": { "node": ">=18" }, "peerDependencies": { - "@langchain/core": ">=0.3.7 <0.4.0" + "@langchain/core": ">=0.3.58 <0.4.0" } }, "node_modules/@langchain/mistralai/node_modules/uuid": { @@ -19040,20 +19244,19 @@ } }, "node_modules/@langchain/ollama": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/@langchain/ollama/-/ollama-0.2.0.tgz", - "integrity": "sha512-jLlYFqt+nbhaJKLakk7lRTWHZJ7wHeJLM6yuv4jToQ8zPzpL//372+MjggDoW0mnw8ofysg1T2C6mEJspKJtiA==", + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/@langchain/ollama/-/ollama-0.2.3.tgz", + "integrity": "sha512-1Obe45jgQspqLMBVlayQbGdywFmri8DgmGRdzNu0li56cG5RReYlRCFVDZBRMMvF9JhsP5eXRyfyivtKfITHWQ==", + "license": "MIT", "dependencies": { "ollama": 
"^0.5.12", - "uuid": "^10.0.0", - "zod": "^3.24.1", - "zod-to-json-schema": "^3.24.1" + "uuid": "^10.0.0" }, "engines": { "node": ">=18" }, "peerDependencies": { - "@langchain/core": ">=0.2.21 <0.4.0" + "@langchain/core": ">=0.3.58 <0.4.0" } }, "node_modules/@langchain/ollama/node_modules/uuid": { @@ -19130,18 +19333,56 @@ } }, "node_modules/@langchain/xai": { - "version": "0.0.2", - "resolved": "https://registry.npmjs.org/@langchain/xai/-/xai-0.0.2.tgz", - "integrity": "sha512-wVOs7SfJs4VWk/oiHJomaoaZ+r9nQhPqbEXlQ2D8L0d54PxYhb1ILR9rub9LT1RpqazSX8HG4A8+hX4R01qkSg==", + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/@langchain/xai/-/xai-0.0.3.tgz", + "integrity": "sha512-NA+0d6z/1focGuakceOz/AspWN9xcz7mYpjLFuCDtOPRLzdjUTRiqljXx9RVSl/VQMA8AzHCOA64m3asYZAYWg==", + "license": "MIT", "dependencies": { - "@langchain/openai": "~0.4.4", - "zod": "^3.24.2" + "@langchain/openai": "^0.5.5" }, "engines": { "node": ">=18" }, "peerDependencies": { - "@langchain/core": ">=0.2.21 <0.4.0" + "@langchain/core": ">=0.3.58 <0.4.0" + } + }, + "node_modules/@langchain/xai/node_modules/@langchain/openai": { + "version": "0.5.14", + "resolved": "https://registry.npmjs.org/@langchain/openai/-/openai-0.5.14.tgz", + "integrity": "sha512-0GEj5K/qi1MRuZ4nE7NvyI4jTG+RSewLZqsExUwRukWdeqmkPNHGrogTa5ZDt7eaJxAaY7EgLC5ZnvCM3L1oug==", + "license": "MIT", + "dependencies": { + "js-tiktoken": "^1.0.12", + "openai": "^5.3.0", + "zod": "^3.25.32" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "@langchain/core": ">=0.3.58 <0.4.0" + } + }, + "node_modules/@langchain/xai/node_modules/openai": { + "version": "5.5.1", + "resolved": "https://registry.npmjs.org/openai/-/openai-5.5.1.tgz", + "integrity": "sha512-5i19097mGotHA1eFsM6Tjd/tJ8uo9sa5Ysv4Q6bKJ2vtN6rc0MzMrUefXnLXYAJcmMQrC1Efhj0AvfIkXrQamw==", + "license": "Apache-2.0", + "bin": { + "openai": "bin/cli" + }, + "peerDependencies": { + "ws": "^8.18.0", + "zod": "^3.23.8" + }, + "peerDependenciesMeta": { + "ws": { + "optional": true + }, + "zod": { + "optional": true + } } }, "node_modules/@lezer/common": { @@ -19196,23 +19437,23 @@ } }, "node_modules/@librechat/agents": { - "version": "2.4.38", - "resolved": "https://registry.npmjs.org/@librechat/agents/-/agents-2.4.38.tgz", - "integrity": "sha512-GfQ36TpKrC60vesTM0MrBaE8aEC50yBp0CsT4ERHO9HbCjH8na+CA14Ldps1cGwinL4gtyDH2gB0nVuhgEtYAg==", + "version": "2.4.41", + "resolved": "https://registry.npmjs.org/@librechat/agents/-/agents-2.4.41.tgz", + "integrity": "sha512-kYmdk5WVRp0qZxTx6BuGCs4l0Ir9iBLLx4ZY4/1wxr80al5/vq3P8wbgGdKMeO2qTu4ZaT4RyWRQYWBg5HDkUQ==", "license": "MIT", "dependencies": { - "@langchain/anthropic": "^0.3.21", - "@langchain/aws": "^0.1.10", - "@langchain/community": "^0.3.44", - "@langchain/core": "^0.3.57", - "@langchain/deepseek": "^0.0.1", - "@langchain/google-genai": "^0.2.9", - "@langchain/google-vertexai": "^0.2.9", - "@langchain/langgraph": "^0.2.73", - "@langchain/mistralai": "^0.2.0", - "@langchain/ollama": "^0.2.0", - "@langchain/openai": "^0.5.11", - "@langchain/xai": "^0.0.2", + "@langchain/anthropic": "^0.3.23", + "@langchain/aws": "^0.1.11", + "@langchain/community": "^0.3.47", + "@langchain/core": "^0.3.60", + "@langchain/deepseek": "^0.0.2", + "@langchain/google-genai": "^0.2.13", + "@langchain/google-vertexai": "^0.2.13", + "@langchain/langgraph": "^0.3.4", + "@langchain/mistralai": "^0.2.1", + "@langchain/ollama": "^0.2.3", + "@langchain/openai": "^0.5.14", + "@langchain/xai": "^0.0.3", "cheerio": "^1.0.0", "dotenv": "^16.4.7", "https-proxy-agent": "^7.0.6", @@ 
-19223,9 +19464,9 @@ } }, "node_modules/@librechat/agents/node_modules/@langchain/community": { - "version": "0.3.45", - "resolved": "https://registry.npmjs.org/@langchain/community/-/community-0.3.45.tgz", - "integrity": "sha512-KkAGmnP+w5tozLYsj/kGKwyfuPnCcA6MyDXfNF7oDo7L1TxhUgdEKhvNsY7ooLXz6Xh/LV5Kqp2B8U0jfYCQKQ==", + "version": "0.3.47", + "resolved": "https://registry.npmjs.org/@langchain/community/-/community-0.3.47.tgz", + "integrity": "sha512-Vo42kAfkXpTFSevhEkeqqE55az8NyQgDktCbitXYuhipNbFYx08XVvqEDkFkB20MM/Z7u+cvLb+DxCqnKuH0CQ==", "license": "MIT", "dependencies": { "@langchain/openai": ">=0.2.0 <0.6.0", @@ -19235,10 +19476,9 @@ "flat": "^5.0.2", "js-yaml": "^4.1.0", "langchain": ">=0.2.3 <0.3.0 || >=0.3.4 <0.4.0", - "langsmith": "^0.3.29", + "langsmith": "^0.3.33", "uuid": "^10.0.0", - "zod": "^3.22.3", - "zod-to-json-schema": "^3.22.5" + "zod": "^3.25.32" }, "engines": { "node": ">=18" @@ -19271,11 +19511,11 @@ "@google-ai/generativelanguage": "*", "@google-cloud/storage": "^6.10.1 || ^7.7.0", "@gradientai/nodejs-sdk": "^1.2.0", - "@huggingface/inference": "^2.6.4", - "@huggingface/transformers": "^3.2.3", + "@huggingface/inference": "^4.0.5", + "@huggingface/transformers": "^3.5.2", "@ibm-cloud/watsonx-ai": "*", "@lancedb/lancedb": "^0.12.0", - "@langchain/core": ">=0.2.21 <0.4.0", + "@langchain/core": ">=0.3.58 <0.4.0", "@layerup/layerup-security": "^1.5.12", "@libsql/client": "^0.14.0", "@mendable/firecrawl-js": "^1.4.3", @@ -19345,7 +19585,7 @@ "mammoth": "^1.6.0", "mariadb": "^3.4.0", "mem0ai": "^2.1.8", - "mongodb": ">=5.2.0", + "mongodb": "^6.17.0", "mysql2": "^3.9.8", "neo4j-driver": "*", "notion-to-md": "^3.1.0", @@ -19749,21 +19989,20 @@ } }, "node_modules/@librechat/agents/node_modules/@langchain/openai": { - "version": "0.5.11", - "resolved": "https://registry.npmjs.org/@langchain/openai/-/openai-0.5.11.tgz", - "integrity": "sha512-DAp7x+NfjSqDvKVMle8yb85nzz+3ctP7zGJaeRS0vLmvkY9qf/jRkowsM0mcsIiEUKhG/AHzWqvxbhktb/jJ6Q==", + "version": "0.5.14", + "resolved": "https://registry.npmjs.org/@langchain/openai/-/openai-0.5.14.tgz", + "integrity": "sha512-0GEj5K/qi1MRuZ4nE7NvyI4jTG+RSewLZqsExUwRukWdeqmkPNHGrogTa5ZDt7eaJxAaY7EgLC5ZnvCM3L1oug==", "license": "MIT", "dependencies": { "js-tiktoken": "^1.0.12", - "openai": "^4.96.0", - "zod": "^3.22.4", - "zod-to-json-schema": "^3.22.3" + "openai": "^5.3.0", + "zod": "^3.25.32" }, "engines": { "node": ">=18" }, "peerDependencies": { - "@langchain/core": ">=0.3.48 <0.4.0" + "@langchain/core": ">=0.3.58 <0.4.0" } }, "node_modules/@librechat/agents/node_modules/agent-base": { @@ -19788,6 +20027,27 @@ "node": ">= 14" } }, + "node_modules/@librechat/agents/node_modules/openai": { + "version": "5.5.1", + "resolved": "https://registry.npmjs.org/openai/-/openai-5.5.1.tgz", + "integrity": "sha512-5i19097mGotHA1eFsM6Tjd/tJ8uo9sa5Ysv4Q6bKJ2vtN6rc0MzMrUefXnLXYAJcmMQrC1Efhj0AvfIkXrQamw==", + "license": "Apache-2.0", + "bin": { + "openai": "bin/cli" + }, + "peerDependencies": { + "ws": "^8.18.0", + "zod": "^3.23.8" + }, + "peerDependenciesMeta": { + "ws": { + "optional": true + }, + "zod": { + "optional": true + } + } + }, "node_modules/@librechat/agents/node_modules/uuid": { "version": "10.0.0", "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz", @@ -27551,6 +27811,15 @@ "url": "https://github.com/sponsors/fb55" } }, + "node_modules/cheerio/node_modules/undici": { + "version": "6.21.3", + "resolved": "https://registry.npmjs.org/undici/-/undici-6.21.3.tgz", + "integrity": 
"sha512-gBLkYIlEnSp8pFbT64yFgGE6UIB9tAkhukC23PmMDCe5Nd+cRqKxSjw5y54MK2AZMgZfJWMaNE4nYUHgi1XEOw==", + "license": "MIT", + "engines": { + "node": ">=18.17" + } + }, "node_modules/cheerio/node_modules/whatwg-mimetype": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-4.0.0.tgz", @@ -28554,6 +28823,15 @@ "integrity": "sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA==", "dev": true }, + "node_modules/data-uri-to-buffer": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-4.0.1.tgz", + "integrity": "sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==", + "license": "MIT", + "engines": { + "node": ">= 12" + } + }, "node_modules/data-urls": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/data-urls/-/data-urls-3.0.2.tgz", @@ -30727,6 +31005,29 @@ "resolved": "https://registry.npmjs.org/fecha/-/fecha-4.2.3.tgz", "integrity": "sha512-OP2IUU6HeYKJi3i0z4A19kHMQoLVs4Hc+DPqqxI2h/DPZHTm/vjsfC6P0b4jCMy14XizLBqvndQ+UilD7707Jw==" }, + "node_modules/fetch-blob": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/fetch-blob/-/fetch-blob-3.2.0.tgz", + "integrity": "sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/jimmywarting" + }, + { + "type": "paypal", + "url": "https://paypal.me/jimmywarting" + } + ], + "license": "MIT", + "dependencies": { + "node-domexception": "^1.0.0", + "web-streams-polyfill": "^3.0.3" + }, + "engines": { + "node": "^12.20 || >= 14.13" + } + }, "node_modules/file-entry-cache": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-8.0.0.tgz", @@ -31107,6 +31408,18 @@ "node": ">= 14" } }, + "node_modules/formdata-polyfill": { + "version": "4.0.10", + "resolved": "https://registry.npmjs.org/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz", + "integrity": "sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==", + "license": "MIT", + "dependencies": { + "fetch-blob": "^3.1.2" + }, + "engines": { + "node": ">=12.20.0" + } + }, "node_modules/formidable": { "version": "3.5.4", "resolved": "https://registry.npmjs.org/formidable/-/formidable-3.5.4.tgz", @@ -34583,9 +34896,9 @@ } }, "node_modules/langsmith": { - "version": "0.3.29", - "resolved": "https://registry.npmjs.org/langsmith/-/langsmith-0.3.29.tgz", - "integrity": "sha512-JPF2B339qpYy9FyuY4Yz1aWYtgPlFc/a+VTj3L/JcFLHCiMP7+Ig8I9jO+o1QwVa+JU3iugL1RS0wwc+Glw0zA==", + "version": "0.3.33", + "resolved": "https://registry.npmjs.org/langsmith/-/langsmith-0.3.33.tgz", + "integrity": "sha512-imNIaBL6+ElE5eMzNHYwFxo6W/6rHlqcaUjCYoIeGdCYWlARxE3CTGKul5DJnaUgGP2CTLFeNXyvRx5HWC/4KQ==", "license": "MIT", "dependencies": { "@types/uuid": "^10.0.0", @@ -44153,12 +44466,12 @@ "dev": true }, "node_modules/undici": { - "version": "6.21.3", - "resolved": "https://registry.npmjs.org/undici/-/undici-6.21.3.tgz", - "integrity": "sha512-gBLkYIlEnSp8pFbT64yFgGE6UIB9tAkhukC23PmMDCe5Nd+cRqKxSjw5y54MK2AZMgZfJWMaNE4nYUHgi1XEOw==", + "version": "7.10.0", + "resolved": "https://registry.npmjs.org/undici/-/undici-7.10.0.tgz", + "integrity": "sha512-u5otvFBOBZvmdjWLVW+5DAc9Nkq8f24g0O9oY7qw2JVIF1VocIFoyz9JFkuVOS2j41AufeO0xnlweJ2RLT8nGw==", "license": "MIT", "engines": { - "node": ">=18.17" + "node": ">=20.18.1" } }, "node_modules/undici-types": { 
@@ -45069,6 +45382,15 @@ "url": "https://github.com/sponsors/wooorm" } }, + "node_modules/web-streams-polyfill": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.3.3.tgz", + "integrity": "sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw==", + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, "node_modules/webidl-conversions": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz", @@ -46174,9 +46496,10 @@ } }, "node_modules/zod": { - "version": "3.24.2", - "resolved": "https://registry.npmjs.org/zod/-/zod-3.24.2.tgz", - "integrity": "sha512-lY7CDW43ECgW9u1TcT3IoXHflywfVqDYze4waEz812jR/bZ8FHDsl7pFQoSZTz5N+2NqRXs8GBwnAwo3ZNxqhQ==", + "version": "3.25.67", + "resolved": "https://registry.npmjs.org/zod/-/zod-3.25.67.tgz", + "integrity": "sha512-idA2YXwpCdqUSKRCACDE6ItZD9TZzy3OZMtpfLoh6oPR47lipysRrJfjzMqFxQ3uJuUPyUeWe1r9vLH33xO/Qw==", + "license": "MIT", "funding": { "url": "https://github.com/sponsors/colinhacks" } @@ -46232,7 +46555,7 @@ "typescript": "^5.0.4" }, "peerDependencies": { - "@librechat/agents": "^2.4.37", + "@librechat/agents": "^2.4.41", "@librechat/data-schemas": "*", "@modelcontextprotocol/sdk": "^1.12.3", "axios": "^1.8.2", @@ -46243,6 +46566,7 @@ "librechat-data-provider": "*", "node-fetch": "2.7.0", "tiktoken": "^1.0.15", + "undici": "^7.10.0", "zod": "^3.22.4" } }, @@ -46462,7 +46786,7 @@ }, "packages/data-schemas": { "name": "@librechat/data-schemas", - "version": "0.0.9", + "version": "0.0.10", "license": "MIT", "devDependencies": { "@rollup/plugin-alias": "^5.1.0", diff --git a/packages/api/package.json b/packages/api/package.json index 1c2ab534e3..aa4fc43772 100644 --- a/packages/api/package.json +++ b/packages/api/package.json @@ -69,7 +69,7 @@ "registry": "https://registry.npmjs.org/" }, "peerDependencies": { - "@librechat/agents": "^2.4.37", + "@librechat/agents": "^2.4.41", "@librechat/data-schemas": "*", "@modelcontextprotocol/sdk": "^1.12.3", "axios": "^1.8.2", @@ -80,6 +80,7 @@ "librechat-data-provider": "*", "node-fetch": "2.7.0", "tiktoken": "^1.0.15", + "undici": "^7.10.0", "zod": "^3.22.4" } } diff --git a/packages/api/src/endpoints/openai/llm.ts b/packages/api/src/endpoints/openai/llm.ts index 2f9a3f6011..ec7c4b863d 100644 --- a/packages/api/src/endpoints/openai/llm.ts +++ b/packages/api/src/endpoints/openai/llm.ts @@ -1,4 +1,4 @@ -import { HttpsProxyAgent } from 'https-proxy-agent'; +import { ProxyAgent } from 'undici'; import { KnownEndpoints } from 'librechat-data-provider'; import type * as t from '~/types'; import { sanitizeModelName, constructAzureURL } from '~/utils/azure'; @@ -102,8 +102,10 @@ export function getOpenAIConfig( } if (proxy) { - const proxyAgent = new HttpsProxyAgent(proxy); - configOptions.httpAgent = proxyAgent; + const proxyAgent = new ProxyAgent(proxy); + configOptions.fetchOptions = { + dispatcher: proxyAgent, + }; } if (azure) { From 72cd159a373fb8c05e41dbf62ef2e174799feeae Mon Sep 17 00:00:00 2001 From: Danny Avila Date: Fri, 20 Jun 2025 18:05:19 -0400 Subject: [PATCH 15/16] =?UTF-8?q?=F0=9F=94=83=20refactor:=20Optimize=20Mei?= =?UTF-8?q?liSearch=20Sync=20Processing=20and=20Tracking=20(#7994)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- api/db/indexSync.js | 137 ++++-- .../src/models/plugins/mongoMeili.ts | 397 +++++++++++++----- 2 files changed, 404 insertions(+), 130 deletions(-) diff --git 
a/api/db/indexSync.js b/api/db/indexSync.js index e8bcd55e37..945346a906 100644 --- a/api/db/indexSync.js +++ b/api/db/indexSync.js @@ -1,8 +1,11 @@ const mongoose = require('mongoose'); const { MeiliSearch } = require('meilisearch'); const { logger } = require('@librechat/data-schemas'); +const { FlowStateManager } = require('@librechat/api'); +const { CacheKeys } = require('librechat-data-provider'); const { isEnabled } = require('~/server/utils'); +const { getLogStores } = require('~/cache'); const Conversation = mongoose.models.Conversation; const Message = mongoose.models.Message; @@ -28,43 +31,123 @@ class MeiliSearchClient { } } +/** + * Performs the actual sync operations for messages and conversations + */ +async function performSync() { + const client = MeiliSearchClient.getInstance(); + + const { status } = await client.health(); + if (status !== 'available') { + throw new Error('Meilisearch not available'); + } + + if (indexingDisabled === true) { + logger.info('[indexSync] Indexing is disabled, skipping...'); + return { messagesSync: false, convosSync: false }; + } + + let messagesSync = false; + let convosSync = false; + + // Check if we need to sync messages + const messageProgress = await Message.getSyncProgress(); + if (!messageProgress.isComplete) { + logger.info( + `[indexSync] Messages need syncing: ${messageProgress.totalProcessed}/${messageProgress.totalDocuments} indexed`, + ); + + // Check if we should do a full sync or incremental + const messageCount = await Message.countDocuments(); + const messagesIndexed = messageProgress.totalProcessed; + const syncThreshold = parseInt(process.env.MEILI_SYNC_THRESHOLD || '1000', 10); + + if (messageCount - messagesIndexed > syncThreshold) { + logger.info('[indexSync] Starting full message sync due to large difference'); + await Message.syncWithMeili(); + messagesSync = true; + } else if (messageCount !== messagesIndexed) { + logger.warn('[indexSync] Messages out of sync, performing incremental sync'); + await Message.syncWithMeili(); + messagesSync = true; + } + } else { + logger.info( + `[indexSync] Messages are fully synced: ${messageProgress.totalProcessed}/${messageProgress.totalDocuments}`, + ); + } + + // Check if we need to sync conversations + const convoProgress = await Conversation.getSyncProgress(); + if (!convoProgress.isComplete) { + logger.info( + `[indexSync] Conversations need syncing: ${convoProgress.totalProcessed}/${convoProgress.totalDocuments} indexed`, + ); + + const convoCount = await Conversation.countDocuments(); + const convosIndexed = convoProgress.totalProcessed; + const syncThreshold = parseInt(process.env.MEILI_SYNC_THRESHOLD || '1000', 10); + + if (convoCount - convosIndexed > syncThreshold) { + logger.info('[indexSync] Starting full conversation sync due to large difference'); + await Conversation.syncWithMeili(); + convosSync = true; + } else if (convoCount !== convosIndexed) { + logger.warn('[indexSync] Convos out of sync, performing incremental sync'); + await Conversation.syncWithMeili(); + convosSync = true; + } + } else { + logger.info( + `[indexSync] Conversations are fully synced: ${convoProgress.totalProcessed}/${convoProgress.totalDocuments}`, + ); + } + + return { messagesSync, convosSync }; +} + +/** + * Main index sync function that uses FlowStateManager to prevent concurrent execution + */ async function indexSync() { if (!searchEnabled) { return; } - try { - const client = MeiliSearchClient.getInstance(); - const { status } = await client.health(); - if (status !== 'available') { 
- throw new Error('Meilisearch not available'); + logger.info('[indexSync] Starting index synchronization check...'); + + try { + // Get or create FlowStateManager instance + const flowsCache = getLogStores(CacheKeys.FLOWS); + if (!flowsCache) { + logger.warn('[indexSync] Flows cache not available, falling back to direct sync'); + return await performSync(); } - if (indexingDisabled === true) { - logger.info('[indexSync] Indexing is disabled, skipping...'); + const flowManager = new FlowStateManager(flowsCache, { + ttl: 60000 * 10, // 10 minutes TTL for sync operations + }); + + // Use a unique flow ID for the sync operation + const flowId = 'meili-index-sync'; + const flowType = 'MEILI_SYNC'; + + // This will only execute the handler if no other instance is running the sync + const result = await flowManager.createFlowWithHandler(flowId, flowType, performSync); + + if (result.messagesSync || result.convosSync) { + logger.info('[indexSync] Sync completed successfully'); + } else { + logger.debug('[indexSync] No sync was needed'); + } + + return result; + } catch (err) { + if (err.message.includes('flow already exists')) { + logger.info('[indexSync] Sync already running on another instance'); return; } - const messageCount = await Message.countDocuments(); - const convoCount = await Conversation.countDocuments(); - const messages = await client.index('messages').getStats(); - const convos = await client.index('convos').getStats(); - const messagesIndexed = messages.numberOfDocuments; - const convosIndexed = convos.numberOfDocuments; - - logger.debug(`[indexSync] There are ${messageCount} messages and ${messagesIndexed} indexed`); - logger.debug(`[indexSync] There are ${convoCount} convos and ${convosIndexed} indexed`); - - if (messageCount !== messagesIndexed) { - logger.debug('[indexSync] Messages out of sync, indexing'); - Message.syncWithMeili(); - } - - if (convoCount !== convosIndexed) { - logger.debug('[indexSync] Convos out of sync, indexing'); - Conversation.syncWithMeili(); - } - } catch (err) { if (err.message.includes('not found')) { logger.debug('[indexSync] Creating indices...'); currentTimeout = setTimeout(async () => { diff --git a/packages/data-schemas/src/models/plugins/mongoMeili.ts b/packages/data-schemas/src/models/plugins/mongoMeili.ts index d44dfd806b..84a8c7efe8 100644 --- a/packages/data-schemas/src/models/plugins/mongoMeili.ts +++ b/packages/data-schemas/src/models/plugins/mongoMeili.ts @@ -19,6 +19,8 @@ interface MongoMeiliOptions { indexName: string; primaryKey: string; mongoose: typeof import('mongoose'); + syncBatchSize?: number; + syncDelayMs?: number; } interface MeiliIndexable { @@ -31,6 +33,13 @@ interface ContentItem { text?: string; } +interface SyncProgress { + lastSyncedId?: string; + totalProcessed: number; + totalDocuments: number; + isComplete: boolean; +} + interface _DocumentWithMeiliIndex extends Document { _meiliIndex?: boolean; preprocessObjectForIndex?: () => Record; @@ -45,7 +54,24 @@ interface _DocumentWithMeiliIndex extends Document { export type DocumentWithMeiliIndex = _DocumentWithMeiliIndex & IConversation & Partial; export interface SchemaWithMeiliMethods extends Model { - syncWithMeili(): Promise; + syncWithMeili(options?: { resumeFromId?: string }): Promise; + getSyncProgress(): Promise; + processSyncBatch( + index: Index, + documents: Array>, + updateOps: Array<{ + updateOne: { + filter: Record; + update: { $set: { _meiliIndex: boolean } }; + }; + }>, + ): Promise; + cleanupMeiliIndex( + index: Index, + primaryKey: string, + 
batchSize: number, + delayMs: number, + ): Promise; setMeiliIndexSettings(settings: Record): Promise; meiliSearch( q: string, @@ -66,6 +92,14 @@ const searchEnabled = process.env.SEARCH != null && process.env.SEARCH.toLowerCa const meiliEnabled = process.env.MEILI_HOST != null && process.env.MEILI_MASTER_KEY != null && searchEnabled; +/** + * Get sync configuration from environment variables + */ +const getSyncConfig = () => ({ + batchSize: parseInt(process.env.MEILI_SYNC_BATCH_SIZE || '100', 10), + delayMs: parseInt(process.env.MEILI_SYNC_DELAY_MS || '100', 10), +}); + /** * Local implementation of parseTextParts to avoid dependency on librechat-data-provider * Extracts text content from an array of content items @@ -101,6 +135,26 @@ const validateOptions = (options: Partial): void => { }); }; +/** + * Helper function to process documents in batches with rate limiting + */ +const processBatch = async ( + items: T[], + batchSize: number, + delayMs: number, + processor: (batch: T[]) => Promise, +): Promise => { + for (let i = 0; i < items.length; i += batchSize) { + const batch = items.slice(i, i + batchSize); + await processor(batch); + + // Add delay between batches to prevent overwhelming resources + if (i + batchSize < items.length && delayMs > 0) { + await new Promise((resolve) => setTimeout(resolve, delayMs)); + } + } +}; + /** * Factory function to create a MeiliMongooseModel class which extends a Mongoose model. * This class contains static and instance methods to synchronize and manage the MeiliSearch index @@ -109,127 +163,213 @@ const validateOptions = (options: Partial): void => { * @param config - Configuration object. * @param config.index - The MeiliSearch index object. * @param config.attributesToIndex - List of attributes to index. + * @param config.syncOptions - Sync configuration options. * @returns A class definition that will be loaded into the Mongoose schema. */ const createMeiliMongooseModel = ({ index, attributesToIndex, + syncOptions, }: { index: Index; attributesToIndex: string[]; + syncOptions: { batchSize: number; delayMs: number }; }) => { const primaryKey = attributesToIndex[0]; + const syncConfig = { ...getSyncConfig(), ...syncOptions }; class MeiliMongooseModel { /** - * Synchronizes the data between the MongoDB collection and the MeiliSearch index. - * - * The synchronization process involves: - * 1. Fetching all documents from the MongoDB collection and MeiliSearch index. - * 2. Comparing documents from both sources. - * 3. Deleting documents from MeiliSearch that no longer exist in MongoDB. - * 4. Adding documents to MeiliSearch that exist in MongoDB but not in the index. - * 5. Updating documents in MeiliSearch if key fields (such as `text` or `title`) differ. - * 6. Updating the `_meiliIndex` field in MongoDB to indicate the indexing status. - * - * Note: The function processes documents in batches because MeiliSearch's - * `index.getDocuments` requires an exact limit and `index.addDocuments` does not handle - * partial failures in a batch. - * - * @returns {Promise} Resolves when the synchronization is complete. 
+ * Get the current sync progress */ - static async syncWithMeili(this: SchemaWithMeiliMethods): Promise { + static async getSyncProgress(this: SchemaWithMeiliMethods): Promise { + const totalDocuments = await this.countDocuments(); + const indexedDocuments = await this.countDocuments({ _meiliIndex: true }); + + return { + totalProcessed: indexedDocuments, + totalDocuments, + isComplete: indexedDocuments === totalDocuments, + }; + } + + /** + * Synchronizes the data between the MongoDB collection and the MeiliSearch index. + * Now uses streaming and batching to reduce memory usage. + */ + static async syncWithMeili( + this: SchemaWithMeiliMethods, + options?: { resumeFromId?: string }, + ): Promise { try { - let moreDocuments = true; - const mongoDocuments = await this.find().lean(); + const startTime = Date.now(); + const { batchSize, delayMs } = syncConfig; + + logger.info( + `[syncWithMeili] Starting sync for ${primaryKey === 'messageId' ? 'messages' : 'conversations'} with batch size ${batchSize}`, + ); + + // Build query with resume capability + const query: FilterQuery = {}; + if (options?.resumeFromId) { + query._id = { $gt: options.resumeFromId }; + } + + // Get total count for progress tracking + const totalCount = await this.countDocuments(query); + let processedCount = 0; + + // First, handle documents that need to be removed from Meili + await this.cleanupMeiliIndex(index, primaryKey, batchSize, delayMs); + + // Process MongoDB documents in batches using cursor + const cursor = this.find(query) + .select(attributesToIndex.join(' ') + ' _meiliIndex') + .sort({ _id: 1 }) + .batchSize(batchSize) + .cursor(); const format = (doc: Record) => _.omitBy(_.pick(doc, attributesToIndex), (v, k) => k.startsWith('$')); - const mongoMap = new Map( - mongoDocuments.map((doc) => { - const typedDoc = doc as Record; - return [typedDoc[primaryKey], format(typedDoc)]; - }), - ); - const indexMap = new Map>(); - let offset = 0; - const batchSize = 1000; - - while (moreDocuments) { - const batch = await index.getDocuments({ limit: batchSize, offset }); - if (batch.results.length === 0) { - moreDocuments = false; - } - for (const doc of batch.results) { - indexMap.set(doc[primaryKey], format(doc)); - } - offset += batchSize; - } - - logger.debug('[syncWithMeili]', { indexMap: indexMap.size, mongoMap: mongoMap.size }); - - const updateOps: Array<{ + let documentBatch: Array> = []; + let updateOps: Array<{ updateOne: { filter: Record; update: { $set: { _meiliIndex: boolean } }; }; }> = []; - // Process documents present in the MeiliSearch index - for (const [id, doc] of indexMap) { - const update: Record = {}; - update[primaryKey] = id; - if (mongoMap.has(id)) { - const mongoDoc = mongoMap.get(id); - if ( - (doc.text && doc.text !== mongoDoc?.text) || - (doc.title && doc.title !== mongoDoc?.title) - ) { - logger.debug( - `[syncWithMeili] ${id} had document discrepancy in ${ - doc.text ? 
'text' : 'title' - } field`, - ); - updateOps.push({ - updateOne: { filter: update, update: { $set: { _meiliIndex: true } } }, - }); - await index.addDocuments([doc]); + // Process documents in streaming fashion + for await (const doc of cursor) { + const typedDoc = doc.toObject() as unknown as Record; + const formatted = format(typedDoc); + + // Check if document needs indexing + if (!typedDoc._meiliIndex) { + documentBatch.push(formatted); + updateOps.push({ + updateOne: { + filter: { _id: typedDoc._id }, + update: { $set: { _meiliIndex: true } }, + }, + }); + } + + processedCount++; + + // Process batch when it reaches the configured size + if (documentBatch.length >= batchSize) { + await this.processSyncBatch(index, documentBatch, updateOps); + documentBatch = []; + updateOps = []; + + // Log progress + const progress = Math.round((processedCount / totalCount) * 100); + logger.info(`[syncWithMeili] Progress: ${progress}% (${processedCount}/${totalCount})`); + + // Add delay to prevent overwhelming resources + if (delayMs > 0) { + await new Promise((resolve) => setTimeout(resolve, delayMs)); } - } else { - await index.deleteDocument(id as string); - updateOps.push({ - updateOne: { filter: update, update: { $set: { _meiliIndex: false } } }, - }); } } - // Process documents present in MongoDB - for (const [id, doc] of mongoMap) { - const update: Record = {}; - update[primaryKey] = id; - if (!indexMap.has(id)) { - await index.addDocuments([doc]); - updateOps.push({ - updateOne: { filter: update, update: { $set: { _meiliIndex: true } } }, - }); - } else if (doc._meiliIndex === false) { - updateOps.push({ - updateOne: { filter: update, update: { $set: { _meiliIndex: true } } }, - }); - } + // Process remaining documents + if (documentBatch.length > 0) { + await this.processSyncBatch(index, documentBatch, updateOps); } + const duration = Date.now() - startTime; + logger.info( + `[syncWithMeili] Completed sync for ${primaryKey === 'messageId' ? 'messages' : 'conversations'} in ${duration}ms`, + ); + } catch (error) { + logger.error('[syncWithMeili] Error during sync:', error); + throw error; + } + } + + /** + * Process a batch of documents for syncing + */ + static async processSyncBatch( + this: SchemaWithMeiliMethods, + index: Index, + documents: Array>, + updateOps: Array<{ + updateOne: { + filter: Record; + update: { $set: { _meiliIndex: boolean } }; + }; + }>, + ): Promise { + if (documents.length === 0) { + return; + } + + try { + // Add documents to MeiliSearch + await index.addDocuments(documents); + + // Update MongoDB to mark documents as indexed if (updateOps.length > 0) { await this.collection.bulkWrite(updateOps); - logger.debug( - `[syncWithMeili] Finished indexing ${ - primaryKey === 'messageId' ? 
'messages' : 'conversations' - }`, - ); } } catch (error) { - logger.error('[syncWithMeili] Error adding document to Meili:', error); + logger.error('[processSyncBatch] Error processing batch:', error); + // Don't throw - allow sync to continue with other documents + } + } + + /** + * Clean up documents in MeiliSearch that no longer exist in MongoDB + */ + static async cleanupMeiliIndex( + this: SchemaWithMeiliMethods, + index: Index, + primaryKey: string, + batchSize: number, + delayMs: number, + ): Promise { + try { + let offset = 0; + let moreDocuments = true; + + while (moreDocuments) { + const batch = await index.getDocuments({ limit: batchSize, offset }); + if (batch.results.length === 0) { + moreDocuments = false; + break; + } + + const meiliIds = batch.results.map((doc) => doc[primaryKey]); + const query: Record = {}; + query[primaryKey] = { $in: meiliIds }; + + // Find which documents exist in MongoDB + const existingDocs = await this.find(query).select(primaryKey).lean(); + + const existingIds = new Set( + existingDocs.map((doc: Record) => doc[primaryKey]), + ); + + // Delete documents that don't exist in MongoDB + const toDelete = meiliIds.filter((id) => !existingIds.has(id)); + if (toDelete.length > 0) { + await Promise.all(toDelete.map((id) => index.deleteDocument(id as string))); + logger.debug(`[cleanupMeiliIndex] Deleted ${toDelete.length} orphaned documents`); + } + + offset += batchSize; + + // Add delay between batches + if (delayMs > 0) { + await new Promise((resolve) => setTimeout(resolve, delayMs)); + } + } + } catch (error) { + logger.error('[cleanupMeiliIndex] Error during cleanup:', error); } } @@ -313,18 +453,29 @@ const createMeiliMongooseModel = ({ } /** - * Adds the current document to the MeiliSearch index + * Adds the current document to the MeiliSearch index with retry logic */ async addObjectToMeili( this: DocumentWithMeiliIndex, next: CallbackWithoutResultAndOptionalError, ): Promise { const object = this.preprocessObjectForIndex!(); - try { - await index.addDocuments([object]); - } catch (error) { - logger.error('[addObjectToMeili] Error adding document to Meili:', error); - return next(); + const maxRetries = 3; + let retryCount = 0; + + while (retryCount < maxRetries) { + try { + await index.addDocuments([object]); + break; + } catch (error) { + retryCount++; + if (retryCount >= maxRetries) { + logger.error('[addObjectToMeili] Error adding document to Meili after retries:', error); + return next(); + } + // Exponential backoff + await new Promise((resolve) => setTimeout(resolve, Math.pow(2, retryCount) * 1000)); + } } try { @@ -445,6 +596,8 @@ const createMeiliMongooseModel = ({ * @param options.apiKey - The MeiliSearch API key. * @param options.indexName - The name of the MeiliSearch index. * @param options.primaryKey - The primary key field for indexing. + * @param options.syncBatchSize - Batch size for sync operations. + * @param options.syncDelayMs - Delay between batches in milliseconds. 
*/ export default function mongoMeili(schema: Schema, options: MongoMeiliOptions): void { const mongoose = options.mongoose; @@ -461,11 +614,38 @@ export default function mongoMeili(schema: Schema, options: MongoMeiliOptions): }); const { host, apiKey, indexName, primaryKey } = options; + const syncOptions = { + batchSize: options.syncBatchSize || getSyncConfig().batchSize, + delayMs: options.syncDelayMs || getSyncConfig().delayMs, + }; const client = new MeiliSearch({ host, apiKey }); - client.createIndex(indexName, { primaryKey }); + + /** Create index only if it doesn't exist */ const index = client.index(indexName); + // Check if index exists and create if needed + (async () => { + try { + await index.getRawInfo(); + logger.debug(`[mongoMeili] Index ${indexName} already exists`); + } catch (error) { + const errorCode = (error as { code?: string })?.code; + if (errorCode === 'index_not_found') { + try { + logger.info(`[mongoMeili] Creating new index: ${indexName}`); + await client.createIndex(indexName, { primaryKey }); + logger.info(`[mongoMeili] Successfully created index: ${indexName}`); + } catch (createError) { + // Index might have been created by another instance + logger.debug(`[mongoMeili] Index ${indexName} may already exist:`, createError); + } + } else { + logger.error(`[mongoMeili] Error checking index ${indexName}:`, error); + } + } + })(); + // Collect attributes from the schema that should be indexed const attributesToIndex: string[] = [ ...Object.entries(schema.obj).reduce((results, [key, value]) => { @@ -474,7 +654,7 @@ export default function mongoMeili(schema: Schema, options: MongoMeiliOptions): }, []), ]; - schema.loadClass(createMeiliMongooseModel({ index, attributesToIndex })); + schema.loadClass(createMeiliMongooseModel({ index, attributesToIndex, syncOptions })); // Register Mongoose hooks schema.post('save', function (doc: DocumentWithMeiliIndex, next) { @@ -497,17 +677,23 @@ export default function mongoMeili(schema: Schema, options: MongoMeiliOptions): try { const conditions = (this as Query).getQuery(); + const { batchSize, delayMs } = syncOptions; if (Object.prototype.hasOwnProperty.call(schema.obj, 'messages')) { const convoIndex = client.index('convos'); const deletedConvos = await mongoose .model('Conversation') .find(conditions as FilterQuery) + .select('conversationId') .lean(); - const promises = deletedConvos.map((convo: Record) => - convoIndex.deleteDocument(convo.conversationId as string), - ); - await Promise.all(promises); + + // Process deletions in batches + await processBatch(deletedConvos, batchSize, delayMs, async (batch) => { + const promises = batch.map((convo: Record) => + convoIndex.deleteDocument(convo.conversationId as string), + ); + await Promise.all(promises); + }); } if (Object.prototype.hasOwnProperty.call(schema.obj, 'messageId')) { @@ -515,11 +701,16 @@ export default function mongoMeili(schema: Schema, options: MongoMeiliOptions): const deletedMessages = await mongoose .model('Message') .find(conditions as FilterQuery) + .select('messageId') .lean(); - const promises = deletedMessages.map((message: Record) => - messageIndex.deleteDocument(message.messageId as string), - ); - await Promise.all(promises); + + // Process deletions in batches + await processBatch(deletedMessages, batchSize, delayMs, async (batch) => { + const promises = batch.map((message: Record) => + messageIndex.deleteDocument(message.messageId as string), + ); + await Promise.all(promises); + }); } return next(); } catch (error) { From 
0587a1cc7ca7c9d206e5402ca46fc201cfe081bd Mon Sep 17 00:00:00 2001 From: Danny Avila Date: Fri, 20 Jun 2025 20:44:13 -0400 Subject: [PATCH 16/16] =?UTF-8?q?=F0=9F=94=83=20refactor:=20Index=20Sync?= =?UTF-8?q?=20in=20Background?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- api/server/index.js | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/api/server/index.js b/api/server/index.js index b1132873c7..8c7db3e226 100644 --- a/api/server/index.js +++ b/api/server/index.js @@ -39,7 +39,9 @@ const startServer = async () => { await connectDb(); logger.info('Connected to MongoDB'); - await indexSync(); + indexSync().catch((err) => { + logger.error('[indexSync] Background sync failed:', err); + }); app.disable('x-powered-by'); app.set('trust proxy', trusted_proxy);
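
Taken together, these commits move the Meilisearch index sync off the startup critical path and behind a cross-instance lock: `api/server/index.js` now fires `indexSync()` without awaiting it, and `indexSync()` runs `performSync()` through a `FlowStateManager` flow so only one API instance does the work at a time. The TypeScript sketch below condenses that pattern. The `FlowStateManager` constructor, `getLogStores(CacheKeys.FLOWS)`, and the three-argument `createFlowWithHandler` call are taken verbatim from the diff; `runSyncOnce`, the `SyncResult` interface, the cast, and the log messages are illustrative additions, not part of the patch.

```ts
import { FlowStateManager } from '@librechat/api';
import { CacheKeys } from 'librechat-data-provider';
import { logger } from '@librechat/data-schemas';
import { getLogStores } from '~/cache';

/** Result shape returned by performSync() in the patch. */
interface SyncResult {
  messagesSync: boolean;
  convosSync: boolean;
}

/**
 * Illustrative wrapper: run a long job at most once across all API instances
 * by serializing it behind a shared, cache-backed flow.
 */
async function runSyncOnce(performSync: () => Promise<SyncResult>): Promise<void> {
  const flowsCache = getLogStores(CacheKeys.FLOWS);
  if (!flowsCache) {
    // Without a shared cache there is no cross-instance lock; run directly.
    await performSync();
    return;
  }

  const flowManager = new FlowStateManager(flowsCache, {
    ttl: 60000 * 10, // stale locks expire after 10 minutes, as in the patch
  });

  try {
    // Only the first caller executes the handler; the flow id keys the lock.
    const result = (await flowManager.createFlowWithHandler(
      'meili-index-sync',
      'MEILI_SYNC',
      performSync,
    )) as SyncResult;
    logger.info(`[runSyncOnce] messagesSync=${result.messagesSync}, convosSync=${result.convosSync}`);
  } catch (err) {
    if (err instanceof Error && err.message.includes('flow already exists')) {
      logger.info('[runSyncOnce] sync already running on another instance');
      return;
    }
    throw err;
  }
}

// At startup the patch does the equivalent of:
//   indexSync().catch((err) => logger.error('[indexSync] Background sync failed:', err));
// i.e. the promise is deliberately not awaited so startup is never blocked.
```

Because the lock is a cache entry with a ten-minute TTL, an instance that crashes mid-sync should not hold the sync hostage indefinitely: the entry expires on its own, and the `flow already exists` error is the signal that another instance currently owns the run.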
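
Resumability in the `mongoMeili` plugin hinges on the `_meiliIndex` flag stored on each MongoDB document: `getSyncProgress()` is just two `countDocuments()` calls, and `indexSync()` only schedules a `syncWithMeili()` run when the indexed and total counts diverge, with `MEILI_SYNC_THRESHOLD` (default 1000) deciding whether the run is logged as a full or an incremental sync. Below is a minimal standalone restatement of that bookkeeping; it assumes nothing beyond a Mongoose model whose schema carries a `_meiliIndex` boolean, and the free-standing `getSyncProgress` function is an illustrative stand-in for the static method the plugin adds.

```ts
import type { Model } from 'mongoose';

interface SyncProgress {
  totalProcessed: number;
  totalDocuments: number;
  isComplete: boolean;
}

/**
 * Mirror of the plugin's getSyncProgress(): progress is the count of documents
 * already flagged _meiliIndex versus the total document count.
 */
async function getSyncProgress(model: Model<{ _meiliIndex?: boolean }>): Promise<SyncProgress> {
  const totalDocuments = await model.countDocuments();
  const totalProcessed = await model.countDocuments({ _meiliIndex: true });
  return { totalProcessed, totalDocuments, isComplete: totalProcessed === totalDocuments };
}
```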
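
Two patterns repeat throughout the plugin changes: work is chunked into batches with a pause between them (`MEILI_SYNC_BATCH_SIZE` and `MEILI_SYNC_DELAY_MS`, both defaulting to 100), and individual Meilisearch writes are retried with exponential backoff before the failure is surfaced. The self-contained sketch below restates both outside of Mongoose. `withRetry` and `processInBatches` are illustrative names, the Meilisearch call is stubbed with `console.log`, and wrapping a whole batch in the retry is a choice made for the example only — the plugin itself retries just the single-document save hook.

```ts
/** The same knobs the plugin reads, with the plugin's defaults. */
const syncConfig = {
  batchSize: parseInt(process.env.MEILI_SYNC_BATCH_SIZE || '100', 10),
  delayMs: parseInt(process.env.MEILI_SYNC_DELAY_MS || '100', 10),
};

const sleep = (ms: number) => new Promise<void>((resolve) => setTimeout(resolve, ms));

/** Exponential backoff (2s, 4s, 8s) before giving up, like addObjectToMeili. */
async function withRetry<T>(operation: () => Promise<T>, maxRetries = 3): Promise<T> {
  let attempt = 0;
  for (;;) {
    try {
      return await operation();
    } catch (error) {
      attempt += 1;
      if (attempt >= maxRetries) {
        throw error;
      }
      await sleep(Math.pow(2, attempt) * 1000);
    }
  }
}

/** Rate-limited batching, like the plugin's processBatch helper. */
async function processInBatches<T>(
  items: T[],
  processor: (batch: T[]) => Promise<void>,
  { batchSize, delayMs } = syncConfig,
): Promise<void> {
  for (let i = 0; i < items.length; i += batchSize) {
    await processor(items.slice(i, i + batchSize));
    // Pause between batches so a large sync cannot monopolize Meilisearch or MongoDB.
    if (i + batchSize < items.length && delayMs > 0) {
      await sleep(delayMs);
    }
  }
}

// Stubbed usage: 250 fake messages pushed in batches, each batch retried on failure.
const docs = Array.from({ length: 250 }, (_, i) => ({ messageId: `msg_${i}`, text: `hello ${i}` }));
processInBatches(docs, (batch) =>
  withRetry(async () => {
    console.log(`would call index.addDocuments() with ${batch.length} documents`);
  }),
).catch((err) => console.error('sync failed:', err));
```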