From 01e9b196bc2ebd665decf9be3c768f196a73c5d8 Mon Sep 17 00:00:00 2001 From: Danny Avila Date: Mon, 23 Jun 2025 09:59:05 -0400 Subject: [PATCH 01/65] =?UTF-8?q?=F0=9F=A4=96=20feat:=20Streamline=20Endpo?= =?UTF-8?q?ints=20to=20Agent=20Framework=20(#8013)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * refactor(buildEndpointOption): Improve error logging in middleware, consolidate `isAgents` builder logic, remove adding `modelsConfig` to `endpointOption` * refactor: parameter extraction and organization in agent services, minimize redundancy of shared fields across objects, make clear distinction of parameters processed uniquely by LibreChat vs LLM Provider Configs * refactor(createPayload): streamline all endpoints to agent route * fix: add `modelLabel` to response sender options for agent initialization * chore: correct log message context in EditController abort controller cleanup * chore: remove unused abortRequest hook * chore: remove unused addToCache module and its dependencies * refactor: remove AskController and related routes, update endpoint URLs (now all streamlined to agents route) * chore: remove unused bedrock route and its related imports * refactor: simplify response sender logic for Google endpoint * chore: add `modelDisplayLabel` handling for agents endpoint * feat: add file search capability to ephemeral agents, update code interpreter selection based of file upload, consolidate main upload menu for all endpoints * feat: implement useToolToggle hook for managing tool toggle state, refactor CodeInterpreter and WebSearch components to utilize new hook * feat: add ToolsDropdown component to BadgeRow for enhanced tool options * feat: introduce BadgeRowContext and BadgeRowProvider for managing conversation state, refactor related components to utilize context * feat: implement useMCPSelect hook for managing MCP selection state, refactor MCPSelect component to utilize new hook * feat: enhance BadgeRowContext 
with MCPSelect and tool toggle functionality, refactor related components to utilize updated context and hooks * refactor: streamline useToolToggle hook by integrating setEphemeralAgent directly into toggle logic and removing redundant setValue function * refactor: consolidate codeApiKeyForm and searchApiKeyForm from CodeInterpreter and WebSearch to utilize new context properties * refactor: update CheckboxButton to support controlled state and enhance ToolsDropdown with permission-based toggles for web search and code interpreter * refactor: conditionally render CheckboxButton in CodeInterpreter and WebSearch components for improved UI responsiveness * chore: add jotai dependency to package.json and package-lock.json * chore: update brace-expansion package to version 2.0.2 in package-lock.json due to CVE-2025-5889 * Revert "chore: add jotai dependency to package.json and package-lock.json" This reverts commit 69b699739671e740820f570ddb83d1caa0dbd6e4. * refactor: add pinning functionality to CodeInterpreter and WebSearch components, and enhance ToolsDropdown with pin toggle for web search and code interpreter * chore: move MCPIcon to correct location, remove duplicate * fix: update MCP import to use type-only import from librechat-data-provider * feat: implement MCPSubMenu component and integrate pinning functionality into ToolsDropdown * fix: cycling to submenu by using parent menu context * feat: add FileSearch component and integrate it into BadgeRow and ToolsDropdown * chore: import order * chore: remove agent specific logic that would block functionality for streamlined endpoints * chore: linting for `createContextHandlers` * chore: ensure ToolsDropdown doesn't show up for agents * chore: ensure tool resource is selected when dragged to UI * chore: update file search behavior to simulate legacy functionality * feat: ToolDialogs with multiple trigger references, add settings to tool dropdown * refactor: simplify web search and code interpreter settings checks * 
chore: simplify local storage key for pinned state in useToolToggle * refactor: reinstate agent check in AttachFileChat component, as individual providers will have different file configurations * ci: increase timeout for MongoDB connection in Agent tests --- api/app/clients/BaseClient.js | 6 +- .../clients/prompts/createContextHandlers.js | 34 +- api/models/Agent.js | 3 + api/models/Agent.spec.js | 14 +- api/server/controllers/AskController.js | 282 --------------- api/server/controllers/EditController.js | 2 +- api/server/index.js | 2 - api/server/middleware/buildEndpointOption.js | 23 +- api/server/routes/ask/addToCache.js | 63 ---- api/server/routes/ask/anthropic.js | 25 -- api/server/routes/ask/custom.js | 25 -- api/server/routes/ask/google.js | 24 -- api/server/routes/ask/gptPlugins.js | 241 ------------- api/server/routes/ask/index.js | 47 --- api/server/routes/ask/openAI.js | 27 -- api/server/routes/bedrock/chat.js | 37 -- api/server/routes/bedrock/index.js | 35 -- api/server/routes/index.js | 4 - api/server/services/Endpoints/agents/agent.js | 18 +- api/server/services/Endpoints/agents/build.js | 12 +- .../services/Endpoints/agents/initialize.js | 25 +- client/src/Providers/BadgeRowContext.tsx | 83 +++++ client/src/Providers/index.ts | 2 + client/src/components/Chat/Input/BadgeRow.tsx | 159 +++++---- .../components/Chat/Input/CodeInterpreter.tsx | 109 +----- .../src/components/Chat/Input/FileSearch.tsx | 28 ++ .../Chat/Input/Files/AttachFileChat.tsx | 23 +- .../Chat/Input/Files/AttachFileMenu.tsx | 19 +- .../Chat/Input/Files/DragDropModal.tsx | 2 +- .../src/components/Chat/Input/MCPSelect.tsx | 123 +------ .../src/components/Chat/Input/MCPSubMenu.tsx | 96 ++++++ .../src/components/Chat/Input/ToolDialogs.tsx | 66 ++++ .../components/Chat/Input/ToolsDropdown.tsx | 322 ++++++++++++++++++ .../src/components/Chat/Input/WebSearch.tsx | 109 +----- .../components/Chat/Menus/Endpoints/utils.ts | 2 +- .../SidePanel/Agents/Code/ApiKeyDialog.tsx | 11 +- 
.../components/SidePanel/Agents/MCPInput.tsx | 2 +- .../SidePanel/Agents/Search/ApiKeyDialog.tsx | 11 +- client/src/components/svg/MCPIcon.tsx | 32 +- client/src/components/svg/VectorIcon.tsx | 15 + client/src/components/svg/index.ts | 2 + client/src/components/ui/CheckboxButton.tsx | 21 +- client/src/components/ui/MCPIcon.tsx | 31 -- client/src/components/ui/OriginalDialog.tsx | 12 +- client/src/components/ui/index.ts | 1 - client/src/hooks/Files/useDragHelpers.ts | 33 +- client/src/hooks/Nav/useSideNavLinks.ts | 3 +- client/src/hooks/Plugins/index.ts | 2 + client/src/hooks/Plugins/useCodeApiKeyForm.ts | 6 +- client/src/hooks/Plugins/useMCPSelect.ts | 121 +++++++ .../src/hooks/Plugins/useSearchApiKeyForm.ts | 6 +- client/src/hooks/Plugins/useToolToggle.ts | 119 +++++++ client/src/hooks/SSE/useSSE.ts | 6 +- e2e/specs/messages.spec.ts | 4 +- package-lock.json | 41 ++- packages/api/src/utils/index.ts | 1 + packages/api/src/utils/llm.test.ts | 189 ++++++++++ packages/api/src/utils/llm.ts | 47 +++ packages/data-provider/src/api-endpoints.ts | 2 - packages/data-provider/src/config.ts | 20 +- packages/data-provider/src/createPayload.ts | 12 +- packages/data-provider/src/data-service.ts | 8 - .../data-provider/src/parameterSettings.ts | 10 +- packages/data-provider/src/parsers.ts | 6 +- .../src/react-query/react-query-service.ts | 17 - packages/data-provider/src/schemas.ts | 17 - packages/data-provider/src/types.ts | 1 + 67 files changed, 1468 insertions(+), 1433 deletions(-) delete mode 100644 api/server/controllers/AskController.js delete mode 100644 api/server/routes/ask/addToCache.js delete mode 100644 api/server/routes/ask/anthropic.js delete mode 100644 api/server/routes/ask/custom.js delete mode 100644 api/server/routes/ask/google.js delete mode 100644 api/server/routes/ask/gptPlugins.js delete mode 100644 api/server/routes/ask/index.js delete mode 100644 api/server/routes/ask/openAI.js delete mode 100644 api/server/routes/bedrock/chat.js delete mode 100644 
api/server/routes/bedrock/index.js create mode 100644 client/src/Providers/BadgeRowContext.tsx create mode 100644 client/src/components/Chat/Input/FileSearch.tsx create mode 100644 client/src/components/Chat/Input/MCPSubMenu.tsx create mode 100644 client/src/components/Chat/Input/ToolDialogs.tsx create mode 100644 client/src/components/Chat/Input/ToolsDropdown.tsx create mode 100644 client/src/components/svg/VectorIcon.tsx delete mode 100644 client/src/components/ui/MCPIcon.tsx create mode 100644 client/src/hooks/Plugins/useMCPSelect.ts create mode 100644 client/src/hooks/Plugins/useToolToggle.ts create mode 100644 packages/api/src/utils/llm.test.ts create mode 100644 packages/api/src/utils/llm.ts diff --git a/api/app/clients/BaseClient.js b/api/app/clients/BaseClient.js index 55b8780180..c8f4228f10 100644 --- a/api/app/clients/BaseClient.js +++ b/api/app/clients/BaseClient.js @@ -792,7 +792,8 @@ class BaseClient { userMessage.tokenCount = userMessageTokenCount; /* - Note: `AskController` saves the user message, so we update the count of its `userMessage` reference + Note: `AgentController` saves the user message if not saved here + (noted by `savedMessageIds`), so we update the count of its `userMessage` reference */ if (typeof opts?.getReqData === 'function') { opts.getReqData({ @@ -801,7 +802,8 @@ class BaseClient { } /* Note: we update the user message to be sure it gets the calculated token count; - though `AskController` saves the user message, EditController does not + though `AgentController` saves the user message if not saved here + (noted by `savedMessageIds`), EditController does not */ await userMessagePromise; await this.updateMessageInDatabase({ diff --git a/api/app/clients/prompts/createContextHandlers.js b/api/app/clients/prompts/createContextHandlers.js index 4dcfaf68e4..57847bea3e 100644 --- a/api/app/clients/prompts/createContextHandlers.js +++ b/api/app/clients/prompts/createContextHandlers.js @@ -96,35 +96,35 @@ function 
createContextHandlers(req, userMessageContent) { resolvedQueries.length === 0 ? '\n\tThe semantic search did not return any results.' : resolvedQueries - .map((queryResult, index) => { - const file = processedFiles[index]; - let contextItems = queryResult.data; + .map((queryResult, index) => { + const file = processedFiles[index]; + let contextItems = queryResult.data; - const generateContext = (currentContext) => - ` + const generateContext = (currentContext) => + ` ${file.filename} ${currentContext} `; - if (useFullContext) { - return generateContext(`\n${contextItems}`); - } + if (useFullContext) { + return generateContext(`\n${contextItems}`); + } - contextItems = queryResult.data - .map((item) => { - const pageContent = item[0].page_content; - return ` + contextItems = queryResult.data + .map((item) => { + const pageContent = item[0].page_content; + return ` `; - }) - .join(''); + }) + .join(''); - return generateContext(contextItems); - }) - .join(''); + return generateContext(contextItems); + }) + .join(''); if (useFullContext) { const prompt = `${header} diff --git a/api/models/Agent.js b/api/models/Agent.js index d33ca8a8bf..04ba8b020e 100644 --- a/api/models/Agent.js +++ b/api/models/Agent.js @@ -70,6 +70,9 @@ const loadEphemeralAgent = async ({ req, agent_id, endpoint, model_parameters: _ if (ephemeralAgent?.execute_code === true) { tools.push(Tools.execute_code); } + if (ephemeralAgent?.file_search === true) { + tools.push(Tools.file_search); + } if (ephemeralAgent?.web_search === true) { tools.push(Tools.web_search); } diff --git a/api/models/Agent.spec.js b/api/models/Agent.spec.js index 0b0646f524..8953ae0482 100644 --- a/api/models/Agent.spec.js +++ b/api/models/Agent.spec.js @@ -43,7 +43,7 @@ describe('models/Agent', () => { const mongoUri = mongoServer.getUri(); Agent = mongoose.models.Agent || mongoose.model('Agent', agentSchema); await mongoose.connect(mongoUri); - }); + }, 20000); afterAll(async () => { await mongoose.disconnect(); @@ -413,7 
+413,7 @@ describe('models/Agent', () => { const mongoUri = mongoServer.getUri(); Agent = mongoose.models.Agent || mongoose.model('Agent', agentSchema); await mongoose.connect(mongoUri); - }); + }, 20000); afterAll(async () => { await mongoose.disconnect(); @@ -670,7 +670,7 @@ describe('models/Agent', () => { const mongoUri = mongoServer.getUri(); Agent = mongoose.models.Agent || mongoose.model('Agent', agentSchema); await mongoose.connect(mongoUri); - }); + }, 20000); afterAll(async () => { await mongoose.disconnect(); @@ -1332,7 +1332,7 @@ describe('models/Agent', () => { const mongoUri = mongoServer.getUri(); Agent = mongoose.models.Agent || mongoose.model('Agent', agentSchema); await mongoose.connect(mongoUri); - }); + }, 20000); afterAll(async () => { await mongoose.disconnect(); @@ -1514,7 +1514,7 @@ describe('models/Agent', () => { const mongoUri = mongoServer.getUri(); Agent = mongoose.models.Agent || mongoose.model('Agent', agentSchema); await mongoose.connect(mongoUri); - }); + }, 20000); afterAll(async () => { await mongoose.disconnect(); @@ -1798,7 +1798,7 @@ describe('models/Agent', () => { const mongoUri = mongoServer.getUri(); Agent = mongoose.models.Agent || mongoose.model('Agent', agentSchema); await mongoose.connect(mongoUri); - }); + }, 20000); afterAll(async () => { await mongoose.disconnect(); @@ -2350,7 +2350,7 @@ describe('models/Agent', () => { const mongoUri = mongoServer.getUri(); Agent = mongoose.models.Agent || mongoose.model('Agent', agentSchema); await mongoose.connect(mongoUri); - }); + }, 20000); afterAll(async () => { await mongoose.disconnect(); diff --git a/api/server/controllers/AskController.js b/api/server/controllers/AskController.js deleted file mode 100644 index 40b209ef35..0000000000 --- a/api/server/controllers/AskController.js +++ /dev/null @@ -1,282 +0,0 @@ -const { getResponseSender, Constants } = require('librechat-data-provider'); -const { - handleAbortError, - createAbortController, - cleanupAbortController, -} = 
require('~/server/middleware'); -const { - disposeClient, - processReqData, - clientRegistry, - requestDataMap, -} = require('~/server/cleanup'); -const { sendMessage, createOnProgress } = require('~/server/utils'); -const { saveMessage } = require('~/models'); -const { logger } = require('~/config'); - -const AskController = async (req, res, next, initializeClient, addTitle) => { - let { - text, - endpointOption, - conversationId, - modelDisplayLabel, - parentMessageId = null, - overrideParentMessageId = null, - } = req.body; - - let client = null; - let abortKey = null; - let cleanupHandlers = []; - let clientRef = null; - - logger.debug('[AskController]', { - text, - conversationId, - ...endpointOption, - modelsConfig: endpointOption?.modelsConfig ? 'exists' : '', - }); - - let userMessage = null; - let userMessagePromise = null; - let promptTokens = null; - let userMessageId = null; - let responseMessageId = null; - let getAbortData = null; - - const sender = getResponseSender({ - ...endpointOption, - model: endpointOption.modelOptions.model, - modelDisplayLabel, - }); - const initialConversationId = conversationId; - const newConvo = !initialConversationId; - const userId = req.user.id; - - let reqDataContext = { - userMessage, - userMessagePromise, - responseMessageId, - promptTokens, - conversationId, - userMessageId, - }; - - const updateReqData = (data = {}) => { - reqDataContext = processReqData(data, reqDataContext); - abortKey = reqDataContext.abortKey; - userMessage = reqDataContext.userMessage; - userMessagePromise = reqDataContext.userMessagePromise; - responseMessageId = reqDataContext.responseMessageId; - promptTokens = reqDataContext.promptTokens; - conversationId = reqDataContext.conversationId; - userMessageId = reqDataContext.userMessageId; - }; - - let { onProgress: progressCallback, getPartialText } = createOnProgress(); - - const performCleanup = () => { - logger.debug('[AskController] Performing cleanup'); - if 
(Array.isArray(cleanupHandlers)) { - for (const handler of cleanupHandlers) { - try { - if (typeof handler === 'function') { - handler(); - } - } catch (e) { - // Ignore - } - } - } - - if (abortKey) { - logger.debug('[AskController] Cleaning up abort controller'); - cleanupAbortController(abortKey); - abortKey = null; - } - - if (client) { - disposeClient(client); - client = null; - } - - reqDataContext = null; - userMessage = null; - userMessagePromise = null; - promptTokens = null; - getAbortData = null; - progressCallback = null; - endpointOption = null; - cleanupHandlers = null; - addTitle = null; - - if (requestDataMap.has(req)) { - requestDataMap.delete(req); - } - logger.debug('[AskController] Cleanup completed'); - }; - - try { - ({ client } = await initializeClient({ req, res, endpointOption })); - if (clientRegistry && client) { - clientRegistry.register(client, { userId }, client); - } - - if (client) { - requestDataMap.set(req, { client }); - } - - clientRef = new WeakRef(client); - - getAbortData = () => { - const currentClient = clientRef?.deref(); - const currentText = - currentClient?.getStreamText != null ? currentClient.getStreamText() : getPartialText(); - - return { - sender, - conversationId, - messageId: reqDataContext.responseMessageId, - parentMessageId: overrideParentMessageId ?? 
userMessageId, - text: currentText, - userMessage: userMessage, - userMessagePromise: userMessagePromise, - promptTokens: reqDataContext.promptTokens, - }; - }; - - const { onStart, abortController } = createAbortController( - req, - res, - getAbortData, - updateReqData, - ); - - const closeHandler = () => { - logger.debug('[AskController] Request closed'); - if (!abortController || abortController.signal.aborted || abortController.requestCompleted) { - return; - } - abortController.abort(); - logger.debug('[AskController] Request aborted on close'); - }; - - res.on('close', closeHandler); - cleanupHandlers.push(() => { - try { - res.removeListener('close', closeHandler); - } catch (e) { - // Ignore - } - }); - - const messageOptions = { - user: userId, - parentMessageId, - conversationId: reqDataContext.conversationId, - overrideParentMessageId, - getReqData: updateReqData, - onStart, - abortController, - progressCallback, - progressOptions: { - res, - }, - }; - - /** @type {TMessage} */ - let response = await client.sendMessage(text, messageOptions); - response.endpoint = endpointOption.endpoint; - - const databasePromise = response.databasePromise; - delete response.databasePromise; - - const { conversation: convoData = {} } = await databasePromise; - const conversation = { ...convoData }; - conversation.title = - conversation && !conversation.title ? 
null : conversation?.title || 'New Chat'; - - const latestUserMessage = reqDataContext.userMessage; - - if (client?.options?.attachments && latestUserMessage) { - latestUserMessage.files = client.options.attachments; - if (endpointOption?.modelOptions?.model) { - conversation.model = endpointOption.modelOptions.model; - } - delete latestUserMessage.image_urls; - } - - if (!abortController.signal.aborted) { - const finalResponseMessage = { ...response }; - - sendMessage(res, { - final: true, - conversation, - title: conversation.title, - requestMessage: latestUserMessage, - responseMessage: finalResponseMessage, - }); - res.end(); - - if (client?.savedMessageIds && !client.savedMessageIds.has(response.messageId)) { - await saveMessage( - req, - { ...finalResponseMessage, user: userId }, - { context: 'api/server/controllers/AskController.js - response end' }, - ); - } - } - - if (!client?.skipSaveUserMessage && latestUserMessage) { - await saveMessage(req, latestUserMessage, { - context: "api/server/controllers/AskController.js - don't skip saving user message", - }); - } - - if (typeof addTitle === 'function' && parentMessageId === Constants.NO_PARENT && newConvo) { - addTitle(req, { - text, - response: { ...response }, - client, - }) - .then(() => { - logger.debug('[AskController] Title generation started'); - }) - .catch((err) => { - logger.error('[AskController] Error in title generation', err); - }) - .finally(() => { - logger.debug('[AskController] Title generation completed'); - performCleanup(); - }); - } else { - performCleanup(); - } - } catch (error) { - logger.error('[AskController] Error handling request', error); - let partialText = ''; - try { - const currentClient = clientRef?.deref(); - partialText = - currentClient?.getStreamText != null ? 
currentClient.getStreamText() : getPartialText(); - } catch (getTextError) { - logger.error('[AskController] Error calling getText() during error handling', getTextError); - } - - handleAbortError(res, req, error, { - sender, - partialText, - conversationId: reqDataContext.conversationId, - messageId: reqDataContext.responseMessageId, - parentMessageId: overrideParentMessageId ?? reqDataContext.userMessageId ?? parentMessageId, - userMessageId: reqDataContext.userMessageId, - }) - .catch((err) => { - logger.error('[AskController] Error in `handleAbortError` during catch block', err); - }) - .finally(() => { - performCleanup(); - }); - } -}; - -module.exports = AskController; diff --git a/api/server/controllers/EditController.js b/api/server/controllers/EditController.js index d142d474df..574111abf9 100644 --- a/api/server/controllers/EditController.js +++ b/api/server/controllers/EditController.js @@ -84,7 +84,7 @@ const EditController = async (req, res, next, initializeClient) => { } if (abortKey) { - logger.debug('[AskController] Cleaning up abort controller'); + logger.debug('[EditController] Cleaning up abort controller'); cleanupAbortController(abortKey); abortKey = null; } diff --git a/api/server/index.js b/api/server/index.js index 8c7db3e226..ac79a627e9 100644 --- a/api/server/index.js +++ b/api/server/index.js @@ -97,7 +97,6 @@ const startServer = async () => { app.use('/api/actions', routes.actions); app.use('/api/keys', routes.keys); app.use('/api/user', routes.user); - app.use('/api/ask', routes.ask); app.use('/api/search', routes.search); app.use('/api/edit', routes.edit); app.use('/api/messages', routes.messages); @@ -118,7 +117,6 @@ const startServer = async () => { app.use('/api/roles', routes.roles); app.use('/api/agents', routes.agents); app.use('/api/banner', routes.banner); - app.use('/api/bedrock', routes.bedrock); app.use('/api/memories', routes.memories); app.use('/api/tags', routes.tags); app.use('/api/mcp', routes.mcp); diff --git 
a/api/server/middleware/buildEndpointOption.js b/api/server/middleware/buildEndpointOption.js index 8394223b5e..f3138bf6e9 100644 --- a/api/server/middleware/buildEndpointOption.js +++ b/api/server/middleware/buildEndpointOption.js @@ -1,11 +1,11 @@ +const { logger } = require('@librechat/data-schemas'); const { - parseCompactConvo, + EndpointURLs, EModelEndpoint, isAgentsEndpoint, - EndpointURLs, + parseCompactConvo, } = require('librechat-data-provider'); const azureAssistants = require('~/server/services/Endpoints/azureAssistants'); -const { getModelsConfig } = require('~/server/controllers/ModelController'); const assistants = require('~/server/services/Endpoints/assistants'); const gptPlugins = require('~/server/services/Endpoints/gptPlugins'); const { processFiles } = require('~/server/services/Files/process'); @@ -36,6 +36,9 @@ async function buildEndpointOption(req, res, next) { try { parsedBody = parseCompactConvo({ endpoint, endpointType, conversation: req.body }); } catch (error) { + logger.warn( + `Error parsing conversation for endpoint ${endpoint}${error?.message ? `: ${error.message}` : ''}`, + ); return handleError(res, { text: 'Error parsing conversation' }); } @@ -77,6 +80,7 @@ async function buildEndpointOption(req, res, next) { conversation: currentModelSpec.preset, }); } catch (error) { + logger.error(`Error parsing model spec for endpoint ${endpoint}`, error); return handleError(res, { text: 'Error parsing model spec' }); } } @@ -84,20 +88,23 @@ async function buildEndpointOption(req, res, next) { try { const isAgents = isAgentsEndpoint(endpoint) || req.baseUrl.startsWith(EndpointURLs[EModelEndpoint.agents]); - const endpointFn = buildFunction[isAgents ? EModelEndpoint.agents : (endpointType ?? endpoint)]; - const builder = isAgents ? (...args) => endpointFn(req, ...args) : endpointFn; + const builder = isAgents + ? (...args) => buildFunction[EModelEndpoint.agents](req, ...args) + : buildFunction[endpointType ?? 
endpoint]; // TODO: use object params req.body.endpointOption = await builder(endpoint, parsedBody, endpointType); - // TODO: use `getModelsConfig` only when necessary - const modelsConfig = await getModelsConfig(req); - req.body.endpointOption.modelsConfig = modelsConfig; if (req.body.files && !isAgents) { req.body.endpointOption.attachments = processFiles(req.body.files); } + next(); } catch (error) { + logger.error( + `Error building endpoint option for endpoint ${endpoint} with type ${endpointType}`, + error, + ); return handleError(res, { text: 'Error building endpoint option' }); } } diff --git a/api/server/routes/ask/addToCache.js b/api/server/routes/ask/addToCache.js deleted file mode 100644 index a2f427098f..0000000000 --- a/api/server/routes/ask/addToCache.js +++ /dev/null @@ -1,63 +0,0 @@ -const { Keyv } = require('keyv'); -const { KeyvFile } = require('keyv-file'); -const { logger } = require('~/config'); - -const addToCache = async ({ endpoint, endpointOption, userMessage, responseMessage }) => { - try { - const conversationsCache = new Keyv({ - store: new KeyvFile({ filename: './data/cache.json' }), - namespace: 'chatgpt', // should be 'bing' for bing/sydney - }); - - const { - conversationId, - messageId: userMessageId, - parentMessageId: userParentMessageId, - text: userText, - } = userMessage; - const { - messageId: responseMessageId, - parentMessageId: responseParentMessageId, - text: responseText, - } = responseMessage; - - let conversation = await conversationsCache.get(conversationId); - // used to generate a title for the conversation if none exists - // let isNewConversation = false; - if (!conversation) { - conversation = { - messages: [], - createdAt: Date.now(), - }; - // isNewConversation = true; - } - - const roles = (options) => { - if (endpoint === 'openAI') { - return options?.chatGptLabel || 'ChatGPT'; - } - }; - - let _userMessage = { - id: userMessageId, - parentMessageId: userParentMessageId, - role: 'User', - message: userText, - 
}; - - let _responseMessage = { - id: responseMessageId, - parentMessageId: responseParentMessageId, - role: roles(endpointOption), - message: responseText, - }; - - conversation.messages.push(_userMessage, _responseMessage); - - await conversationsCache.set(conversationId, conversation); - } catch (error) { - logger.error('[addToCache] Error adding conversation to cache', error); - } -}; - -module.exports = addToCache; diff --git a/api/server/routes/ask/anthropic.js b/api/server/routes/ask/anthropic.js deleted file mode 100644 index afe1720d84..0000000000 --- a/api/server/routes/ask/anthropic.js +++ /dev/null @@ -1,25 +0,0 @@ -const express = require('express'); -const AskController = require('~/server/controllers/AskController'); -const { addTitle, initializeClient } = require('~/server/services/Endpoints/anthropic'); -const { - setHeaders, - handleAbort, - validateModel, - validateEndpoint, - buildEndpointOption, -} = require('~/server/middleware'); - -const router = express.Router(); - -router.post( - '/', - validateEndpoint, - validateModel, - buildEndpointOption, - setHeaders, - async (req, res, next) => { - await AskController(req, res, next, initializeClient, addTitle); - }, -); - -module.exports = router; diff --git a/api/server/routes/ask/custom.js b/api/server/routes/ask/custom.js deleted file mode 100644 index 8fc343cf17..0000000000 --- a/api/server/routes/ask/custom.js +++ /dev/null @@ -1,25 +0,0 @@ -const express = require('express'); -const AskController = require('~/server/controllers/AskController'); -const { initializeClient } = require('~/server/services/Endpoints/custom'); -const { addTitle } = require('~/server/services/Endpoints/openAI'); -const { - setHeaders, - validateModel, - validateEndpoint, - buildEndpointOption, -} = require('~/server/middleware'); - -const router = express.Router(); - -router.post( - '/', - validateEndpoint, - validateModel, - buildEndpointOption, - setHeaders, - async (req, res, next) => { - await AskController(req, 
res, next, initializeClient, addTitle); - }, -); - -module.exports = router; diff --git a/api/server/routes/ask/google.js b/api/server/routes/ask/google.js deleted file mode 100644 index 16c7e265f4..0000000000 --- a/api/server/routes/ask/google.js +++ /dev/null @@ -1,24 +0,0 @@ -const express = require('express'); -const AskController = require('~/server/controllers/AskController'); -const { initializeClient, addTitle } = require('~/server/services/Endpoints/google'); -const { - setHeaders, - validateModel, - validateEndpoint, - buildEndpointOption, -} = require('~/server/middleware'); - -const router = express.Router(); - -router.post( - '/', - validateEndpoint, - validateModel, - buildEndpointOption, - setHeaders, - async (req, res, next) => { - await AskController(req, res, next, initializeClient, addTitle); - }, -); - -module.exports = router; diff --git a/api/server/routes/ask/gptPlugins.js b/api/server/routes/ask/gptPlugins.js deleted file mode 100644 index a40022848a..0000000000 --- a/api/server/routes/ask/gptPlugins.js +++ /dev/null @@ -1,241 +0,0 @@ -const express = require('express'); -const { getResponseSender, Constants } = require('librechat-data-provider'); -const { initializeClient } = require('~/server/services/Endpoints/gptPlugins'); -const { sendMessage, createOnProgress } = require('~/server/utils'); -const { addTitle } = require('~/server/services/Endpoints/openAI'); -const { saveMessage, updateMessage } = require('~/models'); -const { - handleAbort, - createAbortController, - handleAbortError, - setHeaders, - validateModel, - validateEndpoint, - buildEndpointOption, - moderateText, -} = require('~/server/middleware'); -const { validateTools } = require('~/app'); -const { logger } = require('~/config'); - -const router = express.Router(); - -router.use(moderateText); - -router.post( - '/', - validateEndpoint, - validateModel, - buildEndpointOption, - setHeaders, - async (req, res) => { - let { - text, - endpointOption, - conversationId, - 
parentMessageId = null, - overrideParentMessageId = null, - } = req.body; - - logger.debug('[/ask/gptPlugins]', { text, conversationId, ...endpointOption }); - - let userMessage; - let userMessagePromise; - let promptTokens; - let userMessageId; - let responseMessageId; - const sender = getResponseSender({ - ...endpointOption, - model: endpointOption.modelOptions.model, - }); - const newConvo = !conversationId; - const user = req.user.id; - - const plugins = []; - - const getReqData = (data = {}) => { - for (let key in data) { - if (key === 'userMessage') { - userMessage = data[key]; - userMessageId = data[key].messageId; - } else if (key === 'userMessagePromise') { - userMessagePromise = data[key]; - } else if (key === 'responseMessageId') { - responseMessageId = data[key]; - } else if (key === 'promptTokens') { - promptTokens = data[key]; - } else if (!conversationId && key === 'conversationId') { - conversationId = data[key]; - } - } - }; - - let streaming = null; - let timer = null; - - const { - onProgress: progressCallback, - sendIntermediateMessage, - getPartialText, - } = createOnProgress({ - onProgress: () => { - if (timer) { - clearTimeout(timer); - } - - streaming = new Promise((resolve) => { - timer = setTimeout(() => { - resolve(); - }, 250); - }); - }, - }); - - const pluginMap = new Map(); - const onAgentAction = async (action, runId) => { - pluginMap.set(runId, action.tool); - sendIntermediateMessage(res, { - plugins, - parentMessageId: userMessage.messageId, - messageId: responseMessageId, - }); - }; - - const onToolStart = async (tool, input, runId, parentRunId) => { - const pluginName = pluginMap.get(parentRunId); - const latestPlugin = { - runId, - loading: true, - inputs: [input], - latest: pluginName, - outputs: null, - }; - - if (streaming) { - await streaming; - } - const extraTokens = ':::plugin:::\n'; - plugins.push(latestPlugin); - sendIntermediateMessage( - res, - { plugins, parentMessageId: userMessage.messageId, messageId: 
responseMessageId }, - extraTokens, - ); - }; - - const onToolEnd = async (output, runId) => { - if (streaming) { - await streaming; - } - - const pluginIndex = plugins.findIndex((plugin) => plugin.runId === runId); - - if (pluginIndex !== -1) { - plugins[pluginIndex].loading = false; - plugins[pluginIndex].outputs = output; - } - }; - - const getAbortData = () => ({ - sender, - conversationId, - userMessagePromise, - messageId: responseMessageId, - parentMessageId: overrideParentMessageId ?? userMessageId, - text: getPartialText(), - plugins: plugins.map((p) => ({ ...p, loading: false })), - userMessage, - promptTokens, - }); - const { abortController, onStart } = createAbortController(req, res, getAbortData, getReqData); - - try { - endpointOption.tools = await validateTools(user, endpointOption.tools); - const { client } = await initializeClient({ req, res, endpointOption }); - - const onChainEnd = () => { - if (!client.skipSaveUserMessage) { - saveMessage( - req, - { ...userMessage, user }, - { context: 'api/server/routes/ask/gptPlugins.js - onChainEnd' }, - ); - } - sendIntermediateMessage(res, { - plugins, - parentMessageId: userMessage.messageId, - messageId: responseMessageId, - }); - }; - - let response = await client.sendMessage(text, { - user, - conversationId, - parentMessageId, - overrideParentMessageId, - getReqData, - onAgentAction, - onChainEnd, - onToolStart, - onToolEnd, - onStart, - getPartialText, - ...endpointOption, - progressCallback, - progressOptions: { - res, - // parentMessageId: overrideParentMessageId || userMessageId, - plugins, - }, - abortController, - }); - - if (overrideParentMessageId) { - response.parentMessageId = overrideParentMessageId; - } - - logger.debug('[/ask/gptPlugins]', response); - - const { conversation = {} } = await response.databasePromise; - delete response.databasePromise; - conversation.title = - conversation && !conversation.title ? 
null : conversation?.title || 'New Chat'; - - sendMessage(res, { - title: conversation.title, - final: true, - conversation, - requestMessage: userMessage, - responseMessage: response, - }); - res.end(); - - if (parentMessageId === Constants.NO_PARENT && newConvo) { - addTitle(req, { - text, - response, - client, - }); - } - - response.plugins = plugins.map((p) => ({ ...p, loading: false })); - if (response.plugins?.length > 0) { - await updateMessage( - req, - { ...response, user }, - { context: 'api/server/routes/ask/gptPlugins.js - save plugins used' }, - ); - } - } catch (error) { - const partialText = getPartialText(); - handleAbortError(res, req, error, { - partialText, - conversationId, - sender, - messageId: responseMessageId, - parentMessageId: userMessageId ?? parentMessageId, - }); - } - }, -); - -module.exports = router; diff --git a/api/server/routes/ask/index.js b/api/server/routes/ask/index.js deleted file mode 100644 index 525bd8e29d..0000000000 --- a/api/server/routes/ask/index.js +++ /dev/null @@ -1,47 +0,0 @@ -const express = require('express'); -const { EModelEndpoint } = require('librechat-data-provider'); -const { - uaParser, - checkBan, - requireJwtAuth, - messageIpLimiter, - concurrentLimiter, - messageUserLimiter, - validateConvoAccess, -} = require('~/server/middleware'); -const { isEnabled } = require('~/server/utils'); -const gptPlugins = require('./gptPlugins'); -const anthropic = require('./anthropic'); -const custom = require('./custom'); -const google = require('./google'); -const openAI = require('./openAI'); - -const { LIMIT_CONCURRENT_MESSAGES, LIMIT_MESSAGE_IP, LIMIT_MESSAGE_USER } = process.env ?? 
{}; - -const router = express.Router(); - -router.use(requireJwtAuth); -router.use(checkBan); -router.use(uaParser); - -if (isEnabled(LIMIT_CONCURRENT_MESSAGES)) { - router.use(concurrentLimiter); -} - -if (isEnabled(LIMIT_MESSAGE_IP)) { - router.use(messageIpLimiter); -} - -if (isEnabled(LIMIT_MESSAGE_USER)) { - router.use(messageUserLimiter); -} - -router.use(validateConvoAccess); - -router.use([`/${EModelEndpoint.azureOpenAI}`, `/${EModelEndpoint.openAI}`], openAI); -router.use(`/${EModelEndpoint.gptPlugins}`, gptPlugins); -router.use(`/${EModelEndpoint.anthropic}`, anthropic); -router.use(`/${EModelEndpoint.google}`, google); -router.use(`/${EModelEndpoint.custom}`, custom); - -module.exports = router; diff --git a/api/server/routes/ask/openAI.js b/api/server/routes/ask/openAI.js deleted file mode 100644 index dadf00def4..0000000000 --- a/api/server/routes/ask/openAI.js +++ /dev/null @@ -1,27 +0,0 @@ -const express = require('express'); -const AskController = require('~/server/controllers/AskController'); -const { addTitle, initializeClient } = require('~/server/services/Endpoints/openAI'); -const { - handleAbort, - setHeaders, - validateModel, - validateEndpoint, - buildEndpointOption, - moderateText, -} = require('~/server/middleware'); - -const router = express.Router(); -router.use(moderateText); - -router.post( - '/', - validateEndpoint, - validateModel, - buildEndpointOption, - setHeaders, - async (req, res, next) => { - await AskController(req, res, next, initializeClient, addTitle); - }, -); - -module.exports = router; diff --git a/api/server/routes/bedrock/chat.js b/api/server/routes/bedrock/chat.js deleted file mode 100644 index 263ca96002..0000000000 --- a/api/server/routes/bedrock/chat.js +++ /dev/null @@ -1,37 +0,0 @@ -const express = require('express'); - -const router = express.Router(); -const { - setHeaders, - handleAbort, - moderateText, - // validateModel, - // validateEndpoint, - buildEndpointOption, -} = require('~/server/middleware'); 
-const { initializeClient } = require('~/server/services/Endpoints/bedrock'); -const AgentController = require('~/server/controllers/agents/request'); -const addTitle = require('~/server/services/Endpoints/agents/title'); - -router.use(moderateText); - -/** - * @route POST / - * @desc Chat with an assistant - * @access Public - * @param {express.Request} req - The request object, containing the request data. - * @param {express.Response} res - The response object, used to send back a response. - * @returns {void} - */ -router.post( - '/', - // validateModel, - // validateEndpoint, - buildEndpointOption, - setHeaders, - async (req, res, next) => { - await AgentController(req, res, next, initializeClient, addTitle); - }, -); - -module.exports = router; diff --git a/api/server/routes/bedrock/index.js b/api/server/routes/bedrock/index.js deleted file mode 100644 index ce440a7c0e..0000000000 --- a/api/server/routes/bedrock/index.js +++ /dev/null @@ -1,35 +0,0 @@ -const express = require('express'); -const { - uaParser, - checkBan, - requireJwtAuth, - messageIpLimiter, - concurrentLimiter, - messageUserLimiter, -} = require('~/server/middleware'); -const { isEnabled } = require('~/server/utils'); -const chat = require('./chat'); - -const { LIMIT_CONCURRENT_MESSAGES, LIMIT_MESSAGE_IP, LIMIT_MESSAGE_USER } = process.env ?? 
{}; - -const router = express.Router(); - -router.use(requireJwtAuth); -router.use(checkBan); -router.use(uaParser); - -if (isEnabled(LIMIT_CONCURRENT_MESSAGES)) { - router.use(concurrentLimiter); -} - -if (isEnabled(LIMIT_MESSAGE_IP)) { - router.use(messageIpLimiter); -} - -if (isEnabled(LIMIT_MESSAGE_USER)) { - router.use(messageUserLimiter); -} - -router.use('/chat', chat); - -module.exports = router; diff --git a/api/server/routes/index.js b/api/server/routes/index.js index 7c1b5de0fa..ec97ba3986 100644 --- a/api/server/routes/index.js +++ b/api/server/routes/index.js @@ -9,7 +9,6 @@ const presets = require('./presets'); const prompts = require('./prompts'); const balance = require('./balance'); const plugins = require('./plugins'); -const bedrock = require('./bedrock'); const actions = require('./actions'); const banner = require('./banner'); const search = require('./search'); @@ -26,11 +25,9 @@ const auth = require('./auth'); const edit = require('./edit'); const keys = require('./keys'); const user = require('./user'); -const ask = require('./ask'); const mcp = require('./mcp'); module.exports = { - ask, edit, auth, keys, @@ -46,7 +43,6 @@ module.exports = { search, config, models, - bedrock, prompts, plugins, actions, diff --git a/api/server/services/Endpoints/agents/agent.js b/api/server/services/Endpoints/agents/agent.js index e135401467..506670ecad 100644 --- a/api/server/services/Endpoints/agents/agent.js +++ b/api/server/services/Endpoints/agents/agent.js @@ -1,5 +1,9 @@ const { Providers } = require('@librechat/agents'); -const { primeResources, optionalChainWithEmptyCheck } = require('@librechat/api'); +const { + primeResources, + extractLibreChatParams, + optionalChainWithEmptyCheck, +} = require('@librechat/api'); const { ErrorTypes, EModelEndpoint, @@ -15,10 +19,9 @@ const initGoogle = require('~/server/services/Endpoints/google/initialize'); const generateArtifactsPrompt = require('~/app/clients/prompts/artifacts'); const { 
getCustomEndpointConfig } = require('~/server/services/Config'); const { processFiles } = require('~/server/services/Files/process'); +const { getFiles, getToolFilesByIds } = require('~/models/File'); const { getConvoFiles } = require('~/models/Conversation'); -const { getToolFilesByIds } = require('~/models/File'); const { getModelMaxTokens } = require('~/utils'); -const { getFiles } = require('~/models/File'); const providerConfigMap = { [Providers.XAI]: initCustom, @@ -71,7 +74,7 @@ const initializeAgent = async ({ ), ); - const { resendFiles = true, ...modelOptions } = _modelOptions; + const { resendFiles, maxContextTokens, modelOptions } = extractLibreChatParams(_modelOptions); if (isInitialAgent && conversationId != null && resendFiles) { const fileIds = (await getConvoFiles(conversationId)) ?? []; @@ -145,9 +148,8 @@ const initializeAgent = async ({ modelOptions.maxTokens, 0, ); - const maxContextTokens = optionalChainWithEmptyCheck( - modelOptions.maxContextTokens, - modelOptions.max_context_tokens, + const agentMaxContextTokens = optionalChainWithEmptyCheck( + maxContextTokens, getModelMaxTokens(tokensModel, providerEndpointMap[provider]), 4096, ); @@ -189,7 +191,7 @@ const initializeAgent = async ({ attachments, resendFiles, toolContextMap, - maxContextTokens: (maxContextTokens - maxTokens) * 0.9, + maxContextTokens: (agentMaxContextTokens - maxTokens) * 0.9, }; }; diff --git a/api/server/services/Endpoints/agents/build.js b/api/server/services/Endpoints/agents/build.js index 77ebbc58dc..143dde9459 100644 --- a/api/server/services/Endpoints/agents/build.js +++ b/api/server/services/Endpoints/agents/build.js @@ -1,10 +1,9 @@ -const { isAgentsEndpoint, Constants } = require('librechat-data-provider'); +const { isAgentsEndpoint, removeNullishValues, Constants } = require('librechat-data-provider'); const { loadAgent } = require('~/models/Agent'); const { logger } = require('~/config'); const buildOptions = (req, endpoint, parsedBody, endpointType) => { - 
const { spec, iconURL, agent_id, instructions, maxContextTokens, ...model_parameters } = - parsedBody; + const { spec, iconURL, agent_id, instructions, ...model_parameters } = parsedBody; const agentPromise = loadAgent({ req, agent_id: isAgentsEndpoint(endpoint) ? agent_id : Constants.EPHEMERAL_AGENT_ID, @@ -15,19 +14,16 @@ const buildOptions = (req, endpoint, parsedBody, endpointType) => { return undefined; }); - const endpointOption = { + return removeNullishValues({ spec, iconURL, endpoint, agent_id, endpointType, instructions, - maxContextTokens, model_parameters, agent: agentPromise, - }; - - return endpointOption; + }); }; module.exports = { buildOptions }; diff --git a/api/server/services/Endpoints/agents/initialize.js b/api/server/services/Endpoints/agents/initialize.js index e4ffcf4730..94af3bdd3b 100644 --- a/api/server/services/Endpoints/agents/initialize.js +++ b/api/server/services/Endpoints/agents/initialize.js @@ -1,11 +1,17 @@ const { logger } = require('@librechat/data-schemas'); const { createContentAggregator } = require('@librechat/agents'); -const { Constants, EModelEndpoint, getResponseSender } = require('librechat-data-provider'); const { - getDefaultHandlers, + Constants, + EModelEndpoint, + isAgentsEndpoint, + getResponseSender, +} = require('librechat-data-provider'); +const { createToolEndCallback, + getDefaultHandlers, } = require('~/server/controllers/agents/callbacks'); const { initializeAgent } = require('~/server/services/Endpoints/agents/agent'); +const { getCustomEndpointConfig } = require('~/server/services/Config'); const { loadAgentTools } = require('~/server/services/ToolService'); const AgentClient = require('~/server/controllers/agents/client'); const { getAgent } = require('~/models/Agent'); @@ -61,6 +67,7 @@ const initializeClient = async ({ req, res, endpointOption }) => { } const primaryAgent = await endpointOption.agent; + delete endpointOption.agent; if (!primaryAgent) { throw new Error('Agent not found'); } @@ -108,11 
+115,25 @@ const initializeClient = async ({ req, res, endpointOption }) => { } } + let endpointConfig = req.app.locals[primaryConfig.endpoint]; + if (!isAgentsEndpoint(primaryConfig.endpoint) && !endpointConfig) { + try { + endpointConfig = await getCustomEndpointConfig(primaryConfig.endpoint); + } catch (err) { + logger.error( + '[api/server/controllers/agents/client.js #titleConvo] Error getting custom endpoint config', + err, + ); + } + } + const sender = primaryAgent.name ?? getResponseSender({ ...endpointOption, model: endpointOption.model_parameters.model, + modelDisplayLabel: endpointConfig?.modelDisplayLabel, + modelLabel: endpointOption.model_parameters.modelLabel, }); const client = new AgentClient({ diff --git a/client/src/Providers/BadgeRowContext.tsx b/client/src/Providers/BadgeRowContext.tsx new file mode 100644 index 0000000000..860c59da46 --- /dev/null +++ b/client/src/Providers/BadgeRowContext.tsx @@ -0,0 +1,83 @@ +import React, { createContext, useContext } from 'react'; +import { Tools, LocalStorageKeys } from 'librechat-data-provider'; +import { useMCPSelect, useToolToggle, useCodeApiKeyForm, useSearchApiKeyForm } from '~/hooks'; + +interface BadgeRowContextType { + conversationId?: string | null; + mcpSelect: ReturnType; + webSearch: ReturnType; + codeInterpreter: ReturnType; + fileSearch: ReturnType; + codeApiKeyForm: ReturnType; + searchApiKeyForm: ReturnType; +} + +const BadgeRowContext = createContext(undefined); + +export function useBadgeRowContext() { + const context = useContext(BadgeRowContext); + if (context === undefined) { + throw new Error('useBadgeRowContext must be used within a BadgeRowProvider'); + } + return context; +} + +interface BadgeRowProviderProps { + children: React.ReactNode; + conversationId?: string | null; +} + +export default function BadgeRowProvider({ children, conversationId }: BadgeRowProviderProps) { + /** MCPSelect hook */ + const mcpSelect = useMCPSelect({ conversationId }); + + /** CodeInterpreter hooks 
*/ + const codeApiKeyForm = useCodeApiKeyForm({}); + const { setIsDialogOpen: setCodeDialogOpen } = codeApiKeyForm; + + const codeInterpreter = useToolToggle({ + conversationId, + setIsDialogOpen: setCodeDialogOpen, + toolKey: Tools.execute_code, + localStorageKey: LocalStorageKeys.LAST_CODE_TOGGLE_, + authConfig: { + toolId: Tools.execute_code, + queryOptions: { retry: 1 }, + }, + }); + + /** WebSearch hooks */ + const searchApiKeyForm = useSearchApiKeyForm({}); + const { setIsDialogOpen: setWebSearchDialogOpen } = searchApiKeyForm; + + const webSearch = useToolToggle({ + conversationId, + toolKey: Tools.web_search, + localStorageKey: LocalStorageKeys.LAST_WEB_SEARCH_TOGGLE_, + setIsDialogOpen: setWebSearchDialogOpen, + authConfig: { + toolId: Tools.web_search, + queryOptions: { retry: 1 }, + }, + }); + + /** FileSearch hook */ + const fileSearch = useToolToggle({ + conversationId, + toolKey: Tools.file_search, + localStorageKey: LocalStorageKeys.LAST_FILE_SEARCH_TOGGLE_, + isAuthenticated: true, + }); + + const value: BadgeRowContextType = { + mcpSelect, + webSearch, + fileSearch, + conversationId, + codeApiKeyForm, + codeInterpreter, + searchApiKeyForm, + }; + + return {children}; +} diff --git a/client/src/Providers/index.ts b/client/src/Providers/index.ts index 41c9cdceb3..8809532b49 100644 --- a/client/src/Providers/index.ts +++ b/client/src/Providers/index.ts @@ -22,3 +22,5 @@ export * from './CodeBlockContext'; export * from './ToolCallsMapContext'; export * from './SetConvoContext'; export * from './SearchContext'; +export * from './BadgeRowContext'; +export { default as BadgeRowProvider } from './BadgeRowContext'; diff --git a/client/src/components/Chat/Input/BadgeRow.tsx b/client/src/components/Chat/Input/BadgeRow.tsx index ed9f4b82c2..14f98b4521 100644 --- a/client/src/components/Chat/Input/BadgeRow.tsx +++ b/client/src/components/Chat/Input/BadgeRow.tsx @@ -1,19 +1,23 @@ import React, { memo, - useState, useRef, - useEffect, - useCallback, useMemo, + 
useState, + useEffect, forwardRef, useReducer, + useCallback, } from 'react'; import { useRecoilValue, useRecoilCallback } from 'recoil'; import type { LucideIcon } from 'lucide-react'; import CodeInterpreter from './CodeInterpreter'; +import { BadgeRowProvider } from '~/Providers'; +import ToolsDropdown from './ToolsDropdown'; import type { BadgeItem } from '~/common'; import { useChatBadges } from '~/hooks'; import { Badge } from '~/components/ui'; +import ToolDialogs from './ToolDialogs'; +import FileSearch from './FileSearch'; import MCPSelect from './MCPSelect'; import WebSearch from './WebSearch'; import store from '~/store'; @@ -313,78 +317,83 @@ function BadgeRow({ }, [dragState.draggedBadge, handleMouseMove, handleMouseUp]); return ( -
- {tempBadges.map((badge, index) => ( - - {dragState.draggedBadge && dragState.insertIndex === index && ghostBadge && ( -
- -
- )} - -
- ))} - {dragState.draggedBadge && dragState.insertIndex === tempBadges.length && ghostBadge && ( -
- -
- )} - {showEphemeralBadges === true && ( - <> - - - - - )} - {ghostBadge && ( -
- -
- )} -
+ +
+ {showEphemeralBadges === true && } + {tempBadges.map((badge, index) => ( + + {dragState.draggedBadge && dragState.insertIndex === index && ghostBadge && ( +
+ +
+ )} + +
+ ))} + {dragState.draggedBadge && dragState.insertIndex === tempBadges.length && ghostBadge && ( +
+ +
+ )} + {showEphemeralBadges === true && ( + <> + + + + + + )} + {ghostBadge && ( +
+ +
+ )} +
+ +
); } diff --git a/client/src/components/Chat/Input/CodeInterpreter.tsx b/client/src/components/Chat/Input/CodeInterpreter.tsx index 411f1e27b3..f2d9760cca 100644 --- a/client/src/components/Chat/Input/CodeInterpreter.tsx +++ b/client/src/components/Chat/Input/CodeInterpreter.tsx @@ -1,122 +1,37 @@ -import debounce from 'lodash/debounce'; -import React, { memo, useMemo, useCallback, useRef } from 'react'; -import { useRecoilState } from 'recoil'; +import React, { memo } from 'react'; import { TerminalSquareIcon } from 'lucide-react'; -import { - Tools, - AuthType, - Constants, - LocalStorageKeys, - PermissionTypes, - Permissions, -} from 'librechat-data-provider'; -import ApiKeyDialog from '~/components/SidePanel/Agents/Code/ApiKeyDialog'; -import { useLocalize, useHasAccess, useCodeApiKeyForm } from '~/hooks'; +import { PermissionTypes, Permissions } from 'librechat-data-provider'; import CheckboxButton from '~/components/ui/CheckboxButton'; -import useLocalStorage from '~/hooks/useLocalStorageAlt'; -import { useVerifyAgentToolAuth } from '~/data-provider'; -import { ephemeralAgentByConvoId } from '~/store'; +import { useLocalize, useHasAccess } from '~/hooks'; +import { useBadgeRowContext } from '~/Providers'; -const storageCondition = (value: unknown, rawCurrentValue?: string | null) => { - if (rawCurrentValue) { - try { - const currentValue = rawCurrentValue?.trim() ?? ''; - if (currentValue === 'true' && value === false) { - return true; - } - } catch (e) { - console.error(e); - } - } - return value !== undefined && value !== null && value !== '' && value !== false; -}; - -function CodeInterpreter({ conversationId }: { conversationId?: string | null }) { - const triggerRef = useRef(null); +function CodeInterpreter() { const localize = useLocalize(); - const key = conversationId ?? 
Constants.NEW_CONVO; + const { codeInterpreter, codeApiKeyForm } = useBadgeRowContext(); + const { toggleState: runCode, debouncedChange, isPinned } = codeInterpreter; + const { badgeTriggerRef } = codeApiKeyForm; const canRunCode = useHasAccess({ permissionType: PermissionTypes.RUN_CODE, permission: Permissions.USE, }); - const [ephemeralAgent, setEphemeralAgent] = useRecoilState(ephemeralAgentByConvoId(key)); - const isCodeToggleEnabled = useMemo(() => { - return ephemeralAgent?.execute_code ?? false; - }, [ephemeralAgent?.execute_code]); - - const { data } = useVerifyAgentToolAuth( - { toolId: Tools.execute_code }, - { - retry: 1, - }, - ); - const authType = useMemo(() => data?.message ?? false, [data?.message]); - const isAuthenticated = useMemo(() => data?.authenticated ?? false, [data?.authenticated]); - const { methods, onSubmit, isDialogOpen, setIsDialogOpen, handleRevokeApiKey } = - useCodeApiKeyForm({}); - - const setValue = useCallback( - (isChecked: boolean) => { - setEphemeralAgent((prev) => ({ - ...prev, - execute_code: isChecked, - })); - }, - [setEphemeralAgent], - ); - - const [runCode, setRunCode] = useLocalStorage( - `${LocalStorageKeys.LAST_CODE_TOGGLE_}${key}`, - isCodeToggleEnabled, - setValue, - storageCondition, - ); - - const handleChange = useCallback( - (e: React.ChangeEvent, isChecked: boolean) => { - if (!isAuthenticated) { - setIsDialogOpen(true); - e.preventDefault(); - return; - } - setRunCode(isChecked); - }, - [setRunCode, setIsDialogOpen, isAuthenticated], - ); - - const debouncedChange = useMemo( - () => debounce(handleChange, 50, { leading: true }), - [handleChange], - ); if (!canRunCode) { return null; } return ( - <> + (runCode || isPinned) && ( } /> - - + ) ); } diff --git a/client/src/components/Chat/Input/FileSearch.tsx b/client/src/components/Chat/Input/FileSearch.tsx new file mode 100644 index 0000000000..a4952d1fd1 --- /dev/null +++ b/client/src/components/Chat/Input/FileSearch.tsx @@ -0,0 +1,28 @@ +import React, { memo 
} from 'react'; +import CheckboxButton from '~/components/ui/CheckboxButton'; +import { useBadgeRowContext } from '~/Providers'; +import { VectorIcon } from '~/components/svg'; +import { useLocalize } from '~/hooks'; + +function FileSearch() { + const localize = useLocalize(); + const { fileSearch } = useBadgeRowContext(); + const { toggleState: fileSearchEnabled, debouncedChange, isPinned } = fileSearch; + + return ( + <> + {(fileSearchEnabled || isPinned) && ( + } + /> + )} + + ); +} + +export default memo(FileSearch); diff --git a/client/src/components/Chat/Input/Files/AttachFileChat.tsx b/client/src/components/Chat/Input/Files/AttachFileChat.tsx index 11bca082fe..746c3d9c17 100644 --- a/client/src/components/Chat/Input/Files/AttachFileChat.tsx +++ b/client/src/components/Chat/Input/Files/AttachFileChat.tsx @@ -1,31 +1,21 @@ import { memo, useMemo } from 'react'; -import { useRecoilValue } from 'recoil'; import { Constants, supportsFiles, mergeFileConfig, isAgentsEndpoint, - isEphemeralAgent, EndpointFileConfig, fileConfig as defaultFileConfig, } from 'librechat-data-provider'; -import { useChatContext } from '~/Providers'; import { useGetFileConfig } from '~/data-provider'; -import { ephemeralAgentByConvoId } from '~/store'; import AttachFileMenu from './AttachFileMenu'; -import AttachFile from './AttachFile'; +import { useChatContext } from '~/Providers'; function AttachFileChat({ disableInputs }: { disableInputs: boolean }) { const { conversation } = useChatContext(); - + const conversationId = conversation?.conversationId ?? Constants.NEW_CONVO; const { endpoint: _endpoint, endpointType } = conversation ?? { endpoint: null }; - - const key = conversation?.conversationId ?? 
Constants.NEW_CONVO; - const ephemeralAgent = useRecoilValue(ephemeralAgentByConvoId(key)); - const isAgents = useMemo( - () => isAgentsEndpoint(_endpoint) || isEphemeralAgent(_endpoint, ephemeralAgent), - [_endpoint, ephemeralAgent], - ); + const isAgents = useMemo(() => isAgentsEndpoint(_endpoint), [_endpoint]); const { data: fileConfig = defaultFileConfig } = useGetFileConfig({ select: (data) => mergeFileConfig(data), @@ -38,11 +28,8 @@ function AttachFileChat({ disableInputs }: { disableInputs: boolean }) { const endpointSupportsFiles: boolean = supportsFiles[endpointType ?? _endpoint ?? ''] ?? false; const isUploadDisabled = (disableInputs || endpointFileConfig?.disabled) ?? false; - if (isAgents) { - return ; - } - if (endpointSupportsFiles && !isUploadDisabled) { - return ; + if (isAgents || (endpointSupportsFiles && !isUploadDisabled)) { + return ; } return null; diff --git a/client/src/components/Chat/Input/Files/AttachFileMenu.tsx b/client/src/components/Chat/Input/Files/AttachFileMenu.tsx index 85df07f24f..2bffa4f50c 100644 --- a/client/src/components/Chat/Input/Files/AttachFileMenu.tsx +++ b/client/src/components/Chat/Input/Files/AttachFileMenu.tsx @@ -1,21 +1,25 @@ +import { useSetRecoilState } from 'recoil'; import * as Ariakit from '@ariakit/react'; import React, { useRef, useState, useMemo } from 'react'; import { FileSearch, ImageUpIcon, TerminalSquareIcon, FileType2Icon } from 'lucide-react'; -import { EToolResources, EModelEndpoint, defaultAgentCapabilities } from 'librechat-data-provider'; import { FileUpload, TooltipAnchor, DropdownPopup, AttachmentIcon } from '~/components'; +import { EToolResources, EModelEndpoint } from 'librechat-data-provider'; import { useGetEndpointsQuery } from '~/data-provider'; import { useLocalize, useFileHandling } from '~/hooks'; +import { ephemeralAgentByConvoId } from '~/store'; import { cn } from '~/utils'; -interface AttachFileProps { +interface AttachFileMenuProps { + conversationId: string; disabled?: boolean 
| null; } -const AttachFile = ({ disabled }: AttachFileProps) => { +const AttachFileMenu = ({ disabled, conversationId }: AttachFileMenuProps) => { const localize = useLocalize(); const isUploadDisabled = disabled ?? false; const inputRef = useRef(null); const [isPopoverActive, setIsPopoverActive] = useState(false); + const setEphemeralAgent = useSetRecoilState(ephemeralAgentByConvoId(conversationId)); const [toolResource, setToolResource] = useState(); const { data: endpointsConfig } = useGetEndpointsQuery(); const { handleFileChange } = useFileHandling({ @@ -69,6 +73,7 @@ const AttachFile = ({ disabled }: AttachFileProps) => { label: localize('com_ui_upload_file_search'), onClick: () => { setToolResource(EToolResources.file_search); + /** File search is not automatically enabled to simulate legacy behavior */ handleUploadClick(); }, icon: , @@ -80,6 +85,10 @@ const AttachFile = ({ disabled }: AttachFileProps) => { label: localize('com_ui_upload_code_files'), onClick: () => { setToolResource(EToolResources.execute_code); + setEphemeralAgent((prev) => ({ + ...prev, + [EToolResources.execute_code]: true, + })); handleUploadClick(); }, icon: , @@ -87,7 +96,7 @@ const AttachFile = ({ disabled }: AttachFileProps) => { } return items; - }, [capabilities, localize, setToolResource]); + }, [capabilities, localize, setToolResource, setEphemeralAgent]); const menuTrigger = ( { ); }; -export default React.memo(AttachFile); +export default React.memo(AttachFileMenu); diff --git a/client/src/components/Chat/Input/Files/DragDropModal.tsx b/client/src/components/Chat/Input/Files/DragDropModal.tsx index 784116dc65..5606b4d30c 100644 --- a/client/src/components/Chat/Input/Files/DragDropModal.tsx +++ b/client/src/components/Chat/Input/Files/DragDropModal.tsx @@ -7,7 +7,7 @@ import useLocalize from '~/hooks/useLocalize'; import { OGDialog } from '~/components/ui'; interface DragDropModalProps { - onOptionSelect: (option: string | undefined) => void; + onOptionSelect: (option: 
EToolResources | undefined) => void; files: File[]; isVisible: boolean; setShowModal: (showModal: boolean) => void; diff --git a/client/src/components/Chat/Input/MCPSelect.tsx b/client/src/components/Chat/Input/MCPSelect.tsx index ebe56c8024..13c1a4a26a 100644 --- a/client/src/components/Chat/Input/MCPSelect.tsx +++ b/client/src/components/Chat/Input/MCPSelect.tsx @@ -1,75 +1,29 @@ -import React, { memo, useRef, useMemo, useEffect, useCallback, useState } from 'react'; -import { useRecoilState } from 'recoil'; -import { Settings2 } from 'lucide-react'; +import React, { memo, useCallback, useState } from 'react'; +import { SettingsIcon } from 'lucide-react'; +import { Constants } from 'librechat-data-provider'; import { useUpdateUserPluginsMutation } from 'librechat-data-provider/react-query'; -import { Constants, EModelEndpoint, LocalStorageKeys } from 'librechat-data-provider'; -import type { TPlugin, TPluginAuthConfig, TUpdateUserPlugins } from 'librechat-data-provider'; +import type { TUpdateUserPlugins } from 'librechat-data-provider'; +import type { McpServerInfo } from '~/hooks/Plugins/useMCPSelect'; import MCPConfigDialog, { type ConfigFieldDetail } from '~/components/ui/MCPConfigDialog'; -import { useAvailableToolsQuery } from '~/data-provider'; -import useLocalStorage from '~/hooks/useLocalStorageAlt'; +import { useToastContext, useBadgeRowContext } from '~/Providers'; import MultiSelect from '~/components/ui/MultiSelect'; -import { ephemeralAgentByConvoId } from '~/store'; -import { useToastContext } from '~/Providers'; -import MCPIcon from '~/components/ui/MCPIcon'; +import { MCPIcon } from '~/components/svg'; import { useLocalize } from '~/hooks'; -interface McpServerInfo { - name: string; - pluginKey: string; - authConfig?: TPluginAuthConfig[]; - authenticated?: boolean; -} - -// Helper function to extract mcp_serverName from a full pluginKey like action_mcp_serverName const getBaseMCPPluginKey = (fullPluginKey: string): string => { const parts = 
fullPluginKey.split(Constants.mcp_delimiter); return Constants.mcp_prefix + parts[parts.length - 1]; }; -const storageCondition = (value: unknown, rawCurrentValue?: string | null) => { - if (rawCurrentValue) { - try { - const currentValue = rawCurrentValue?.trim() ?? ''; - if (currentValue.length > 2) { - return true; - } - } catch (e) { - console.error(e); - } - } - return Array.isArray(value) && value.length > 0; -}; - -function MCPSelect({ conversationId }: { conversationId?: string | null }) { +function MCPSelect() { const localize = useLocalize(); const { showToast } = useToastContext(); - const key = conversationId ?? Constants.NEW_CONVO; - const hasSetFetched = useRef(null); + const { mcpSelect } = useBadgeRowContext(); + const { mcpValues, setMCPValues, mcpServerNames, mcpToolDetails, isPinned } = mcpSelect; + const [isConfigModalOpen, setIsConfigModalOpen] = useState(false); const [selectedToolForConfig, setSelectedToolForConfig] = useState(null); - const { data: mcpToolDetails, isFetched } = useAvailableToolsQuery(EModelEndpoint.agents, { - select: (data: TPlugin[]) => { - const mcpToolsMap = new Map(); - data.forEach((tool) => { - const isMCP = tool.pluginKey.includes(Constants.mcp_delimiter); - if (isMCP && tool.chatMenu !== false) { - const parts = tool.pluginKey.split(Constants.mcp_delimiter); - const serverName = parts[parts.length - 1]; - if (!mcpToolsMap.has(serverName)) { - mcpToolsMap.set(serverName, { - name: serverName, - pluginKey: tool.pluginKey, - authConfig: tool.authConfig, - authenticated: tool.authenticated, - }); - } - } - }); - return Array.from(mcpToolsMap.values()); - }, - }); - const updateUserPluginsMutation = useUpdateUserPluginsMutation({ onSuccess: () => { setIsConfigModalOpen(false); @@ -84,48 +38,6 @@ function MCPSelect({ conversationId }: { conversationId?: string | null }) { }, }); - const [ephemeralAgent, setEphemeralAgent] = useRecoilState(ephemeralAgentByConvoId(key)); - const mcpState = useMemo(() => { - return 
ephemeralAgent?.mcp ?? []; - }, [ephemeralAgent?.mcp]); - - const setSelectedValues = useCallback( - (values: string[] | null | undefined) => { - if (!values) { - return; - } - if (!Array.isArray(values)) { - return; - } - setEphemeralAgent((prev) => ({ - ...prev, - mcp: values, - })); - }, - [setEphemeralAgent], - ); - const [mcpValues, setMCPValues] = useLocalStorage( - `${LocalStorageKeys.LAST_MCP_}${key}`, - mcpState, - setSelectedValues, - storageCondition, - ); - - useEffect(() => { - if (hasSetFetched.current === key) { - return; - } - if (!isFetched) { - return; - } - hasSetFetched.current = key; - if ((mcpToolDetails?.length ?? 0) > 0) { - setMCPValues(mcpValues.filter((mcp) => mcpToolDetails?.some((tool) => tool.name === mcp))); - return; - } - setMCPValues([]); - }, [isFetched, setMCPValues, mcpToolDetails, key, mcpValues]); - const renderSelectedValues = useCallback( (values: string[], placeholder?: string) => { if (values.length === 0) { @@ -139,10 +51,6 @@ function MCPSelect({ conversationId }: { conversationId?: string | null }) { [localize], ); - const mcpServerNames = useMemo(() => { - return (mcpToolDetails ?? 
[]).map((tool) => tool.name); - }, [mcpToolDetails]); - const handleConfigSave = useCallback( (targetName: string, authData: Record) => { if (selectedToolForConfig && selectedToolForConfig.name === targetName) { @@ -198,10 +106,10 @@ function MCPSelect({ conversationId }: { conversationId?: string | null }) { setSelectedToolForConfig(tool); setIsConfigModalOpen(true); }} - className="ml-2 flex h-6 w-6 items-center justify-center rounded p-1 hover:bg-black/10 dark:hover:bg-white/10" + className="ml-2 flex h-6 w-6 items-center justify-center rounded p-1 hover:bg-surface-secondary" aria-label={`Configure ${serverName}`} > - + ); @@ -212,6 +120,11 @@ function MCPSelect({ conversationId }: { conversationId?: string | null }) { [mcpToolDetails, setSelectedToolForConfig, setIsConfigModalOpen], ); + // Don't render if no servers are selected and not pinned + if ((!mcpValues || mcpValues.length === 0) && !isPinned) { + return null; + } + if (!mcpToolDetails || mcpToolDetails.length === 0) { return null; } diff --git a/client/src/components/Chat/Input/MCPSubMenu.tsx b/client/src/components/Chat/Input/MCPSubMenu.tsx new file mode 100644 index 0000000000..a955f2bd90 --- /dev/null +++ b/client/src/components/Chat/Input/MCPSubMenu.tsx @@ -0,0 +1,96 @@ +import React from 'react'; +import * as Ariakit from '@ariakit/react'; +import { ChevronRight } from 'lucide-react'; +import { PinIcon, MCPIcon } from '~/components/svg'; +import { useLocalize } from '~/hooks'; +import { cn } from '~/utils'; + +interface MCPSubMenuProps { + isMCPPinned: boolean; + setIsMCPPinned: (value: boolean) => void; + mcpValues?: string[]; + mcpServerNames: string[]; + handleMCPToggle: (serverName: string) => void; +} + +const MCPSubMenu = ({ + mcpValues, + isMCPPinned, + mcpServerNames, + setIsMCPPinned, + handleMCPToggle, + ...props +}: MCPSubMenuProps) => { + const localize = useLocalize(); + + const menuStore = Ariakit.useMenuStore({ + showTimeout: 100, + placement: 'right', + }); + + return ( + + + } + 
> +
+ + {localize('com_ui_mcp_servers')} + +
+ +
+ + {mcpServerNames.map((serverName) => ( + { + event.preventDefault(); + handleMCPToggle(serverName); + }} + className={cn( + 'flex items-center gap-2 rounded-lg px-2 py-1.5 text-text-primary hover:cursor-pointer', + 'scroll-m-1 outline-none transition-colors', + 'hover:bg-black/[0.075] dark:hover:bg-white/10', + 'data-[active-item]:bg-black/[0.075] dark:data-[active-item]:bg-white/10', + 'w-full min-w-0 text-sm', + )} + > + + {serverName} + + ))} + +
+ ); +}; + +export default React.memo(MCPSubMenu); diff --git a/client/src/components/Chat/Input/ToolDialogs.tsx b/client/src/components/Chat/Input/ToolDialogs.tsx new file mode 100644 index 0000000000..d9f2122fca --- /dev/null +++ b/client/src/components/Chat/Input/ToolDialogs.tsx @@ -0,0 +1,66 @@ +import React, { useMemo } from 'react'; +import { AuthType } from 'librechat-data-provider'; +import SearchApiKeyDialog from '~/components/SidePanel/Agents/Search/ApiKeyDialog'; +import CodeApiKeyDialog from '~/components/SidePanel/Agents/Code/ApiKeyDialog'; +import { useBadgeRowContext } from '~/Providers'; + +function ToolDialogs() { + const { webSearch, codeInterpreter, searchApiKeyForm, codeApiKeyForm } = useBadgeRowContext(); + const { authData: webSearchAuthData } = webSearch; + const { authData: codeAuthData } = codeInterpreter; + + const { + methods: searchMethods, + onSubmit: searchOnSubmit, + isDialogOpen: searchDialogOpen, + setIsDialogOpen: setSearchDialogOpen, + handleRevokeApiKey: searchHandleRevoke, + badgeTriggerRef: searchBadgeTriggerRef, + menuTriggerRef: searchMenuTriggerRef, + } = searchApiKeyForm; + + const { + methods: codeMethods, + onSubmit: codeOnSubmit, + isDialogOpen: codeDialogOpen, + setIsDialogOpen: setCodeDialogOpen, + handleRevokeApiKey: codeHandleRevoke, + badgeTriggerRef: codeBadgeTriggerRef, + menuTriggerRef: codeMenuTriggerRef, + } = codeApiKeyForm; + + const searchAuthTypes = useMemo( + () => webSearchAuthData?.authTypes ?? [], + [webSearchAuthData?.authTypes], + ); + const codeAuthType = useMemo(() => codeAuthData?.message ?? 
false, [codeAuthData?.message]); + + return ( + <> + + + + ); +} + +export default ToolDialogs; diff --git a/client/src/components/Chat/Input/ToolsDropdown.tsx b/client/src/components/Chat/Input/ToolsDropdown.tsx new file mode 100644 index 0000000000..5fa3d9eaa4 --- /dev/null +++ b/client/src/components/Chat/Input/ToolsDropdown.tsx @@ -0,0 +1,322 @@ +import React, { useState, useMemo, useCallback } from 'react'; +import * as Ariakit from '@ariakit/react'; +import { Globe, Settings, Settings2, TerminalSquareIcon } from 'lucide-react'; +import type { MenuItemProps } from '~/common'; +import { Permissions, PermissionTypes, AuthType } from 'librechat-data-provider'; +import { TooltipAnchor, DropdownPopup } from '~/components'; +import MCPSubMenu from '~/components/Chat/Input/MCPSubMenu'; +import { PinIcon, VectorIcon } from '~/components/svg'; +import { useLocalize, useHasAccess } from '~/hooks'; +import { useBadgeRowContext } from '~/Providers'; +import { cn } from '~/utils'; + +interface ToolsDropdownProps { + disabled?: boolean; +} + +const ToolsDropdown = ({ disabled }: ToolsDropdownProps) => { + const localize = useLocalize(); + const isDisabled = disabled ?? 
false; + const [isPopoverActive, setIsPopoverActive] = useState(false); + const { webSearch, codeInterpreter, fileSearch, mcpSelect, searchApiKeyForm, codeApiKeyForm } = + useBadgeRowContext(); + const { setIsDialogOpen: setIsCodeDialogOpen, menuTriggerRef: codeMenuTriggerRef } = + codeApiKeyForm; + const { setIsDialogOpen: setIsSearchDialogOpen, menuTriggerRef: searchMenuTriggerRef } = + searchApiKeyForm; + const { + isPinned: isSearchPinned, + setIsPinned: setIsSearchPinned, + authData: webSearchAuthData, + } = webSearch; + const { + isPinned: isCodePinned, + setIsPinned: setIsCodePinned, + authData: codeAuthData, + } = codeInterpreter; + const { isPinned: isFileSearchPinned, setIsPinned: setIsFileSearchPinned } = fileSearch; + const { + mcpValues, + mcpServerNames, + isPinned: isMCPPinned, + setIsPinned: setIsMCPPinned, + } = mcpSelect; + + const canUseWebSearch = useHasAccess({ + permissionType: PermissionTypes.WEB_SEARCH, + permission: Permissions.USE, + }); + + const canRunCode = useHasAccess({ + permissionType: PermissionTypes.RUN_CODE, + permission: Permissions.USE, + }); + + const showWebSearchSettings = useMemo(() => { + const authTypes = webSearchAuthData?.authTypes ?? 
[]; + if (authTypes.length === 0) return true; + return !authTypes.every(([, authType]) => authType === AuthType.SYSTEM_DEFINED); + }, [webSearchAuthData?.authTypes]); + + const showCodeSettings = useMemo( + () => codeAuthData?.message !== AuthType.SYSTEM_DEFINED, + [codeAuthData?.message], + ); + + const handleWebSearchToggle = useCallback(() => { + const newValue = !webSearch.toggleState; + webSearch.debouncedChange({ isChecked: newValue }); + }, [webSearch]); + + const handleCodeInterpreterToggle = useCallback(() => { + const newValue = !codeInterpreter.toggleState; + codeInterpreter.debouncedChange({ isChecked: newValue }); + }, [codeInterpreter]); + + const handleFileSearchToggle = useCallback(() => { + const newValue = !fileSearch.toggleState; + fileSearch.debouncedChange({ isChecked: newValue }); + }, [fileSearch]); + + const handleMCPToggle = useCallback( + (serverName: string) => { + const currentValues = mcpSelect.mcpValues ?? []; + const newValues = currentValues.includes(serverName) + ? currentValues.filter((v) => v !== serverName) + : [...currentValues, serverName]; + mcpSelect.setMCPValues(newValues); + }, + [mcpSelect], + ); + + const dropdownItems = useMemo(() => { + const items: MenuItemProps[] = [ + { + render: () => ( +
+ {localize('com_ui_tools')} +
+ ), + hideOnClick: false, + }, + ]; + + items.push({ + onClick: handleFileSearchToggle, + hideOnClick: false, + render: (props) => ( +
+
+ + {localize('com_assistants_file_search')} +
+ +
+ ), + }); + + if (canUseWebSearch) { + items.push({ + onClick: handleWebSearchToggle, + hideOnClick: false, + render: (props) => ( +
+
+ + {localize('com_ui_web_search')} +
+
+ {showWebSearchSettings && ( + + )} + +
+
+ ), + }); + } + + if (canRunCode) { + items.push({ + onClick: handleCodeInterpreterToggle, + hideOnClick: false, + render: (props) => ( +
+
+ + {localize('com_assistants_code_interpreter')} +
+
+ {showCodeSettings && ( + + )} + +
+
+ ), + }); + } + + if (mcpServerNames && mcpServerNames.length > 0) { + items.push({ + hideOnClick: false, + render: (props) => ( + + ), + }); + } + + return items; + }, [ + localize, + mcpValues, + canRunCode, + isMCPPinned, + isCodePinned, + mcpServerNames, + isSearchPinned, + setIsMCPPinned, + canUseWebSearch, + setIsCodePinned, + handleMCPToggle, + showCodeSettings, + setIsSearchPinned, + isFileSearchPinned, + codeMenuTriggerRef, + setIsCodeDialogOpen, + searchMenuTriggerRef, + showWebSearchSettings, + setIsFileSearchPinned, + handleWebSearchToggle, + setIsSearchDialogOpen, + handleFileSearchToggle, + handleCodeInterpreterToggle, + ]); + + const menuTrigger = ( + +
+ +
+ + } + id="tools-dropdown-button" + description={localize('com_ui_tools')} + disabled={isDisabled} + /> + ); + + return ( + + ); +}; + +export default React.memo(ToolsDropdown); diff --git a/client/src/components/Chat/Input/WebSearch.tsx b/client/src/components/Chat/Input/WebSearch.tsx index 6844ee1da0..44b5c4a28c 100644 --- a/client/src/components/Chat/Input/WebSearch.tsx +++ b/client/src/components/Chat/Input/WebSearch.tsx @@ -1,122 +1,37 @@ -import React, { memo, useRef, useMemo, useCallback } from 'react'; +import React, { memo } from 'react'; import { Globe } from 'lucide-react'; -import debounce from 'lodash/debounce'; -import { useRecoilState } from 'recoil'; -import { - Tools, - AuthType, - Constants, - Permissions, - PermissionTypes, - LocalStorageKeys, -} from 'librechat-data-provider'; -import ApiKeyDialog from '~/components/SidePanel/Agents/Search/ApiKeyDialog'; -import { useLocalize, useHasAccess, useSearchApiKeyForm } from '~/hooks'; +import { Permissions, PermissionTypes } from 'librechat-data-provider'; import CheckboxButton from '~/components/ui/CheckboxButton'; -import useLocalStorage from '~/hooks/useLocalStorageAlt'; -import { useVerifyAgentToolAuth } from '~/data-provider'; -import { ephemeralAgentByConvoId } from '~/store'; +import { useLocalize, useHasAccess } from '~/hooks'; +import { useBadgeRowContext } from '~/Providers'; -const storageCondition = (value: unknown, rawCurrentValue?: string | null) => { - if (rawCurrentValue) { - try { - const currentValue = rawCurrentValue?.trim() ?? ''; - if (currentValue === 'true' && value === false) { - return true; - } - } catch (e) { - console.error(e); - } - } - return value !== undefined && value !== null && value !== '' && value !== false; -}; - -function WebSearch({ conversationId }: { conversationId?: string | null }) { - const triggerRef = useRef(null); +function WebSearch() { const localize = useLocalize(); - const key = conversationId ?? 
Constants.NEW_CONVO; + const { webSearch: webSearchData, searchApiKeyForm } = useBadgeRowContext(); + const { toggleState: webSearch, debouncedChange, isPinned } = webSearchData; + const { badgeTriggerRef } = searchApiKeyForm; const canUseWebSearch = useHasAccess({ permissionType: PermissionTypes.WEB_SEARCH, permission: Permissions.USE, }); - const [ephemeralAgent, setEphemeralAgent] = useRecoilState(ephemeralAgentByConvoId(key)); - const isWebSearchToggleEnabled = useMemo(() => { - return ephemeralAgent?.web_search ?? false; - }, [ephemeralAgent?.web_search]); - - const { data } = useVerifyAgentToolAuth( - { toolId: Tools.web_search }, - { - retry: 1, - }, - ); - const authTypes = useMemo(() => data?.authTypes ?? [], [data?.authTypes]); - const isAuthenticated = useMemo(() => data?.authenticated ?? false, [data?.authenticated]); - const { methods, onSubmit, isDialogOpen, setIsDialogOpen, handleRevokeApiKey } = - useSearchApiKeyForm({}); - - const setValue = useCallback( - (isChecked: boolean) => { - setEphemeralAgent((prev) => ({ - ...prev, - web_search: isChecked, - })); - }, - [setEphemeralAgent], - ); - - const [webSearch, setWebSearch] = useLocalStorage( - `${LocalStorageKeys.LAST_WEB_SEARCH_TOGGLE_}${key}`, - isWebSearchToggleEnabled, - setValue, - storageCondition, - ); - - const handleChange = useCallback( - (e: React.ChangeEvent, isChecked: boolean) => { - if (!isAuthenticated) { - setIsDialogOpen(true); - e.preventDefault(); - return; - } - setWebSearch(isChecked); - }, - [setWebSearch, setIsDialogOpen, isAuthenticated], - ); - - const debouncedChange = useMemo( - () => debounce(handleChange, 50, { leading: true }), - [handleChange], - ); if (!canUseWebSearch) { return null; } return ( - <> + (webSearch || isPinned) && ( } /> - - + ) ); } diff --git a/client/src/components/Chat/Menus/Endpoints/utils.ts b/client/src/components/Chat/Menus/Endpoints/utils.ts index 87c0133cf5..5ed155c6a0 100644 --- a/client/src/components/Chat/Menus/Endpoints/utils.ts +++ 
b/client/src/components/Chat/Menus/Endpoints/utils.ts @@ -83,7 +83,7 @@ export function filterModels( let modelName = modelId; if (isAgentsEndpoint(endpoint.value) && agentsMap && agentsMap[modelId]) { - modelName = agentsMap[modelId].name || modelId; + modelName = agentsMap[modelId]?.name || modelId; } else if ( isAssistantsEndpoint(endpoint.value) && assistantsMap && diff --git a/client/src/components/SidePanel/Agents/Code/ApiKeyDialog.tsx b/client/src/components/SidePanel/Agents/Code/ApiKeyDialog.tsx index 2fa271c733..b2fcac1b5a 100644 --- a/client/src/components/SidePanel/Agents/Code/ApiKeyDialog.tsx +++ b/client/src/components/SidePanel/Agents/Code/ApiKeyDialog.tsx @@ -15,6 +15,7 @@ export default function ApiKeyDialog({ register, handleSubmit, triggerRef, + triggerRefs, }: { isOpen: boolean; onOpenChange: (open: boolean) => void; @@ -24,7 +25,8 @@ export default function ApiKeyDialog({ isToolAuthenticated: boolean; register: UseFormRegister; handleSubmit: UseFormHandleSubmit; - triggerRef?: RefObject; + triggerRef?: RefObject; + triggerRefs?: RefObject[]; }) { const localize = useLocalize(); const languageIcons = [ @@ -41,7 +43,12 @@ export default function ApiKeyDialog({ ]; return ( - + void; @@ -30,7 +31,8 @@ export default function ApiKeyDialog({ isToolAuthenticated: boolean; register: UseFormRegister; handleSubmit: UseFormHandleSubmit; - triggerRef?: React.RefObject; + triggerRef?: React.RefObject; + triggerRefs?: React.RefObject[]; }) { const localize = useLocalize(); const { data: config } = useGetStartupConfig(); @@ -181,7 +183,12 @@ export default function ApiKeyDialog({ } return ( - + - - + + + ); } diff --git a/client/src/components/svg/VectorIcon.tsx b/client/src/components/svg/VectorIcon.tsx new file mode 100644 index 0000000000..209e9b912a --- /dev/null +++ b/client/src/components/svg/VectorIcon.tsx @@ -0,0 +1,15 @@ +export default function VectorIcon({ className }: { className?: string }) { + return ( + + + + ); +} diff --git 
a/client/src/components/svg/index.ts b/client/src/components/svg/index.ts index 73aad266d8..9efffb91c1 100644 --- a/client/src/components/svg/index.ts +++ b/client/src/components/svg/index.ts @@ -62,3 +62,5 @@ export { default as ThumbUpIcon } from './ThumbUpIcon'; export { default as ThumbDownIcon } from './ThumbDownIcon'; export { default as XAIcon } from './XAIcon'; export { default as PersonalizationIcon } from './PersonalizationIcon'; +export { default as MCPIcon } from './MCPIcon'; +export { default as VectorIcon } from './VectorIcon'; diff --git a/client/src/components/ui/CheckboxButton.tsx b/client/src/components/ui/CheckboxButton.tsx index 7985b73749..c664a61325 100644 --- a/client/src/components/ui/CheckboxButton.tsx +++ b/client/src/components/ui/CheckboxButton.tsx @@ -9,11 +9,12 @@ const CheckboxButton = React.forwardRef< icon?: React.ReactNode; label: string; className?: string; + checked?: boolean; defaultChecked?: boolean; isCheckedClassName?: string; - setValue?: (e: React.ChangeEvent, isChecked: boolean) => void; + setValue?: (values: { e?: React.ChangeEvent; isChecked: boolean }) => void; } ->(({ icon, label, setValue, className, defaultChecked, isCheckedClassName }, ref) => { +>(({ icon, label, setValue, className, checked, defaultChecked, isCheckedClassName }, ref) => { const checkbox = useCheckboxStore(); const isChecked = useStoreState(checkbox, (state) => state?.value); const onChange = (e: React.ChangeEvent) => { @@ -21,20 +22,28 @@ const CheckboxButton = React.forwardRef< if (typeof isChecked !== 'boolean') { return; } - setValue?.(e, !isChecked); + setValue?.({ e, isChecked: !isChecked }); }; + + // Sync with controlled checked prop useEffect(() => { - if (defaultChecked) { + if (checked !== undefined) { + checkbox.setValue(checked); + } + }, [checked, checkbox]); + + // Set initial value from defaultChecked + useEffect(() => { + if (defaultChecked !== undefined && checked === undefined) { checkbox.setValue(defaultChecked); } - }, 
[defaultChecked, checkbox]); + }, [defaultChecked, checked, checkbox]); return ( - - - - - ); -} diff --git a/client/src/components/ui/OriginalDialog.tsx b/client/src/components/ui/OriginalDialog.tsx index 4e1ff3bd48..cfb2fe2261 100644 --- a/client/src/components/ui/OriginalDialog.tsx +++ b/client/src/components/ui/OriginalDialog.tsx @@ -5,16 +5,26 @@ import { cn } from '~/utils'; interface OGDialogProps extends DialogPrimitive.DialogProps { triggerRef?: React.RefObject; + triggerRefs?: React.RefObject[]; } const Dialog = React.forwardRef( - ({ children, triggerRef, onOpenChange, ...props }, _ref) => { + ({ children, triggerRef, triggerRefs, onOpenChange, ...props }, _ref) => { const handleOpenChange = (open: boolean) => { if (!open && triggerRef?.current) { setTimeout(() => { triggerRef.current?.focus(); }, 0); } + if (triggerRefs?.length) { + triggerRefs.forEach((ref) => { + if (ref?.current) { + setTimeout(() => { + ref.current?.focus(); + }, 0); + } + }); + } onOpenChange?.(open); }; diff --git a/client/src/components/ui/index.ts b/client/src/components/ui/index.ts index 5edc18bd1c..31443c900f 100644 --- a/client/src/components/ui/index.ts +++ b/client/src/components/ui/index.ts @@ -28,7 +28,6 @@ export * from './Pagination'; export * from './Progress'; export * from './InputOTP'; export { default as Badge } from './Badge'; -export { default as MCPIcon } from './MCPIcon'; export { default as Combobox } from './Combobox'; export { default as Dropdown } from './Dropdown'; export { default as SplitText } from './SplitText'; diff --git a/client/src/hooks/Files/useDragHelpers.ts b/client/src/hooks/Files/useDragHelpers.ts index af4530e622..3968e2bb03 100644 --- a/client/src/hooks/Files/useDragHelpers.ts +++ b/client/src/hooks/Files/useDragHelpers.ts @@ -1,43 +1,46 @@ import { useState, useMemo } from 'react'; import { useDrop } from 'react-dnd'; -import { useRecoilValue } from 'recoil'; import { NativeTypes } from 'react-dnd-html5-backend'; import { useQueryClient } 
from '@tanstack/react-query'; +import { useRecoilValue, useSetRecoilState } from 'recoil'; import { - Constants, QueryKeys, + Constants, EModelEndpoint, - isAgentsEndpoint, - isEphemeralAgent, + EToolResources, AgentCapabilities, + isAssistantsEndpoint, } from 'librechat-data-provider'; -import type * as t from 'librechat-data-provider'; import type { DropTargetMonitor } from 'react-dnd'; -import useFileHandling from './useFileHandling'; +import type * as t from 'librechat-data-provider'; import store, { ephemeralAgentByConvoId } from '~/store'; +import useFileHandling from './useFileHandling'; export default function useDragHelpers() { const queryClient = useQueryClient(); const [showModal, setShowModal] = useState(false); const [draggedFiles, setDraggedFiles] = useState([]); const conversation = useRecoilValue(store.conversationByIndex(0)) || undefined; - const key = useMemo( - () => conversation?.conversationId ?? Constants.NEW_CONVO, - [conversation?.conversationId], + const setEphemeralAgent = useSetRecoilState( + ephemeralAgentByConvoId(conversation?.conversationId ?? 
Constants.NEW_CONVO), ); - const ephemeralAgent = useRecoilValue(ephemeralAgentByConvoId(key)); - const handleOptionSelect = (toolResource: string | undefined) => { + const handleOptionSelect = (toolResource: EToolResources | undefined) => { + /** File search is not automatically enabled to simulate legacy behavior */ + if (toolResource && toolResource !== EToolResources.file_search) { + setEphemeralAgent((prev) => ({ + ...prev, + [toolResource]: true, + })); + } handleFiles(draggedFiles, toolResource); setShowModal(false); setDraggedFiles([]); }; const isAgents = useMemo( - () => - isAgentsEndpoint(conversation?.endpoint) || - isEphemeralAgent(conversation?.endpoint, ephemeralAgent), - [conversation?.endpoint, ephemeralAgent], + () => !isAssistantsEndpoint(conversation?.endpoint), + [conversation?.endpoint], ); const { handleFiles } = useFileHandling({ diff --git a/client/src/hooks/Nav/useSideNavLinks.ts b/client/src/hooks/Nav/useSideNavLinks.ts index abc4688f73..13657c058e 100644 --- a/client/src/hooks/Nav/useSideNavLinks.ts +++ b/client/src/hooks/Nav/useSideNavLinks.ts @@ -15,12 +15,11 @@ import BookmarkPanel from '~/components/SidePanel/Bookmarks/BookmarkPanel'; import MemoryViewer from '~/components/SidePanel/Memories/MemoryViewer'; import PanelSwitch from '~/components/SidePanel/Builder/PanelSwitch'; import PromptsAccordion from '~/components/Prompts/PromptsAccordion'; +import { Blocks, MCPIcon, AttachmentIcon } from '~/components/svg'; import Parameters from '~/components/SidePanel/Parameters/Panel'; import FilesPanel from '~/components/SidePanel/Files/Panel'; import MCPPanel from '~/components/SidePanel/MCP/MCPPanel'; -import { Blocks, AttachmentIcon } from '~/components/svg'; import { useGetStartupConfig } from '~/data-provider'; -import MCPIcon from '~/components/ui/MCPIcon'; import { useHasAccess } from '~/hooks'; export default function useSideNavLinks({ diff --git a/client/src/hooks/Plugins/index.ts b/client/src/hooks/Plugins/index.ts index 
c2a0ffe97a..85b6c7186e 100644 --- a/client/src/hooks/Plugins/index.ts +++ b/client/src/hooks/Plugins/index.ts @@ -1,3 +1,5 @@ +export * from './useMCPSelect'; +export * from './useToolToggle'; export { default as useAuthCodeTool } from './useAuthCodeTool'; export { default as usePluginInstall } from './usePluginInstall'; export { default as useCodeApiKeyForm } from './useCodeApiKeyForm'; diff --git a/client/src/hooks/Plugins/useCodeApiKeyForm.ts b/client/src/hooks/Plugins/useCodeApiKeyForm.ts index 4b1f9814a3..32120c8ab2 100644 --- a/client/src/hooks/Plugins/useCodeApiKeyForm.ts +++ b/client/src/hooks/Plugins/useCodeApiKeyForm.ts @@ -1,5 +1,5 @@ // client/src/hooks/Plugins/useCodeApiKeyForm.ts -import { useState, useCallback } from 'react'; +import { useRef, useState, useCallback } from 'react'; import { useForm } from 'react-hook-form'; import type { ApiKeyFormData } from '~/common'; import useAuthCodeTool from '~/hooks/Plugins/useAuthCodeTool'; @@ -12,6 +12,8 @@ export default function useCodeApiKeyForm({ onRevoke?: () => void; }) { const methods = useForm(); + const menuTriggerRef = useRef(null); + const badgeTriggerRef = useRef(null); const [isDialogOpen, setIsDialogOpen] = useState(false); const { installTool, removeTool } = useAuthCodeTool({ isEntityTool: true }); const { reset } = methods; @@ -39,5 +41,7 @@ export default function useCodeApiKeyForm({ setIsDialogOpen, handleRevokeApiKey, onSubmit: onSubmitHandler, + badgeTriggerRef, + menuTriggerRef, }; } diff --git a/client/src/hooks/Plugins/useMCPSelect.ts b/client/src/hooks/Plugins/useMCPSelect.ts new file mode 100644 index 0000000000..113f0011bc --- /dev/null +++ b/client/src/hooks/Plugins/useMCPSelect.ts @@ -0,0 +1,121 @@ +import { useRef, useEffect, useCallback, useMemo } from 'react'; +import { useRecoilState } from 'recoil'; +import { Constants, LocalStorageKeys, EModelEndpoint } from 'librechat-data-provider'; +import type { TPlugin, TPluginAuthConfig } from 'librechat-data-provider'; +import { 
useAvailableToolsQuery } from '~/data-provider'; +import useLocalStorage from '~/hooks/useLocalStorageAlt'; +import { ephemeralAgentByConvoId } from '~/store'; + +const storageCondition = (value: unknown, rawCurrentValue?: string | null) => { + if (rawCurrentValue) { + try { + const currentValue = rawCurrentValue?.trim() ?? ''; + if (currentValue.length > 2) { + return true; + } + } catch (e) { + console.error(e); + } + } + return Array.isArray(value) && value.length > 0; +}; + +interface UseMCPSelectOptions { + conversationId?: string | null; +} + +export interface McpServerInfo { + name: string; + pluginKey: string; + authConfig?: TPluginAuthConfig[]; + authenticated?: boolean; +} + +export function useMCPSelect({ conversationId }: UseMCPSelectOptions) { + const key = conversationId ?? Constants.NEW_CONVO; + const hasSetFetched = useRef(null); + const [ephemeralAgent, setEphemeralAgent] = useRecoilState(ephemeralAgentByConvoId(key)); + const { data: mcpToolDetails, isFetched } = useAvailableToolsQuery(EModelEndpoint.agents, { + select: (data: TPlugin[]) => { + const mcpToolsMap = new Map(); + data.forEach((tool) => { + const isMCP = tool.pluginKey.includes(Constants.mcp_delimiter); + if (isMCP && tool.chatMenu !== false) { + const parts = tool.pluginKey.split(Constants.mcp_delimiter); + const serverName = parts[parts.length - 1]; + if (!mcpToolsMap.has(serverName)) { + mcpToolsMap.set(serverName, { + name: serverName, + pluginKey: tool.pluginKey, + authConfig: tool.authConfig, + authenticated: tool.authenticated, + }); + } + } + }); + return Array.from(mcpToolsMap.values()); + }, + }); + + const mcpState = useMemo(() => { + return ephemeralAgent?.mcp ?? 
[]; + }, [ephemeralAgent?.mcp]); + + const setSelectedValues = useCallback( + (values: string[] | null | undefined) => { + if (!values) { + return; + } + if (!Array.isArray(values)) { + return; + } + setEphemeralAgent((prev) => ({ + ...prev, + mcp: values, + })); + }, + [setEphemeralAgent], + ); + + const [mcpValues, setMCPValues] = useLocalStorage( + `${LocalStorageKeys.LAST_MCP_}${key}`, + mcpState, + setSelectedValues, + storageCondition, + ); + + const [isPinned, setIsPinned] = useLocalStorage( + `${LocalStorageKeys.PIN_MCP_}${key}`, + true, + ); + + useEffect(() => { + if (hasSetFetched.current === key) { + return; + } + if (!isFetched) { + return; + } + hasSetFetched.current = key; + if ((mcpToolDetails?.length ?? 0) > 0) { + setMCPValues(mcpValues.filter((mcp) => mcpToolDetails?.some((tool) => tool.name === mcp))); + return; + } + setMCPValues([]); + }, [isFetched, setMCPValues, mcpToolDetails, key, mcpValues]); + + const mcpServerNames = useMemo(() => { + return (mcpToolDetails ?? 
[]).map((tool) => tool.name); + }, [mcpToolDetails]); + + return { + mcpValues, + setMCPValues, + mcpServerNames, + ephemeralAgent, + mcpToolDetails, + setEphemeralAgent, + isPinned, + setIsPinned, + }; +} diff --git a/client/src/hooks/Plugins/useSearchApiKeyForm.ts b/client/src/hooks/Plugins/useSearchApiKeyForm.ts index 0044f1c0d0..86543e303a 100644 --- a/client/src/hooks/Plugins/useSearchApiKeyForm.ts +++ b/client/src/hooks/Plugins/useSearchApiKeyForm.ts @@ -1,4 +1,4 @@ -import { useState, useCallback } from 'react'; +import { useRef, useState, useCallback } from 'react'; import { useForm } from 'react-hook-form'; import useAuthSearchTool from '~/hooks/Plugins/useAuthSearchTool'; import type { SearchApiKeyFormData } from '~/hooks/Plugins/useAuthSearchTool'; @@ -11,6 +11,8 @@ export default function useSearchApiKeyForm({ onRevoke?: () => void; }) { const methods = useForm(); + const menuTriggerRef = useRef(null); + const badgeTriggerRef = useRef(null); const [isDialogOpen, setIsDialogOpen] = useState(false); const { installTool, removeTool } = useAuthSearchTool({ isEntityTool: true }); const { reset } = methods; @@ -38,5 +40,7 @@ export default function useSearchApiKeyForm({ setIsDialogOpen, handleRevokeApiKey, onSubmit: onSubmitHandler, + badgeTriggerRef, + menuTriggerRef, }; } diff --git a/client/src/hooks/Plugins/useToolToggle.ts b/client/src/hooks/Plugins/useToolToggle.ts new file mode 100644 index 0000000000..27b1ff284c --- /dev/null +++ b/client/src/hooks/Plugins/useToolToggle.ts @@ -0,0 +1,119 @@ +import { useRef, useEffect, useCallback, useMemo } from 'react'; +import { useRecoilState } from 'recoil'; +import debounce from 'lodash/debounce'; +import { Constants, LocalStorageKeys } from 'librechat-data-provider'; +import type { VerifyToolAuthResponse } from 'librechat-data-provider'; +import type { UseQueryOptions } from '@tanstack/react-query'; +import { useVerifyAgentToolAuth } from '~/data-provider'; +import useLocalStorage from 
'~/hooks/useLocalStorageAlt'; +import { ephemeralAgentByConvoId } from '~/store'; + +const storageCondition = (value: unknown, rawCurrentValue?: string | null) => { + if (rawCurrentValue) { + try { + const currentValue = rawCurrentValue?.trim() ?? ''; + if (currentValue === 'true' && value === false) { + return true; + } + } catch (e) { + console.error(e); + } + } + return value !== undefined && value !== null && value !== '' && value !== false; +}; + +interface UseToolToggleOptions { + conversationId?: string | null; + toolKey: string; + localStorageKey: LocalStorageKeys; + isAuthenticated?: boolean; + setIsDialogOpen?: (open: boolean) => void; + /** Options for auth verification */ + authConfig?: { + toolId: string; + queryOptions?: UseQueryOptions; + }; +} + +export function useToolToggle({ + conversationId, + toolKey, + localStorageKey, + isAuthenticated: externalIsAuthenticated, + setIsDialogOpen, + authConfig, +}: UseToolToggleOptions) { + const key = conversationId ?? Constants.NEW_CONVO; + const [ephemeralAgent, setEphemeralAgent] = useRecoilState(ephemeralAgentByConvoId(key)); + + const authQuery = useVerifyAgentToolAuth( + { toolId: authConfig?.toolId || '' }, + { + enabled: !!authConfig?.toolId, + ...authConfig?.queryOptions, + }, + ); + + const isAuthenticated = useMemo( + () => + externalIsAuthenticated ?? (authConfig ? (authQuery?.data?.authenticated ?? false) : false), + [externalIsAuthenticated, authConfig, authQuery.data?.authenticated], + ); + + const isToolEnabled = useMemo(() => { + return ephemeralAgent?.[toolKey] ?? 
false; + }, [ephemeralAgent, toolKey]); + + /** Track previous value to prevent infinite loops */ + const prevIsToolEnabled = useRef(isToolEnabled); + + const [toggleState, setToggleState] = useLocalStorage( + `${localStorageKey}${key}`, + isToolEnabled, + undefined, + storageCondition, + ); + + const [isPinned, setIsPinned] = useLocalStorage(`${localStorageKey}pinned`, false); + + const handleChange = useCallback( + ({ e, isChecked }: { e?: React.ChangeEvent; isChecked: boolean }) => { + if (isAuthenticated !== undefined && !isAuthenticated && setIsDialogOpen) { + setIsDialogOpen(true); + e?.preventDefault?.(); + return; + } + setToggleState(isChecked); + setEphemeralAgent((prev) => ({ + ...prev, + [toolKey]: isChecked, + })); + }, + [setToggleState, setIsDialogOpen, isAuthenticated, setEphemeralAgent, toolKey], + ); + + const debouncedChange = useMemo( + () => debounce(handleChange, 50, { leading: true }), + [handleChange], + ); + + useEffect(() => { + if (prevIsToolEnabled.current !== isToolEnabled) { + setToggleState(isToolEnabled); + } + prevIsToolEnabled.current = isToolEnabled; + }, [isToolEnabled, setToggleState]); + + return { + toggleState, + handleChange, + isToolEnabled, + setToggleState, + ephemeralAgent, + debouncedChange, + setEphemeralAgent, + authData: authQuery?.data, + isPinned, + setIsPinned, + }; +} diff --git a/client/src/hooks/SSE/useSSE.ts b/client/src/hooks/SSE/useSSE.ts index 7faad07e33..9e1cdf1d11 100644 --- a/client/src/hooks/SSE/useSSE.ts +++ b/client/src/hooks/SSE/useSSE.ts @@ -7,10 +7,8 @@ import { Constants, /* @ts-ignore */ createPayload, - isAgentsEndpoint, LocalStorageKeys, removeNullishValues, - isAssistantsEndpoint, } from 'librechat-data-provider'; import type { TMessage, TPayload, TSubmission, EventSubmission } from 'librechat-data-provider'; import type { EventHandlerParams } from './useEventHandlers'; @@ -100,9 +98,7 @@ export default function useSSE( const payloadData = createPayload(submission); let { payload } = 
payloadData; - if (isAssistantsEndpoint(payload.endpoint) || isAgentsEndpoint(payload.endpoint)) { - payload = removeNullishValues(payload) as TPayload; - } + payload = removeNullishValues(payload) as TPayload; let textIndex = null; diff --git a/e2e/specs/messages.spec.ts b/e2e/specs/messages.spec.ts index c418a6f49f..a19295bcda 100644 --- a/e2e/specs/messages.spec.ts +++ b/e2e/specs/messages.spec.ts @@ -12,9 +12,7 @@ function isUUID(uuid: string) { } const waitForServerStream = async (response: Response) => { - const endpointCheck = - response.url().includes(`/api/ask/${endpoint}`) || - response.url().includes(`/api/edit/${endpoint}`); + const endpointCheck = response.url().includes(`/api/agents`); return endpointCheck && response.status() === 200; }; diff --git a/package-lock.json b/package-lock.json index 0301bdce1b..b9718bb56c 100644 --- a/package-lock.json +++ b/package-lock.json @@ -26085,9 +26085,9 @@ } }, "node_modules/@typescript-eslint/typescript-estree/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", "dev": true, "license": "MIT", "dependencies": { @@ -27308,10 +27308,11 @@ "integrity": "sha512-AlcaJBi/pqqJBIQ8U9Mcpc9i8Aqxn88Skv5d+xBX006BY5u8N3mGLHa5Lgppa7L/HfwgwLgZ6NYs+Ag6uUmJRA==" }, "node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "version": "1.1.12", + "resolved": 
"https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", "dev": true, + "license": "MIT", "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" @@ -31075,10 +31076,11 @@ } }, "node_modules/filelist/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", "dev": true, + "license": "MIT", "dependencies": { "balanced-match": "^1.0.0" } @@ -31801,10 +31803,11 @@ "peer": true }, "node_modules/glob/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", "dev": true, + "license": "MIT", "dependencies": { "balanced-match": "^1.0.0" } @@ -43426,9 +43429,10 @@ } }, "node_modules/sucrase/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": 
"sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "license": "MIT", "dependencies": { "balanced-match": "^1.0.0" } @@ -46686,10 +46690,11 @@ } }, "packages/data-provider/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", "dev": true, + "license": "MIT", "dependencies": { "balanced-match": "^1.0.0" } diff --git a/packages/api/src/utils/index.ts b/packages/api/src/utils/index.ts index 7e08343ea8..e2cc1ab51b 100644 --- a/packages/api/src/utils/index.ts +++ b/packages/api/src/utils/index.ts @@ -4,5 +4,6 @@ export * from './common'; export * from './events'; export * from './files'; export * from './generators'; +export * from './llm'; export * from './openid'; export { default as Tokenizer } from './tokenizer'; diff --git a/packages/api/src/utils/llm.test.ts b/packages/api/src/utils/llm.test.ts new file mode 100644 index 0000000000..a7d18e0cf6 --- /dev/null +++ b/packages/api/src/utils/llm.test.ts @@ -0,0 +1,189 @@ +import { extractLibreChatParams } from './llm'; + +describe('extractLibreChatParams', () => { + it('should return defaults when options is undefined', () => { + const result = extractLibreChatParams(undefined); + + expect(result.resendFiles).toBe(true); + expect(result.promptPrefix).toBeUndefined(); + expect(result.maxContextTokens).toBeUndefined(); + expect(result.modelLabel).toBeUndefined(); + expect(result.modelOptions).toEqual({}); + }); + + it('should return defaults when options is null', () => { + const result = extractLibreChatParams(); + + 
expect(result.resendFiles).toBe(true); + expect(result.promptPrefix).toBeUndefined(); + expect(result.maxContextTokens).toBeUndefined(); + expect(result.modelLabel).toBeUndefined(); + expect(result.modelOptions).toEqual({}); + }); + + it('should extract all LibreChat params and leave model options', () => { + const options = { + resendFiles: false, + promptPrefix: 'You are a helpful assistant', + maxContextTokens: 4096, + modelLabel: 'GPT-4', + model: 'gpt-4', + temperature: 0.7, + max_tokens: 1000, + }; + + const result = extractLibreChatParams(options); + + expect(result.resendFiles).toBe(false); + expect(result.promptPrefix).toBe('You are a helpful assistant'); + expect(result.maxContextTokens).toBe(4096); + expect(result.modelLabel).toBe('GPT-4'); + expect(result.modelOptions).toEqual({ + model: 'gpt-4', + temperature: 0.7, + max_tokens: 1000, + }); + }); + + it('should handle null values for LibreChat params', () => { + const options = { + resendFiles: true, + promptPrefix: null, + maxContextTokens: 2048, + modelLabel: null, + model: 'claude-3', + }; + + const result = extractLibreChatParams(options); + + expect(result.resendFiles).toBe(true); + expect(result.promptPrefix).toBeNull(); + expect(result.maxContextTokens).toBe(2048); + expect(result.modelLabel).toBeNull(); + expect(result.modelOptions).toEqual({ + model: 'claude-3', + }); + }); + + it('should use default for resendFiles when not provided', () => { + const options = { + promptPrefix: 'Test prefix', + model: 'gpt-3.5-turbo', + temperature: 0.5, + }; + + const result = extractLibreChatParams(options); + + expect(result.resendFiles).toBe(true); // Should use default + expect(result.promptPrefix).toBe('Test prefix'); + expect(result.maxContextTokens).toBeUndefined(); + expect(result.modelLabel).toBeUndefined(); + expect(result.modelOptions).toEqual({ + model: 'gpt-3.5-turbo', + temperature: 0.5, + }); + }); + + it('should handle empty options object', () => { + const result = 
extractLibreChatParams({}); + + expect(result.resendFiles).toBe(true); // Should use default + expect(result.promptPrefix).toBeUndefined(); + expect(result.maxContextTokens).toBeUndefined(); + expect(result.modelLabel).toBeUndefined(); + expect(result.modelOptions).toEqual({}); + }); + + it('should only extract known LibreChat params', () => { + const options = { + resendFiles: false, + promptPrefix: 'Custom prompt', + maxContextTokens: 8192, + modelLabel: 'Custom Model', + // Model options + model: 'gpt-4', + temperature: 0.9, + top_p: 0.95, + frequency_penalty: 0.5, + presence_penalty: 0.5, + // Unknown params should stay in modelOptions + unknownParam: 'should remain', + customSetting: 123, + }; + + const result = extractLibreChatParams(options); + + // LibreChat params extracted + expect(result.resendFiles).toBe(false); + expect(result.promptPrefix).toBe('Custom prompt'); + expect(result.maxContextTokens).toBe(8192); + expect(result.modelLabel).toBe('Custom Model'); + + // Model options should include everything else + expect(result.modelOptions).toEqual({ + model: 'gpt-4', + temperature: 0.9, + top_p: 0.95, + frequency_penalty: 0.5, + presence_penalty: 0.5, + unknownParam: 'should remain', + customSetting: 123, + }); + }); + + it('should not mutate the original options object', () => { + const options = { + resendFiles: false, + promptPrefix: 'Test', + model: 'gpt-4', + temperature: 0.7, + }; + const originalOptions = { ...options }; + + extractLibreChatParams(options); + + // Original object should remain unchanged + expect(options).toEqual(originalOptions); + }); + + it('should handle undefined values for optional LibreChat params', () => { + const options = { + resendFiles: false, + promptPrefix: undefined, + maxContextTokens: undefined, + modelLabel: undefined, + model: 'claude-2', + }; + + const result = extractLibreChatParams(options); + + expect(result.resendFiles).toBe(false); + expect(result.promptPrefix).toBeUndefined(); + 
expect(result.maxContextTokens).toBeUndefined(); + expect(result.modelLabel).toBeUndefined(); + expect(result.modelOptions).toEqual({ + model: 'claude-2', + }); + }); + + it('should handle mixed null and undefined values', () => { + const options = { + promptPrefix: null, + maxContextTokens: undefined, + modelLabel: null, + model: 'gpt-3.5-turbo', + stop: ['\\n', '\\n\\n'], + }; + + const result = extractLibreChatParams(options); + + expect(result.resendFiles).toBe(true); // default + expect(result.promptPrefix).toBeNull(); + expect(result.maxContextTokens).toBeUndefined(); + expect(result.modelLabel).toBeNull(); + expect(result.modelOptions).toEqual({ + model: 'gpt-3.5-turbo', + stop: ['\\n', '\\n\\n'], + }); + }); +}); diff --git a/packages/api/src/utils/llm.ts b/packages/api/src/utils/llm.ts new file mode 100644 index 0000000000..65475c8059 --- /dev/null +++ b/packages/api/src/utils/llm.ts @@ -0,0 +1,47 @@ +import { librechat } from 'librechat-data-provider'; +import type { DynamicSettingProps } from 'librechat-data-provider'; + +type LibreChatKeys = keyof typeof librechat; + +type LibreChatParams = { + modelOptions: Omit, LibreChatKeys>; + resendFiles: boolean; + promptPrefix?: string | null; + maxContextTokens?: number; + modelLabel?: string | null; +}; + +/** + * Separates LibreChat-specific parameters from model options + * @param options - The combined options object + */ +export function extractLibreChatParams( + options?: DynamicSettingProps['conversation'], +): LibreChatParams { + if (!options) { + return { + modelOptions: {} as Omit, LibreChatKeys>, + resendFiles: librechat.resendFiles.default as boolean, + }; + } + + const modelOptions = { ...options }; + + const resendFiles = + (delete modelOptions.resendFiles, options.resendFiles) ?? 
+ (librechat.resendFiles.default as boolean); + const promptPrefix = (delete modelOptions.promptPrefix, options.promptPrefix); + const maxContextTokens = (delete modelOptions.maxContextTokens, options.maxContextTokens); + const modelLabel = (delete modelOptions.modelLabel, options.modelLabel); + + return { + modelOptions: modelOptions as Omit< + NonNullable, + LibreChatKeys + >, + maxContextTokens, + promptPrefix, + resendFiles, + modelLabel, + }; +} diff --git a/packages/data-provider/src/api-endpoints.ts b/packages/data-provider/src/api-endpoints.ts index 88a9d4a148..3d396b0ba0 100644 --- a/packages/data-provider/src/api-endpoints.ts +++ b/packages/data-provider/src/api-endpoints.ts @@ -70,8 +70,6 @@ export const revokeUserKey = (name: string) => `${keysEndpoint}/${name}`; export const revokeAllUserKeys = () => `${keysEndpoint}?all=true`; -export const abortRequest = (endpoint: string) => `/api/ask/${endpoint}/abort`; - export const conversationsRoot = '/api/convos'; export const conversations = (params: q.ConversationListParams) => { diff --git a/packages/data-provider/src/config.ts b/packages/data-provider/src/config.ts index 4d1c95b69f..c18deef1e1 100644 --- a/packages/data-provider/src/config.ts +++ b/packages/data-provider/src/config.ts @@ -940,18 +940,10 @@ export const initialModelsConfig: TModelsConfig = { [EModelEndpoint.bedrock]: defaultModels[EModelEndpoint.bedrock], }; -export const EndpointURLs: { [key in EModelEndpoint]: string } = { - [EModelEndpoint.openAI]: `/api/ask/${EModelEndpoint.openAI}`, - [EModelEndpoint.google]: `/api/ask/${EModelEndpoint.google}`, - [EModelEndpoint.custom]: `/api/ask/${EModelEndpoint.custom}`, - [EModelEndpoint.anthropic]: `/api/ask/${EModelEndpoint.anthropic}`, - [EModelEndpoint.gptPlugins]: `/api/ask/${EModelEndpoint.gptPlugins}`, - [EModelEndpoint.azureOpenAI]: `/api/ask/${EModelEndpoint.azureOpenAI}`, - [EModelEndpoint.chatGPTBrowser]: `/api/ask/${EModelEndpoint.chatGPTBrowser}`, - [EModelEndpoint.azureAssistants]: 
'/api/assistants/v1/chat', +export const EndpointURLs: Record = { [EModelEndpoint.assistants]: '/api/assistants/v2/chat', + [EModelEndpoint.azureAssistants]: '/api/assistants/v1/chat', [EModelEndpoint.agents]: `/api/${EModelEndpoint.agents}/chat`, - [EModelEndpoint.bedrock]: `/api/${EModelEndpoint.bedrock}/chat`, }; export const modularEndpoints = new Set([ @@ -1451,10 +1443,18 @@ export enum LocalStorageKeys { LAST_CODE_TOGGLE_ = 'LAST_CODE_TOGGLE_', /** Last checked toggle for Web Search per conversation ID */ LAST_WEB_SEARCH_TOGGLE_ = 'LAST_WEB_SEARCH_TOGGLE_', + /** Last checked toggle for File Search per conversation ID */ + LAST_FILE_SEARCH_TOGGLE_ = 'LAST_FILE_SEARCH_TOGGLE_', /** Key for the last selected agent provider */ LAST_AGENT_PROVIDER = 'lastAgentProvider', /** Key for the last selected agent model */ LAST_AGENT_MODEL = 'lastAgentModel', + /** Pin state for MCP tools per conversation ID */ + PIN_MCP_ = 'PIN_MCP_', + /** Pin state for Web Search per conversation ID */ + PIN_WEB_SEARCH_ = 'PIN_WEB_SEARCH_', + /** Pin state for Code Interpreter per conversation ID */ + PIN_CODE_INTERPRETER_ = 'PIN_CODE_INTERPRETER_', } export enum ForkOptions { diff --git a/packages/data-provider/src/createPayload.ts b/packages/data-provider/src/createPayload.ts index 1640d877d8..d00ac45ffc 100644 --- a/packages/data-provider/src/createPayload.ts +++ b/packages/data-provider/src/createPayload.ts @@ -13,27 +13,23 @@ export default function createPayload(submission: t.TSubmission) { ephemeralAgent, } = submission; const { conversationId } = s.tConvoUpdateSchema.parse(conversation); - const { endpoint: _e, endpointType } = endpointOption as { + const { endpoint: _e } = endpointOption as { endpoint: s.EModelEndpoint; endpointType?: s.EModelEndpoint; }; + const endpoint = _e as s.EModelEndpoint; - let server = EndpointURLs[endpointType ?? 
endpoint]; - const isEphemeral = s.isEphemeralAgent(endpoint, ephemeralAgent); + let server = `${EndpointURLs[s.EModelEndpoint.agents]}/${endpoint}`; if (isEdited && s.isAssistantsEndpoint(endpoint)) { server += '/modify'; - } else if (isEdited) { - server = server.replace('/ask/', '/edit/'); - } else if (isEphemeral) { - server = `${EndpointURLs[s.EModelEndpoint.agents]}/${endpoint}`; } const payload: t.TPayload = { ...userMessage, ...endpointOption, endpoint, - ephemeralAgent: isEphemeral ? ephemeralAgent : undefined, + ephemeralAgent: s.isAssistantsEndpoint(endpoint) ? undefined : ephemeralAgent, isContinued: !!(isEdited && isContinued), conversationId, isTemporary, diff --git a/packages/data-provider/src/data-service.ts b/packages/data-provider/src/data-service.ts index 08a666dd78..c76efbac87 100644 --- a/packages/data-provider/src/data-service.ts +++ b/packages/data-provider/src/data-service.ts @@ -11,14 +11,6 @@ import request from './request'; import * as s from './schemas'; import * as r from './roles'; -export function abortRequestWithMessage( - endpoint: string, - abortKey: string, - message: string, -): Promise { - return request.post(endpoints.abortRequest(endpoint), { arg: { abortKey, message } }); -} - export function revokeUserKey(name: string): Promise { return request.delete(endpoints.revokeUserKey(name)); } diff --git a/packages/data-provider/src/parameterSettings.ts b/packages/data-provider/src/parameterSettings.ts index 1098bbbb79..af79eb2c1f 100644 --- a/packages/data-provider/src/parameterSettings.ts +++ b/packages/data-provider/src/parameterSettings.ts @@ -83,7 +83,7 @@ const createDefinition = ( return { ...base, ...overrides } as SettingDefinition; }; -const librechat: Record = { +export const librechat = { modelLabel: { key: 'modelLabel', label: 'com_endpoint_custom_name', @@ -94,7 +94,7 @@ const librechat: Record = { placeholder: 'com_endpoint_openai_custom_name_placeholder', placeholderCode: true, optionType: 'conversation', - }, + } as 
const, maxContextTokens: { key: 'maxContextTokens', label: 'com_endpoint_context_tokens', @@ -107,7 +107,7 @@ const librechat: Record = { descriptionCode: true, optionType: 'model', columnSpan: 2, - }, + } as const, resendFiles: { key: 'resendFiles', label: 'com_endpoint_plug_resend_files', @@ -120,7 +120,7 @@ const librechat: Record = { optionType: 'conversation', showDefault: false, columnSpan: 2, - }, + } as const, promptPrefix: { key: 'promptPrefix', label: 'com_endpoint_prompt_prefix', @@ -131,7 +131,7 @@ const librechat: Record = { placeholder: 'com_endpoint_openai_prompt_prefix_placeholder', placeholderCode: true, optionType: 'model', - }, + } as const, }; const openAIParams: Record = { diff --git a/packages/data-provider/src/parsers.ts b/packages/data-provider/src/parsers.ts index 1c3eb511d0..774bc56173 100644 --- a/packages/data-provider/src/parsers.ts +++ b/packages/data-provider/src/parsers.ts @@ -275,15 +275,11 @@ export const getResponseSender = (endpointOption: t.TEndpointOption): string => if (endpoint === EModelEndpoint.google) { if (modelLabel) { return modelLabel; - } else if (model && (model.includes('gemini') || model.includes('learnlm'))) { - return 'Gemini'; } else if (model?.toLowerCase().includes('gemma') === true) { return 'Gemma'; - } else if (model && model.includes('code')) { - return 'Codey'; } - return 'PaLM2'; + return 'Gemini'; } if (endpoint === EModelEndpoint.custom || endpointType === EModelEndpoint.custom) { diff --git a/packages/data-provider/src/react-query/react-query-service.ts b/packages/data-provider/src/react-query/react-query-service.ts index 6e33f7ce53..7e3fe549a8 100644 --- a/packages/data-provider/src/react-query/react-query-service.ts +++ b/packages/data-provider/src/react-query/react-query-service.ts @@ -12,23 +12,6 @@ import { QueryKeys } from '../keys'; import * as s from '../schemas'; import * as t from '../types'; -export const useAbortRequestWithMessage = (): UseMutationResult< - void, - Error, - { endpoint: 
string; abortKey: string; message: string } -> => { - const queryClient = useQueryClient(); - return useMutation( - ({ endpoint, abortKey, message }) => - dataService.abortRequestWithMessage(endpoint, abortKey, message), - { - onSuccess: () => { - queryClient.invalidateQueries([QueryKeys.balance]); - }, - }, - ); -}; - export const useGetSharedMessages = ( shareId: string, config?: UseQueryOptions, diff --git a/packages/data-provider/src/schemas.ts b/packages/data-provider/src/schemas.ts index 149b10062c..89048cf158 100644 --- a/packages/data-provider/src/schemas.ts +++ b/packages/data-provider/src/schemas.ts @@ -3,7 +3,6 @@ import { Tools } from './types/assistants'; import type { TMessageContentParts, FunctionTool, FunctionToolCall } from './types/assistants'; import { TFeedback, feedbackSchema } from './feedback'; import type { SearchResultData } from './types/web'; -import type { TEphemeralAgent } from './types'; import type { TFile } from './types/files'; export const isUUID = z.string().uuid(); @@ -91,22 +90,6 @@ export const isAgentsEndpoint = (_endpoint?: EModelEndpoint.agents | null | stri return endpoint === EModelEndpoint.agents; }; -export const isEphemeralAgent = ( - endpoint?: EModelEndpoint.agents | null | string, - ephemeralAgent?: TEphemeralAgent | null, -) => { - if (!ephemeralAgent) { - return false; - } - if (isAgentsEndpoint(endpoint)) { - return false; - } - const hasMCPSelected = (ephemeralAgent?.mcp?.length ?? 0) > 0; - const hasCodeSelected = (ephemeralAgent?.execute_code ?? false) === true; - const hasSearchSelected = (ephemeralAgent?.web_search ?? 
false) === true; - return hasMCPSelected || hasCodeSelected || hasSearchSelected; -}; - export const isParamEndpoint = ( endpoint: EModelEndpoint | string, endpointType?: EModelEndpoint | string, diff --git a/packages/data-provider/src/types.ts b/packages/data-provider/src/types.ts index 275c405c1a..469c378aba 100644 --- a/packages/data-provider/src/types.ts +++ b/packages/data-provider/src/types.ts @@ -98,6 +98,7 @@ export type TEndpointOption = Pick< export type TEphemeralAgent = { mcp?: string[]; web_search?: boolean; + file_search?: boolean; execute_code?: boolean; }; From a058963a9fce44c2757aada2a6cdbadc1b0d5e91 Mon Sep 17 00:00:00 2001 From: Danny Avila Date: Mon, 23 Jun 2025 12:39:27 -0400 Subject: [PATCH 02/65] =?UTF-8?q?=F0=9F=91=A4=20feat:=20User=20Placeholder?= =?UTF-8?q?=20Variables=20for=20Custom=20Endpoint=20Headers=20(#7993)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * 🔧 refactor: move `processMCPEnv` from `librechat-data-provider` and move to `@librechat/api` * 🔧 refactor: Update resolveHeaders import paths * 🔧 refactor: Enhance resolveHeaders to support user and custom variables - Updated resolveHeaders function to accept user and custom user variables for placeholder replacement. - Modified header resolution in multiple client and controller files to utilize the enhanced resolveHeaders functionality. - Added comprehensive tests for resolveHeaders to ensure correct processing of user and custom variables. 
* 🔧 fix: Update user ID placeholder processing in env.ts * 🔧 fix: Remove arguments passing this.user rather than req.user - Updated multiple client and controller files to call resolveHeaders without the user parameter * 🔧 refactor: Enhance processUserPlaceholders to be more readable / less nested * 🔧 refactor: Update processUserPlaceholders to pass all tests in mpc.spec.ts and env.spec.ts * chore: remove legacy ChatGPTClient * chore: remove LLM initialization code * chore: initial deprecation removal of `gptPlugins` * chore: remove cohere-ai dependency from package.json and package-lock.json * chore: update brace-expansion to version 2.0.2 and add license information * chore: remove PluginsClient test file * chore: remove legacy * ci: remove deprecated sendMessage/getCompletion/chatCompletion tests --------- Co-authored-by: Dustin Healy <54083382+dustinhealy@users.noreply.github.com> --- .env.example | 2 +- api/app/clients/ChatGPTClient.js | 804 ------------------ api/app/clients/OpenAIClient.js | 22 +- api/app/clients/PluginsClient.js | 542 ------------ api/app/clients/index.js | 4 - api/app/clients/specs/OpenAIClient.test.js | 38 - api/app/clients/specs/PluginsClient.test.js | 314 ------- api/package.json | 1 - api/server/cleanup.js | 3 - api/server/controllers/agents/llm.js | 106 --- api/server/middleware/buildEndpointOption.js | 11 - api/server/routes/edit/gptPlugins.js | 207 ----- api/server/routes/edit/index.js | 2 - .../Endpoints/azureAssistants/initialize.js | 22 +- .../services/Endpoints/gptPlugins/build.js | 41 - .../services/Endpoints/gptPlugins/index.js | 7 - .../Endpoints/gptPlugins/initialize.js | 134 --- .../Endpoints/gptPlugins/initialize.spec.js | 410 --------- .../services/Endpoints/openAI/initialize.js | 13 +- api/server/services/initializeMCP.js | 5 +- api/typedefs.js | 1 - package-lock.json | 26 +- .../api/src/endpoints/openai/initialize.ts | 13 +- packages/api/src/mcp/manager.ts | 17 +- .../specs => api/src/mcp}/mcp.spec.ts | 10 +- 
packages/api/src/utils/env.spec.ts | 317 +++++++ packages/api/src/utils/env.ts | 170 ++++ packages/api/src/utils/index.ts | 1 + packages/data-provider/src/mcp.ts | 130 +-- packages/data-provider/src/parsers.ts | 13 - 30 files changed, 542 insertions(+), 2844 deletions(-) delete mode 100644 api/app/clients/ChatGPTClient.js delete mode 100644 api/app/clients/PluginsClient.js delete mode 100644 api/app/clients/specs/PluginsClient.test.js delete mode 100644 api/server/controllers/agents/llm.js delete mode 100644 api/server/routes/edit/gptPlugins.js delete mode 100644 api/server/services/Endpoints/gptPlugins/build.js delete mode 100644 api/server/services/Endpoints/gptPlugins/index.js delete mode 100644 api/server/services/Endpoints/gptPlugins/initialize.js delete mode 100644 api/server/services/Endpoints/gptPlugins/initialize.spec.js rename packages/{data-provider/specs => api/src/mcp}/mcp.spec.ts (99%) create mode 100644 packages/api/src/utils/env.spec.ts create mode 100644 packages/api/src/utils/env.ts diff --git a/.env.example b/.env.example index 876535b345..086938043b 100644 --- a/.env.example +++ b/.env.example @@ -58,7 +58,7 @@ DEBUG_CONSOLE=false # Endpoints # #===================================================# -# ENDPOINTS=openAI,assistants,azureOpenAI,google,gptPlugins,anthropic +# ENDPOINTS=openAI,assistants,azureOpenAI,google,anthropic PROXY= diff --git a/api/app/clients/ChatGPTClient.js b/api/app/clients/ChatGPTClient.js deleted file mode 100644 index 555028dc3f..0000000000 --- a/api/app/clients/ChatGPTClient.js +++ /dev/null @@ -1,804 +0,0 @@ -const { Keyv } = require('keyv'); -const crypto = require('crypto'); -const { CohereClient } = require('cohere-ai'); -const { fetchEventSource } = require('@waylaidwanderer/fetch-event-source'); -const { constructAzureURL, genAzureChatCompletion } = require('@librechat/api'); -const { encoding_for_model: encodingForModel, get_encoding: getEncoding } = require('tiktoken'); -const { - ImageDetail, - EModelEndpoint, 
- resolveHeaders, - CohereConstants, - mapModelToAzureConfig, -} = require('librechat-data-provider'); -const { createContextHandlers } = require('./prompts'); -const { createCoherePayload } = require('./llm'); -const { extractBaseURL } = require('~/utils'); -const BaseClient = require('./BaseClient'); -const { logger } = require('~/config'); - -const CHATGPT_MODEL = 'gpt-3.5-turbo'; -const tokenizersCache = {}; - -class ChatGPTClient extends BaseClient { - constructor(apiKey, options = {}, cacheOptions = {}) { - super(apiKey, options, cacheOptions); - - cacheOptions.namespace = cacheOptions.namespace || 'chatgpt'; - this.conversationsCache = new Keyv(cacheOptions); - this.setOptions(options); - } - - setOptions(options) { - if (this.options && !this.options.replaceOptions) { - // nested options aren't spread properly, so we need to do this manually - this.options.modelOptions = { - ...this.options.modelOptions, - ...options.modelOptions, - }; - delete options.modelOptions; - // now we can merge options - this.options = { - ...this.options, - ...options, - }; - } else { - this.options = options; - } - - if (this.options.openaiApiKey) { - this.apiKey = this.options.openaiApiKey; - } - - const modelOptions = this.options.modelOptions || {}; - this.modelOptions = { - ...modelOptions, - // set some good defaults (check for undefined in some cases because they may be 0) - model: modelOptions.model || CHATGPT_MODEL, - temperature: typeof modelOptions.temperature === 'undefined' ? 0.8 : modelOptions.temperature, - top_p: typeof modelOptions.top_p === 'undefined' ? 1 : modelOptions.top_p, - presence_penalty: - typeof modelOptions.presence_penalty === 'undefined' ? 
1 : modelOptions.presence_penalty, - stop: modelOptions.stop, - }; - - this.isChatGptModel = this.modelOptions.model.includes('gpt-'); - const { isChatGptModel } = this; - this.isUnofficialChatGptModel = - this.modelOptions.model.startsWith('text-chat') || - this.modelOptions.model.startsWith('text-davinci-002-render'); - const { isUnofficialChatGptModel } = this; - - // Davinci models have a max context length of 4097 tokens. - this.maxContextTokens = this.options.maxContextTokens || (isChatGptModel ? 4095 : 4097); - // I decided to reserve 1024 tokens for the response. - // The max prompt tokens is determined by the max context tokens minus the max response tokens. - // Earlier messages will be dropped until the prompt is within the limit. - this.maxResponseTokens = this.modelOptions.max_tokens || 1024; - this.maxPromptTokens = - this.options.maxPromptTokens || this.maxContextTokens - this.maxResponseTokens; - - if (this.maxPromptTokens + this.maxResponseTokens > this.maxContextTokens) { - throw new Error( - `maxPromptTokens + max_tokens (${this.maxPromptTokens} + ${this.maxResponseTokens} = ${ - this.maxPromptTokens + this.maxResponseTokens - }) must be less than or equal to maxContextTokens (${this.maxContextTokens})`, - ); - } - - this.userLabel = this.options.userLabel || 'User'; - this.chatGptLabel = this.options.chatGptLabel || 'ChatGPT'; - - if (isChatGptModel) { - // Use these faux tokens to help the AI understand the context since we are building the chat log ourselves. - // Trying to use "<|im_start|>" causes the AI to still generate "<" or "<|" at the end sometimes for some reason, - // without tripping the stop sequences, so I'm using "||>" instead. 
- this.startToken = '||>'; - this.endToken = ''; - this.gptEncoder = this.constructor.getTokenizer('cl100k_base'); - } else if (isUnofficialChatGptModel) { - this.startToken = '<|im_start|>'; - this.endToken = '<|im_end|>'; - this.gptEncoder = this.constructor.getTokenizer('text-davinci-003', true, { - '<|im_start|>': 100264, - '<|im_end|>': 100265, - }); - } else { - // Previously I was trying to use "<|endoftext|>" but there seems to be some bug with OpenAI's token counting - // system that causes only the first "<|endoftext|>" to be counted as 1 token, and the rest are not treated - // as a single token. So we're using this instead. - this.startToken = '||>'; - this.endToken = ''; - try { - this.gptEncoder = this.constructor.getTokenizer(this.modelOptions.model, true); - } catch { - this.gptEncoder = this.constructor.getTokenizer('text-davinci-003', true); - } - } - - if (!this.modelOptions.stop) { - const stopTokens = [this.startToken]; - if (this.endToken && this.endToken !== this.startToken) { - stopTokens.push(this.endToken); - } - stopTokens.push(`\n${this.userLabel}:`); - stopTokens.push('<|diff_marker|>'); - // I chose not to do one for `chatGptLabel` because I've never seen it happen - this.modelOptions.stop = stopTokens; - } - - if (this.options.reverseProxyUrl) { - this.completionsUrl = this.options.reverseProxyUrl; - } else if (isChatGptModel) { - this.completionsUrl = 'https://api.openai.com/v1/chat/completions'; - } else { - this.completionsUrl = 'https://api.openai.com/v1/completions'; - } - - return this; - } - - static getTokenizer(encoding, isModelName = false, extendSpecialTokens = {}) { - if (tokenizersCache[encoding]) { - return tokenizersCache[encoding]; - } - let tokenizer; - if (isModelName) { - tokenizer = encodingForModel(encoding, extendSpecialTokens); - } else { - tokenizer = getEncoding(encoding, extendSpecialTokens); - } - tokenizersCache[encoding] = tokenizer; - return tokenizer; - } - - /** @type {getCompletion} */ - async 
getCompletion(input, onProgress, onTokenProgress, abortController = null) { - if (!abortController) { - abortController = new AbortController(); - } - - let modelOptions = { ...this.modelOptions }; - if (typeof onProgress === 'function') { - modelOptions.stream = true; - } - if (this.isChatGptModel) { - modelOptions.messages = input; - } else { - modelOptions.prompt = input; - } - - if (this.useOpenRouter && modelOptions.prompt) { - delete modelOptions.stop; - } - - const { debug } = this.options; - let baseURL = this.completionsUrl; - if (debug) { - console.debug(); - console.debug(baseURL); - console.debug(modelOptions); - console.debug(); - } - - const opts = { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - }, - }; - - if (this.isVisionModel) { - modelOptions.max_tokens = 4000; - } - - /** @type {TAzureConfig | undefined} */ - const azureConfig = this.options?.req?.app?.locals?.[EModelEndpoint.azureOpenAI]; - - const isAzure = this.azure || this.options.azure; - if ( - (isAzure && this.isVisionModel && azureConfig) || - (azureConfig && this.isVisionModel && this.options.endpoint === EModelEndpoint.azureOpenAI) - ) { - const { modelGroupMap, groupMap } = azureConfig; - const { - azureOptions, - baseURL, - headers = {}, - serverless, - } = mapModelToAzureConfig({ - modelName: modelOptions.model, - modelGroupMap, - groupMap, - }); - opts.headers = resolveHeaders(headers); - this.langchainProxy = extractBaseURL(baseURL); - this.apiKey = azureOptions.azureOpenAIApiKey; - - const groupName = modelGroupMap[modelOptions.model].group; - this.options.addParams = azureConfig.groupMap[groupName].addParams; - this.options.dropParams = azureConfig.groupMap[groupName].dropParams; - // Note: `forcePrompt` not re-assigned as only chat models are vision models - - this.azure = !serverless && azureOptions; - this.azureEndpoint = - !serverless && genAzureChatCompletion(this.azure, modelOptions.model, this); - if (serverless === true) { - 
this.options.defaultQuery = azureOptions.azureOpenAIApiVersion - ? { 'api-version': azureOptions.azureOpenAIApiVersion } - : undefined; - this.options.headers['api-key'] = this.apiKey; - } - } - - if (this.options.defaultQuery) { - opts.defaultQuery = this.options.defaultQuery; - } - - if (this.options.headers) { - opts.headers = { ...opts.headers, ...this.options.headers }; - } - - if (isAzure) { - // Azure does not accept `model` in the body, so we need to remove it. - delete modelOptions.model; - - baseURL = this.langchainProxy - ? constructAzureURL({ - baseURL: this.langchainProxy, - azureOptions: this.azure, - }) - : this.azureEndpoint.split(/(? msg.role === 'system'); - - if (systemMessageIndex > 0) { - const [systemMessage] = messages.splice(systemMessageIndex, 1); - messages.unshift(systemMessage); - } - - modelOptions.messages = messages; - - if (messages.length === 1 && messages[0].role === 'system') { - modelOptions.messages[0].role = 'user'; - } - } - - if (this.options.addParams && typeof this.options.addParams === 'object') { - modelOptions = { - ...modelOptions, - ...this.options.addParams, - }; - logger.debug('[ChatGPTClient] chatCompletion: added params', { - addParams: this.options.addParams, - modelOptions, - }); - } - - if (this.options.dropParams && Array.isArray(this.options.dropParams)) { - this.options.dropParams.forEach((param) => { - delete modelOptions[param]; - }); - logger.debug('[ChatGPTClient] chatCompletion: dropped params', { - dropParams: this.options.dropParams, - modelOptions, - }); - } - - if (baseURL.startsWith(CohereConstants.API_URL)) { - const payload = createCoherePayload({ modelOptions }); - return await this.cohereChatCompletion({ payload, onTokenProgress }); - } - - if (baseURL.includes('v1') && !baseURL.includes('/completions') && !this.isChatCompletion) { - baseURL = baseURL.split('v1')[0] + 'v1/completions'; - } else if ( - baseURL.includes('v1') && - !baseURL.includes('/chat/completions') && - this.isChatCompletion - 
) { - baseURL = baseURL.split('v1')[0] + 'v1/chat/completions'; - } - - const BASE_URL = new URL(baseURL); - if (opts.defaultQuery) { - Object.entries(opts.defaultQuery).forEach(([key, value]) => { - BASE_URL.searchParams.append(key, value); - }); - delete opts.defaultQuery; - } - - const completionsURL = BASE_URL.toString(); - opts.body = JSON.stringify(modelOptions); - - if (modelOptions.stream) { - return new Promise(async (resolve, reject) => { - try { - let done = false; - await fetchEventSource(completionsURL, { - ...opts, - signal: abortController.signal, - async onopen(response) { - if (response.status === 200) { - return; - } - if (debug) { - console.debug(response); - } - let error; - try { - const body = await response.text(); - error = new Error(`Failed to send message. HTTP ${response.status} - ${body}`); - error.status = response.status; - error.json = JSON.parse(body); - } catch { - error = error || new Error(`Failed to send message. HTTP ${response.status}`); - } - throw error; - }, - onclose() { - if (debug) { - console.debug('Server closed the connection unexpectedly, returning...'); - } - // workaround for private API not sending [DONE] event - if (!done) { - onProgress('[DONE]'); - resolve(); - } - }, - onerror(err) { - if (debug) { - console.debug(err); - } - // rethrow to stop the operation - throw err; - }, - onmessage(message) { - if (debug) { - console.debug(message); - } - if (!message.data || message.event === 'ping') { - return; - } - if (message.data === '[DONE]') { - onProgress('[DONE]'); - resolve(); - done = true; - return; - } - onProgress(JSON.parse(message.data)); - }, - }); - } catch (err) { - reject(err); - } - }); - } - const response = await fetch(completionsURL, { - ...opts, - signal: abortController.signal, - }); - if (response.status !== 200) { - const body = await response.text(); - const error = new Error(`Failed to send message. 
HTTP ${response.status} - ${body}`); - error.status = response.status; - try { - error.json = JSON.parse(body); - } catch { - error.body = body; - } - throw error; - } - return response.json(); - } - - /** @type {cohereChatCompletion} */ - async cohereChatCompletion({ payload, onTokenProgress }) { - const cohere = new CohereClient({ - token: this.apiKey, - environment: this.completionsUrl, - }); - - if (!payload.stream) { - const chatResponse = await cohere.chat(payload); - return chatResponse.text; - } - - const chatStream = await cohere.chatStream(payload); - let reply = ''; - for await (const message of chatStream) { - if (!message) { - continue; - } - - if (message.eventType === 'text-generation' && message.text) { - onTokenProgress(message.text); - reply += message.text; - } - /* - Cohere API Chinese Unicode character replacement hotfix. - Should be un-commented when the following issue is resolved: - https://github.com/cohere-ai/cohere-typescript/issues/151 - - else if (message.eventType === 'stream-end' && message.response) { - reply = message.response.text; - } - */ - } - - return reply; - } - - async generateTitle(userMessage, botMessage) { - const instructionsPayload = { - role: 'system', - content: `Write an extremely concise subtitle for this conversation with no more than a few words. All words should be capitalized. Exclude punctuation. 
- -||>Message: -${userMessage.message} -||>Response: -${botMessage.message} - -||>Title:`, - }; - - const titleGenClientOptions = JSON.parse(JSON.stringify(this.options)); - titleGenClientOptions.modelOptions = { - model: 'gpt-3.5-turbo', - temperature: 0, - presence_penalty: 0, - frequency_penalty: 0, - }; - const titleGenClient = new ChatGPTClient(this.apiKey, titleGenClientOptions); - const result = await titleGenClient.getCompletion([instructionsPayload], null); - // remove any non-alphanumeric characters, replace multiple spaces with 1, and then trim - return result.choices[0].message.content - .replace(/[^a-zA-Z0-9' ]/g, '') - .replace(/\s+/g, ' ') - .trim(); - } - - async sendMessage(message, opts = {}) { - if (opts.clientOptions && typeof opts.clientOptions === 'object') { - this.setOptions(opts.clientOptions); - } - - const conversationId = opts.conversationId || crypto.randomUUID(); - const parentMessageId = opts.parentMessageId || crypto.randomUUID(); - - let conversation = - typeof opts.conversation === 'object' - ? opts.conversation - : await this.conversationsCache.get(conversationId); - - let isNewConversation = false; - if (!conversation) { - conversation = { - messages: [], - createdAt: Date.now(), - }; - isNewConversation = true; - } - - const shouldGenerateTitle = opts.shouldGenerateTitle && isNewConversation; - - const userMessage = { - id: crypto.randomUUID(), - parentMessageId, - role: 'User', - message, - }; - conversation.messages.push(userMessage); - - // Doing it this way instead of having each message be a separate element in the array seems to be more reliable, - // especially when it comes to keeping the AI in character. It also seems to improve coherency and context retention. 
- const { prompt: payload, context } = await this.buildPrompt( - conversation.messages, - userMessage.id, - { - isChatGptModel: this.isChatGptModel, - promptPrefix: opts.promptPrefix, - }, - ); - - if (this.options.keepNecessaryMessagesOnly) { - conversation.messages = context; - } - - let reply = ''; - let result = null; - if (typeof opts.onProgress === 'function') { - await this.getCompletion( - payload, - (progressMessage) => { - if (progressMessage === '[DONE]') { - return; - } - const token = this.isChatGptModel - ? progressMessage.choices[0].delta.content - : progressMessage.choices[0].text; - // first event's delta content is always undefined - if (!token) { - return; - } - if (this.options.debug) { - console.debug(token); - } - if (token === this.endToken) { - return; - } - opts.onProgress(token); - reply += token; - }, - opts.abortController || new AbortController(), - ); - } else { - result = await this.getCompletion( - payload, - null, - opts.abortController || new AbortController(), - ); - if (this.options.debug) { - console.debug(JSON.stringify(result)); - } - if (this.isChatGptModel) { - reply = result.choices[0].message.content; - } else { - reply = result.choices[0].text.replace(this.endToken, ''); - } - } - - // avoids some rendering issues when using the CLI app - if (this.options.debug) { - console.debug(); - } - - reply = reply.trim(); - - const replyMessage = { - id: crypto.randomUUID(), - parentMessageId: userMessage.id, - role: 'ChatGPT', - message: reply, - }; - conversation.messages.push(replyMessage); - - const returnData = { - response: replyMessage.message, - conversationId, - parentMessageId: replyMessage.parentMessageId, - messageId: replyMessage.id, - details: result || {}, - }; - - if (shouldGenerateTitle) { - conversation.title = await this.generateTitle(userMessage, replyMessage); - returnData.title = conversation.title; - } - - await this.conversationsCache.set(conversationId, conversation); - - if 
(this.options.returnConversation) { - returnData.conversation = conversation; - } - - return returnData; - } - - async buildPrompt(messages, { isChatGptModel = false, promptPrefix = null }) { - promptPrefix = (promptPrefix || this.options.promptPrefix || '').trim(); - - // Handle attachments and create augmentedPrompt - if (this.options.attachments) { - const attachments = await this.options.attachments; - const lastMessage = messages[messages.length - 1]; - - if (this.message_file_map) { - this.message_file_map[lastMessage.messageId] = attachments; - } else { - this.message_file_map = { - [lastMessage.messageId]: attachments, - }; - } - - const files = await this.addImageURLs(lastMessage, attachments); - this.options.attachments = files; - - this.contextHandlers = createContextHandlers(this.options.req, lastMessage.text); - } - - if (this.message_file_map) { - this.contextHandlers = createContextHandlers( - this.options.req, - messages[messages.length - 1].text, - ); - } - - // Calculate image token cost and process embedded files - messages.forEach((message, i) => { - if (this.message_file_map && this.message_file_map[message.messageId]) { - const attachments = this.message_file_map[message.messageId]; - for (const file of attachments) { - if (file.embedded) { - this.contextHandlers?.processFile(file); - continue; - } - - messages[i].tokenCount = - (messages[i].tokenCount || 0) + - this.calculateImageTokenCost({ - width: file.width, - height: file.height, - detail: this.options.imageDetail ?? ImageDetail.auto, - }); - } - } - }); - - if (this.contextHandlers) { - this.augmentedPrompt = await this.contextHandlers.createContext(); - promptPrefix = this.augmentedPrompt + promptPrefix; - } - - if (promptPrefix) { - // If the prompt prefix doesn't end with the end token, add it. 
- if (!promptPrefix.endsWith(`${this.endToken}`)) { - promptPrefix = `${promptPrefix.trim()}${this.endToken}\n\n`; - } - promptPrefix = `${this.startToken}Instructions:\n${promptPrefix}`; - } - const promptSuffix = `${this.startToken}${this.chatGptLabel}:\n`; // Prompt ChatGPT to respond. - - const instructionsPayload = { - role: 'system', - content: promptPrefix, - }; - - const messagePayload = { - role: 'system', - content: promptSuffix, - }; - - let currentTokenCount; - if (isChatGptModel) { - currentTokenCount = - this.getTokenCountForMessage(instructionsPayload) + - this.getTokenCountForMessage(messagePayload); - } else { - currentTokenCount = this.getTokenCount(`${promptPrefix}${promptSuffix}`); - } - let promptBody = ''; - const maxTokenCount = this.maxPromptTokens; - - const context = []; - - // Iterate backwards through the messages, adding them to the prompt until we reach the max token count. - // Do this within a recursive async function so that it doesn't block the event loop for too long. - const buildPromptBody = async () => { - if (currentTokenCount < maxTokenCount && messages.length > 0) { - const message = messages.pop(); - const roleLabel = - message?.isCreatedByUser || message?.role?.toLowerCase() === 'user' - ? this.userLabel - : this.chatGptLabel; - const messageString = `${this.startToken}${roleLabel}:\n${ - message?.text ?? message?.message - }${this.endToken}\n`; - let newPromptBody; - if (promptBody || isChatGptModel) { - newPromptBody = `${messageString}${promptBody}`; - } else { - // Always insert prompt prefix before the last user message, if not gpt-3.5-turbo. - // This makes the AI obey the prompt instructions better, which is important for custom instructions. - // After a bunch of testing, it doesn't seem to cause the AI any confusion, even if you ask it things - // like "what's the last thing I wrote?". 
- newPromptBody = `${promptPrefix}${messageString}${promptBody}`; - } - - context.unshift(message); - - const tokenCountForMessage = this.getTokenCount(messageString); - const newTokenCount = currentTokenCount + tokenCountForMessage; - if (newTokenCount > maxTokenCount) { - if (promptBody) { - // This message would put us over the token limit, so don't add it. - return false; - } - // This is the first message, so we can't add it. Just throw an error. - throw new Error( - `Prompt is too long. Max token count is ${maxTokenCount}, but prompt is ${newTokenCount} tokens long.`, - ); - } - promptBody = newPromptBody; - currentTokenCount = newTokenCount; - // wait for next tick to avoid blocking the event loop - await new Promise((resolve) => setImmediate(resolve)); - return buildPromptBody(); - } - return true; - }; - - await buildPromptBody(); - - const prompt = `${promptBody}${promptSuffix}`; - if (isChatGptModel) { - messagePayload.content = prompt; - // Add 3 tokens for Assistant Label priming after all messages have been counted. - currentTokenCount += 3; - } - - // Use up to `this.maxContextTokens` tokens (prompt + response), but try to leave `this.maxTokens` tokens for the response. - this.modelOptions.max_tokens = Math.min( - this.maxContextTokens - currentTokenCount, - this.maxResponseTokens, - ); - - if (isChatGptModel) { - return { prompt: [instructionsPayload, messagePayload], context }; - } - return { prompt, context, promptTokens: currentTokenCount }; - } - - getTokenCount(text) { - return this.gptEncoder.encode(text, 'all').length; - } - - /** - * Algorithm adapted from "6. Counting tokens for chat API calls" of - * https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb - * - * An additional 3 tokens need to be added for assistant label priming after all messages have been counted. - * - * @param {Object} message - */ - getTokenCountForMessage(message) { - // Note: gpt-3.5-turbo and gpt-4 may update over time. 
Use default for these as well as for unknown models - let tokensPerMessage = 3; - let tokensPerName = 1; - - if (this.modelOptions.model === 'gpt-3.5-turbo-0301') { - tokensPerMessage = 4; - tokensPerName = -1; - } - - let numTokens = tokensPerMessage; - for (let [key, value] of Object.entries(message)) { - numTokens += this.getTokenCount(value); - if (key === 'name') { - numTokens += tokensPerName; - } - } - - return numTokens; - } -} - -module.exports = ChatGPTClient; diff --git a/api/app/clients/OpenAIClient.js b/api/app/clients/OpenAIClient.js index 2d4146bd9c..2eda322640 100644 --- a/api/app/clients/OpenAIClient.js +++ b/api/app/clients/OpenAIClient.js @@ -5,6 +5,7 @@ const { isEnabled, Tokenizer, createFetch, + resolveHeaders, constructAzureURL, genAzureChatCompletion, createStreamEventHandlers, @@ -15,7 +16,6 @@ const { ContentTypes, parseTextParts, EModelEndpoint, - resolveHeaders, KnownEndpoints, openAISettings, ImageDetailCost, @@ -37,7 +37,6 @@ const { addSpaceIfNeeded, sleep } = require('~/server/utils'); const { spendTokens } = require('~/models/spendTokens'); const { handleOpenAIErrors } = require('./tools/util'); const { createLLM, RunManager } = require('./llm'); -const ChatGPTClient = require('./ChatGPTClient'); const { summaryBuffer } = require('./memory'); const { runTitleChain } = require('./chains'); const { tokenSplit } = require('./document'); @@ -47,12 +46,6 @@ const { logger } = require('~/config'); class OpenAIClient extends BaseClient { constructor(apiKey, options = {}) { super(apiKey, options); - this.ChatGPTClient = new ChatGPTClient(); - this.buildPrompt = this.ChatGPTClient.buildPrompt.bind(this); - /** @type {getCompletion} */ - this.getCompletion = this.ChatGPTClient.getCompletion.bind(this); - /** @type {cohereChatCompletion} */ - this.cohereChatCompletion = this.ChatGPTClient.cohereChatCompletion.bind(this); this.contextStrategy = options.contextStrategy ? 
options.contextStrategy.toLowerCase() : 'discard'; @@ -379,23 +372,12 @@ class OpenAIClient extends BaseClient { return files; } - async buildMessages( - messages, - parentMessageId, - { isChatCompletion = false, promptPrefix = null }, - opts, - ) { + async buildMessages(messages, parentMessageId, { promptPrefix = null }, opts) { let orderedMessages = this.constructor.getMessagesForConversation({ messages, parentMessageId, summary: this.shouldSummarize, }); - if (!isChatCompletion) { - return await this.buildPrompt(orderedMessages, { - isChatGptModel: isChatCompletion, - promptPrefix, - }); - } let payload; let instructions; diff --git a/api/app/clients/PluginsClient.js b/api/app/clients/PluginsClient.js deleted file mode 100644 index d0ffe2ef75..0000000000 --- a/api/app/clients/PluginsClient.js +++ /dev/null @@ -1,542 +0,0 @@ -const OpenAIClient = require('./OpenAIClient'); -const { CallbackManager } = require('@langchain/core/callbacks/manager'); -const { BufferMemory, ChatMessageHistory } = require('langchain/memory'); -const { addImages, buildErrorInput, buildPromptPrefix } = require('./output_parsers'); -const { initializeCustomAgent, initializeFunctionsAgent } = require('./agents'); -const { processFileURL } = require('~/server/services/Files/process'); -const { EModelEndpoint } = require('librechat-data-provider'); -const { checkBalance } = require('~/models/balanceMethods'); -const { formatLangChainMessages } = require('./prompts'); -const { extractBaseURL } = require('~/utils'); -const { loadTools } = require('./tools/util'); -const { logger } = require('~/config'); - -class PluginsClient extends OpenAIClient { - constructor(apiKey, options = {}) { - super(apiKey, options); - this.sender = options.sender ?? 
'Assistant'; - this.tools = []; - this.actions = []; - this.setOptions(options); - this.openAIApiKey = this.apiKey; - this.executor = null; - } - - setOptions(options) { - this.agentOptions = { ...options.agentOptions }; - this.functionsAgent = this.agentOptions?.agent === 'functions'; - this.agentIsGpt3 = this.agentOptions?.model?.includes('gpt-3'); - - super.setOptions(options); - - this.isGpt3 = this.modelOptions?.model?.includes('gpt-3'); - - if (this.options.reverseProxyUrl) { - this.langchainProxy = extractBaseURL(this.options.reverseProxyUrl); - } - } - - getSaveOptions() { - return { - artifacts: this.options.artifacts, - chatGptLabel: this.options.chatGptLabel, - modelLabel: this.options.modelLabel, - promptPrefix: this.options.promptPrefix, - tools: this.options.tools, - ...this.modelOptions, - agentOptions: this.agentOptions, - iconURL: this.options.iconURL, - greeting: this.options.greeting, - spec: this.options.spec, - }; - } - - saveLatestAction(action) { - this.actions.push(action); - } - - getFunctionModelName(input) { - if (/-(?!0314)\d{4}/.test(input)) { - return input; - } else if (input.includes('gpt-3.5-turbo')) { - return 'gpt-3.5-turbo'; - } else if (input.includes('gpt-4')) { - return 'gpt-4'; - } else { - return 'gpt-3.5-turbo'; - } - } - - getBuildMessagesOptions(opts) { - return { - isChatCompletion: true, - promptPrefix: opts.promptPrefix, - abortController: opts.abortController, - }; - } - - async initialize({ user, message, onAgentAction, onChainEnd, signal }) { - const modelOptions = { - modelName: this.agentOptions.model, - temperature: this.agentOptions.temperature, - }; - - const model = this.initializeLLM({ - ...modelOptions, - context: 'plugins', - initialMessageCount: this.currentMessages.length + 1, - }); - - logger.debug( - `[PluginsClient] Agent Model: ${model.modelName} | Temp: ${model.temperature} | Functions: ${this.functionsAgent}`, - ); - - // Map Messages to Langchain format - const pastMessages = 
formatLangChainMessages(this.currentMessages.slice(0, -1), { - userName: this.options?.name, - }); - logger.debug('[PluginsClient] pastMessages: ' + pastMessages.length); - - // TODO: use readOnly memory, TokenBufferMemory? (both unavailable in LangChainJS) - const memory = new BufferMemory({ - llm: model, - chatHistory: new ChatMessageHistory(pastMessages), - }); - - const { loadedTools } = await loadTools({ - user, - model, - tools: this.options.tools, - functions: this.functionsAgent, - options: { - memory, - signal: this.abortController.signal, - openAIApiKey: this.openAIApiKey, - conversationId: this.conversationId, - fileStrategy: this.options.req.app.locals.fileStrategy, - processFileURL, - message, - }, - useSpecs: true, - }); - - if (loadedTools.length === 0) { - return; - } - - this.tools = loadedTools; - - logger.debug('[PluginsClient] Requested Tools', this.options.tools); - logger.debug( - '[PluginsClient] Loaded Tools', - this.tools.map((tool) => tool.name), - ); - - const handleAction = (action, runId, callback = null) => { - this.saveLatestAction(action); - - logger.debug('[PluginsClient] Latest Agent Action ', this.actions[this.actions.length - 1]); - - if (typeof callback === 'function') { - callback(action, runId); - } - }; - - // initialize agent - const initializer = this.functionsAgent ? initializeFunctionsAgent : initializeCustomAgent; - - let customInstructions = (this.options.promptPrefix ?? '').trim(); - if (typeof this.options.artifactsPrompt === 'string' && this.options.artifactsPrompt) { - customInstructions = `${customInstructions ?? 
''}\n${this.options.artifactsPrompt}`.trim(); - } - - this.executor = await initializer({ - model, - signal, - pastMessages, - tools: this.tools, - customInstructions, - verbose: this.options.debug, - returnIntermediateSteps: true, - customName: this.options.chatGptLabel, - currentDateString: this.currentDateString, - callbackManager: CallbackManager.fromHandlers({ - async handleAgentAction(action, runId) { - handleAction(action, runId, onAgentAction); - }, - async handleChainEnd(action) { - if (typeof onChainEnd === 'function') { - onChainEnd(action); - } - }, - }), - }); - - logger.debug('[PluginsClient] Loaded agent.'); - } - - async executorCall(message, { signal, stream, onToolStart, onToolEnd }) { - let errorMessage = ''; - const maxAttempts = 1; - - for (let attempts = 1; attempts <= maxAttempts; attempts++) { - const errorInput = buildErrorInput({ - message, - errorMessage, - actions: this.actions, - functionsAgent: this.functionsAgent, - }); - const input = attempts > 1 ? errorInput : message; - - logger.debug(`[PluginsClient] Attempt ${attempts} of ${maxAttempts}`); - - if (errorMessage.length > 0) { - logger.debug('[PluginsClient] Caught error, input: ' + JSON.stringify(input)); - } - - try { - this.result = await this.executor.call({ input, signal }, [ - { - async handleToolStart(...args) { - await onToolStart(...args); - }, - async handleToolEnd(...args) { - await onToolEnd(...args); - }, - async handleLLMEnd(output) { - const { generations } = output; - const { text } = generations[0][0]; - if (text && typeof stream === 'function') { - await stream(text); - } - }, - }, - ]); - break; // Exit the loop if the function call is successful - } catch (err) { - logger.error('[PluginsClient] executorCall error:', err); - if (attempts === maxAttempts) { - const { run } = this.runManager.getRunByConversationId(this.conversationId); - const defaultOutput = `Encountered an error while attempting to respond: ${err.message}`; - this.result.output = run && run.error 
? run.error : defaultOutput; - this.result.errorMessage = run && run.error ? run.error : err.message; - this.result.intermediateSteps = this.actions; - break; - } - } - } - } - - /** - * - * @param {TMessage} responseMessage - * @param {Partial} saveOptions - * @param {string} user - * @returns - */ - async handleResponseMessage(responseMessage, saveOptions, user) { - const { output, errorMessage, ...result } = this.result; - logger.debug('[PluginsClient][handleResponseMessage] Output:', { - output, - errorMessage, - ...result, - }); - const { error } = responseMessage; - if (!error) { - responseMessage.tokenCount = this.getTokenCountForResponse(responseMessage); - responseMessage.completionTokens = this.getTokenCount(responseMessage.text); - } - - // Record usage only when completion is skipped as it is already recorded in the agent phase. - if (!this.agentOptions.skipCompletion && !error) { - await this.recordTokenUsage(responseMessage); - } - - const databasePromise = this.saveMessageToDatabase(responseMessage, saveOptions, user); - delete responseMessage.tokenCount; - return { ...responseMessage, ...result, databasePromise }; - } - - async sendMessage(message, opts = {}) { - /** @type {Promise} */ - let userMessagePromise; - /** @type {{ filteredTools: string[], includedTools: string[] }} */ - const { filteredTools = [], includedTools = [] } = this.options.req.app.locals; - - if (includedTools.length > 0) { - const tools = this.options.tools.filter((plugin) => includedTools.includes(plugin)); - this.options.tools = tools; - } else { - const tools = this.options.tools.filter((plugin) => !filteredTools.includes(plugin)); - this.options.tools = tools; - } - - // If a message is edited, no tools can be used. 
- const completionMode = this.options.tools.length === 0 || opts.isEdited; - if (completionMode) { - this.setOptions(opts); - return super.sendMessage(message, opts); - } - - logger.debug('[PluginsClient] sendMessage', { userMessageText: message, opts }); - const { - user, - conversationId, - responseMessageId, - saveOptions, - userMessage, - onAgentAction, - onChainEnd, - onToolStart, - onToolEnd, - } = await this.handleStartMethods(message, opts); - - if (opts.progressCallback) { - opts.onProgress = opts.progressCallback.call(null, { - ...(opts.progressOptions ?? {}), - parentMessageId: userMessage.messageId, - messageId: responseMessageId, - }); - } - - this.currentMessages.push(userMessage); - - let { - prompt: payload, - tokenCountMap, - promptTokens, - } = await this.buildMessages( - this.currentMessages, - userMessage.messageId, - this.getBuildMessagesOptions({ - promptPrefix: null, - abortController: this.abortController, - }), - ); - - if (tokenCountMap) { - logger.debug('[PluginsClient] tokenCountMap', { tokenCountMap }); - if (tokenCountMap[userMessage.messageId]) { - userMessage.tokenCount = tokenCountMap[userMessage.messageId]; - logger.debug('[PluginsClient] userMessage.tokenCount', userMessage.tokenCount); - } - this.handleTokenCountMap(tokenCountMap); - } - - this.result = {}; - if (payload) { - this.currentMessages = payload; - } - - if (!this.skipSaveUserMessage) { - userMessagePromise = this.saveMessageToDatabase(userMessage, saveOptions, user); - if (typeof opts?.getReqData === 'function') { - opts.getReqData({ - userMessagePromise, - }); - } - } - - const balance = this.options.req?.app?.locals?.balance; - if (balance?.enabled) { - await checkBalance({ - req: this.options.req, - res: this.options.res, - txData: { - user: this.user, - tokenType: 'prompt', - amount: promptTokens, - debug: this.options.debug, - model: this.modelOptions.model, - endpoint: EModelEndpoint.openAI, - }, - }); - } - - const responseMessage = { - endpoint: 
EModelEndpoint.gptPlugins, - iconURL: this.options.iconURL, - messageId: responseMessageId, - conversationId, - parentMessageId: userMessage.messageId, - isCreatedByUser: false, - model: this.modelOptions.model, - sender: this.sender, - promptTokens, - }; - - await this.initialize({ - user, - message, - onAgentAction, - onChainEnd, - signal: this.abortController.signal, - onProgress: opts.onProgress, - }); - - // const stream = async (text) => { - // await this.generateTextStream.call(this, text, opts.onProgress, { delay: 1 }); - // }; - await this.executorCall(message, { - signal: this.abortController.signal, - // stream, - onToolStart, - onToolEnd, - }); - - // If message was aborted mid-generation - if (this.result?.errorMessage?.length > 0 && this.result?.errorMessage?.includes('cancel')) { - responseMessage.text = 'Cancelled.'; - return await this.handleResponseMessage(responseMessage, saveOptions, user); - } - - // If error occurred during generation (likely token_balance) - if (this.result?.errorMessage?.length > 0) { - responseMessage.error = true; - responseMessage.text = this.result.output; - return await this.handleResponseMessage(responseMessage, saveOptions, user); - } - - if (this.agentOptions.skipCompletion && this.result.output && this.functionsAgent) { - const partialText = opts.getPartialText(); - const trimmedPartial = opts.getPartialText().replaceAll(':::plugin:::\n', ''); - responseMessage.text = - trimmedPartial.length === 0 ? 
`${partialText}${this.result.output}` : partialText; - addImages(this.result.intermediateSteps, responseMessage); - await this.generateTextStream(this.result.output, opts.onProgress, { delay: 5 }); - return await this.handleResponseMessage(responseMessage, saveOptions, user); - } - - if (this.agentOptions.skipCompletion && this.result.output) { - responseMessage.text = this.result.output; - addImages(this.result.intermediateSteps, responseMessage); - await this.generateTextStream(this.result.output, opts.onProgress, { delay: 5 }); - return await this.handleResponseMessage(responseMessage, saveOptions, user); - } - - logger.debug('[PluginsClient] Completion phase: this.result', this.result); - - const promptPrefix = buildPromptPrefix({ - result: this.result, - message, - functionsAgent: this.functionsAgent, - }); - - logger.debug('[PluginsClient]', { promptPrefix }); - - payload = await this.buildCompletionPrompt({ - messages: this.currentMessages, - promptPrefix, - }); - - logger.debug('[PluginsClient] buildCompletionPrompt Payload', payload); - responseMessage.text = await this.sendCompletion(payload, opts); - return await this.handleResponseMessage(responseMessage, saveOptions, user); - } - - async buildCompletionPrompt({ messages, promptPrefix: _promptPrefix }) { - logger.debug('[PluginsClient] buildCompletionPrompt messages', messages); - - const orderedMessages = messages; - let promptPrefix = _promptPrefix.trim(); - // If the prompt prefix doesn't end with the end token, add it. - if (!promptPrefix.endsWith(`${this.endToken}`)) { - promptPrefix = `${promptPrefix.trim()}${this.endToken}\n\n`; - } - promptPrefix = `${this.startToken}Instructions:\n${promptPrefix}`; - const promptSuffix = `${this.startToken}${this.chatGptLabel ?? 
'Assistant'}:\n`; - - const instructionsPayload = { - role: 'system', - content: promptPrefix, - }; - - const messagePayload = { - role: 'system', - content: promptSuffix, - }; - - if (this.isGpt3) { - instructionsPayload.role = 'user'; - messagePayload.role = 'user'; - instructionsPayload.content += `\n${promptSuffix}`; - } - - // testing if this works with browser endpoint - if (!this.isGpt3 && this.options.reverseProxyUrl) { - instructionsPayload.role = 'user'; - } - - let currentTokenCount = - this.getTokenCountForMessage(instructionsPayload) + - this.getTokenCountForMessage(messagePayload); - - let promptBody = ''; - const maxTokenCount = this.maxPromptTokens; - // Iterate backwards through the messages, adding them to the prompt until we reach the max token count. - // Do this within a recursive async function so that it doesn't block the event loop for too long. - const buildPromptBody = async () => { - if (currentTokenCount < maxTokenCount && orderedMessages.length > 0) { - const message = orderedMessages.pop(); - const isCreatedByUser = message.isCreatedByUser || message.role?.toLowerCase() === 'user'; - const roleLabel = isCreatedByUser ? this.userLabel : this.chatGptLabel; - let messageString = `${this.startToken}${roleLabel}:\n${ - message.text ?? message.content ?? '' - }${this.endToken}\n`; - let newPromptBody = `${messageString}${promptBody}`; - - const tokenCountForMessage = this.getTokenCount(messageString); - const newTokenCount = currentTokenCount + tokenCountForMessage; - if (newTokenCount > maxTokenCount) { - if (promptBody) { - // This message would put us over the token limit, so don't add it. - return false; - } - // This is the first message, so we can't add it. Just throw an error. - throw new Error( - `Prompt is too long. 
Max token count is ${maxTokenCount}, but prompt is ${newTokenCount} tokens long.`, - ); - } - promptBody = newPromptBody; - currentTokenCount = newTokenCount; - // wait for next tick to avoid blocking the event loop - await new Promise((resolve) => setTimeout(resolve, 0)); - return buildPromptBody(); - } - return true; - }; - - await buildPromptBody(); - const prompt = promptBody; - messagePayload.content = prompt; - // Add 2 tokens for metadata after all messages have been counted. - currentTokenCount += 2; - - if (this.isGpt3 && messagePayload.content.length > 0) { - const context = 'Chat History:\n'; - messagePayload.content = `${context}${prompt}`; - currentTokenCount += this.getTokenCount(context); - } - - // Use up to `this.maxContextTokens` tokens (prompt + response), but try to leave `this.maxTokens` tokens for the response. - this.modelOptions.max_tokens = Math.min( - this.maxContextTokens - currentTokenCount, - this.maxResponseTokens, - ); - - if (this.isGpt3) { - messagePayload.content += promptSuffix; - return [instructionsPayload, messagePayload]; - } - - const result = [messagePayload, instructionsPayload]; - - if (this.functionsAgent && !this.isGpt3) { - result[1].content = `${result[1].content}\n${this.startToken}${this.chatGptLabel}:\nSure thing! 
Here is the output you requested:\n`; - } - - return result.filter((message) => message.content.length > 0); - } -} - -module.exports = PluginsClient; diff --git a/api/app/clients/index.js b/api/app/clients/index.js index a5e8eee504..d8b2bae27b 100644 --- a/api/app/clients/index.js +++ b/api/app/clients/index.js @@ -1,15 +1,11 @@ -const ChatGPTClient = require('./ChatGPTClient'); const OpenAIClient = require('./OpenAIClient'); -const PluginsClient = require('./PluginsClient'); const GoogleClient = require('./GoogleClient'); const TextStream = require('./TextStream'); const AnthropicClient = require('./AnthropicClient'); const toolUtils = require('./tools/util'); module.exports = { - ChatGPTClient, OpenAIClient, - PluginsClient, GoogleClient, TextStream, AnthropicClient, diff --git a/api/app/clients/specs/OpenAIClient.test.js b/api/app/clients/specs/OpenAIClient.test.js index cc4aa84d5d..efca66a867 100644 --- a/api/app/clients/specs/OpenAIClient.test.js +++ b/api/app/clients/specs/OpenAIClient.test.js @@ -531,44 +531,6 @@ describe('OpenAIClient', () => { }); }); - describe('sendMessage/getCompletion/chatCompletion', () => { - afterEach(() => { - delete process.env.AZURE_OPENAI_DEFAULT_MODEL; - delete process.env.AZURE_USE_MODEL_AS_DEPLOYMENT_NAME; - }); - - it('should call getCompletion and fetchEventSource when using a text/instruct model', async () => { - const model = 'text-davinci-003'; - const onProgress = jest.fn().mockImplementation(() => ({})); - - const testClient = new OpenAIClient('test-api-key', { - ...defaultOptions, - modelOptions: { model }, - }); - - const getCompletion = jest.spyOn(testClient, 'getCompletion'); - await testClient.sendMessage('Hi mom!', { onProgress }); - - expect(getCompletion).toHaveBeenCalled(); - expect(getCompletion.mock.calls.length).toBe(1); - - expect(getCompletion.mock.calls[0][0]).toBe('||>User:\nHi mom!\n||>Assistant:\n'); - - expect(fetchEventSource).toHaveBeenCalled(); - 
expect(fetchEventSource.mock.calls.length).toBe(1); - - // Check if the first argument (url) is correct - const firstCallArgs = fetchEventSource.mock.calls[0]; - - const expectedURL = 'https://api.openai.com/v1/completions'; - expect(firstCallArgs[0]).toBe(expectedURL); - - const requestBody = JSON.parse(firstCallArgs[1].body); - expect(requestBody).toHaveProperty('model'); - expect(requestBody.model).toBe(model); - }); - }); - describe('checkVisionRequest functionality', () => { let client; const attachments = [{ type: 'image/png' }]; diff --git a/api/app/clients/specs/PluginsClient.test.js b/api/app/clients/specs/PluginsClient.test.js deleted file mode 100644 index 4928acefd1..0000000000 --- a/api/app/clients/specs/PluginsClient.test.js +++ /dev/null @@ -1,314 +0,0 @@ -const crypto = require('crypto'); -const { Constants } = require('librechat-data-provider'); -const { HumanMessage, AIMessage } = require('@langchain/core/messages'); -const PluginsClient = require('../PluginsClient'); - -jest.mock('~/db/connect'); -jest.mock('~/models/Conversation', () => { - return function () { - return { - save: jest.fn(), - deleteConvos: jest.fn(), - }; - }; -}); - -const defaultAzureOptions = { - azureOpenAIApiInstanceName: 'your-instance-name', - azureOpenAIApiDeploymentName: 'your-deployment-name', - azureOpenAIApiVersion: '2020-07-01-preview', -}; - -describe('PluginsClient', () => { - let TestAgent; - let options = { - tools: [], - modelOptions: { - model: 'gpt-3.5-turbo', - temperature: 0, - max_tokens: 2, - }, - agentOptions: { - model: 'gpt-3.5-turbo', - }, - }; - let parentMessageId; - let conversationId; - const fakeMessages = []; - const userMessage = 'Hello, ChatGPT!'; - const apiKey = 'fake-api-key'; - - beforeEach(() => { - TestAgent = new PluginsClient(apiKey, options); - TestAgent.loadHistory = jest - .fn() - .mockImplementation((conversationId, parentMessageId = null) => { - if (!conversationId) { - TestAgent.currentMessages = []; - return Promise.resolve([]); 
- } - - const orderedMessages = TestAgent.constructor.getMessagesForConversation({ - messages: fakeMessages, - parentMessageId, - }); - - const chatMessages = orderedMessages.map((msg) => - msg?.isCreatedByUser || msg?.role?.toLowerCase() === 'user' - ? new HumanMessage(msg.text) - : new AIMessage(msg.text), - ); - - TestAgent.currentMessages = orderedMessages; - return Promise.resolve(chatMessages); - }); - TestAgent.sendMessage = jest.fn().mockImplementation(async (message, opts = {}) => { - if (opts && typeof opts === 'object') { - TestAgent.setOptions(opts); - } - const conversationId = opts.conversationId || crypto.randomUUID(); - const parentMessageId = opts.parentMessageId || Constants.NO_PARENT; - const userMessageId = opts.overrideParentMessageId || crypto.randomUUID(); - this.pastMessages = await TestAgent.loadHistory( - conversationId, - TestAgent.options?.parentMessageId, - ); - - const userMessage = { - text: message, - sender: 'ChatGPT', - isCreatedByUser: true, - messageId: userMessageId, - parentMessageId, - conversationId, - }; - - const response = { - sender: 'ChatGPT', - text: 'Hello, User!', - isCreatedByUser: false, - messageId: crypto.randomUUID(), - parentMessageId: userMessage.messageId, - conversationId, - }; - - fakeMessages.push(userMessage); - fakeMessages.push(response); - return response; - }); - }); - - test('initializes PluginsClient without crashing', () => { - expect(TestAgent).toBeInstanceOf(PluginsClient); - }); - - test('check setOptions function', () => { - expect(TestAgent.agentIsGpt3).toBe(true); - }); - - describe('sendMessage', () => { - test('sendMessage should return a response message', async () => { - const expectedResult = expect.objectContaining({ - sender: 'ChatGPT', - text: expect.any(String), - isCreatedByUser: false, - messageId: expect.any(String), - parentMessageId: expect.any(String), - conversationId: expect.any(String), - }); - - const response = await TestAgent.sendMessage(userMessage); - parentMessageId = 
response.messageId; - conversationId = response.conversationId; - expect(response).toEqual(expectedResult); - }); - - test('sendMessage should work with provided conversationId and parentMessageId', async () => { - const userMessage = 'Second message in the conversation'; - const opts = { - conversationId, - parentMessageId, - }; - - const expectedResult = expect.objectContaining({ - sender: 'ChatGPT', - text: expect.any(String), - isCreatedByUser: false, - messageId: expect.any(String), - parentMessageId: expect.any(String), - conversationId: opts.conversationId, - }); - - const response = await TestAgent.sendMessage(userMessage, opts); - parentMessageId = response.messageId; - expect(response.conversationId).toEqual(conversationId); - expect(response).toEqual(expectedResult); - }); - - test('should return chat history', async () => { - const chatMessages = await TestAgent.loadHistory(conversationId, parentMessageId); - expect(TestAgent.currentMessages).toHaveLength(4); - expect(chatMessages[0].text).toEqual(userMessage); - }); - }); - - describe('getFunctionModelName', () => { - let client; - - beforeEach(() => { - client = new PluginsClient('dummy_api_key'); - }); - - test('should return the input when it includes a dash followed by four digits', () => { - expect(client.getFunctionModelName('-1234')).toBe('-1234'); - expect(client.getFunctionModelName('gpt-4-5678-preview')).toBe('gpt-4-5678-preview'); - }); - - test('should return the input for all function-capable models (`0613` models and above)', () => { - expect(client.getFunctionModelName('gpt-4-0613')).toBe('gpt-4-0613'); - expect(client.getFunctionModelName('gpt-4-32k-0613')).toBe('gpt-4-32k-0613'); - expect(client.getFunctionModelName('gpt-3.5-turbo-0613')).toBe('gpt-3.5-turbo-0613'); - expect(client.getFunctionModelName('gpt-3.5-turbo-16k-0613')).toBe('gpt-3.5-turbo-16k-0613'); - expect(client.getFunctionModelName('gpt-3.5-turbo-1106')).toBe('gpt-3.5-turbo-1106'); - 
expect(client.getFunctionModelName('gpt-4-1106-preview')).toBe('gpt-4-1106-preview'); - expect(client.getFunctionModelName('gpt-4-1106')).toBe('gpt-4-1106'); - }); - - test('should return the corresponding model if input is non-function capable (`0314` models)', () => { - expect(client.getFunctionModelName('gpt-4-0314')).toBe('gpt-4'); - expect(client.getFunctionModelName('gpt-4-32k-0314')).toBe('gpt-4'); - expect(client.getFunctionModelName('gpt-3.5-turbo-0314')).toBe('gpt-3.5-turbo'); - expect(client.getFunctionModelName('gpt-3.5-turbo-16k-0314')).toBe('gpt-3.5-turbo'); - }); - - test('should return "gpt-3.5-turbo" when the input includes "gpt-3.5-turbo"', () => { - expect(client.getFunctionModelName('test gpt-3.5-turbo model')).toBe('gpt-3.5-turbo'); - }); - - test('should return "gpt-4" when the input includes "gpt-4"', () => { - expect(client.getFunctionModelName('testing gpt-4')).toBe('gpt-4'); - }); - - test('should return "gpt-3.5-turbo" for input that does not meet any specific condition', () => { - expect(client.getFunctionModelName('random string')).toBe('gpt-3.5-turbo'); - expect(client.getFunctionModelName('')).toBe('gpt-3.5-turbo'); - }); - }); - - describe('Azure OpenAI tests specific to Plugins', () => { - // TODO: add more tests for Azure OpenAI integration with Plugins - // let client; - // beforeEach(() => { - // client = new PluginsClient('dummy_api_key'); - // }); - - test('should not call getFunctionModelName when azure options are set', () => { - const spy = jest.spyOn(PluginsClient.prototype, 'getFunctionModelName'); - const model = 'gpt-4-turbo'; - - // note, without the azure change in PR #1766, `getFunctionModelName` is called twice - const testClient = new PluginsClient('dummy_api_key', { - agentOptions: { - model, - agent: 'functions', - }, - azure: defaultAzureOptions, - }); - - expect(spy).not.toHaveBeenCalled(); - expect(testClient.agentOptions.model).toBe(model); - - spy.mockRestore(); - }); - }); - - describe('sendMessage with 
filtered tools', () => { - let TestAgent; - const apiKey = 'fake-api-key'; - const mockTools = [{ name: 'tool1' }, { name: 'tool2' }, { name: 'tool3' }, { name: 'tool4' }]; - - beforeEach(() => { - TestAgent = new PluginsClient(apiKey, { - tools: mockTools, - modelOptions: { - model: 'gpt-3.5-turbo', - temperature: 0, - max_tokens: 2, - }, - agentOptions: { - model: 'gpt-3.5-turbo', - }, - }); - - TestAgent.options.req = { - app: { - locals: {}, - }, - }; - - TestAgent.sendMessage = jest.fn().mockImplementation(async () => { - const { filteredTools = [], includedTools = [] } = TestAgent.options.req.app.locals; - - if (includedTools.length > 0) { - const tools = TestAgent.options.tools.filter((plugin) => - includedTools.includes(plugin.name), - ); - TestAgent.options.tools = tools; - } else { - const tools = TestAgent.options.tools.filter( - (plugin) => !filteredTools.includes(plugin.name), - ); - TestAgent.options.tools = tools; - } - - return { - text: 'Mocked response', - tools: TestAgent.options.tools, - }; - }); - }); - - test('should filter out tools when filteredTools is provided', async () => { - TestAgent.options.req.app.locals.filteredTools = ['tool1', 'tool3']; - const response = await TestAgent.sendMessage('Test message'); - expect(response.tools).toHaveLength(2); - expect(response.tools).toEqual( - expect.arrayContaining([ - expect.objectContaining({ name: 'tool2' }), - expect.objectContaining({ name: 'tool4' }), - ]), - ); - }); - - test('should only include specified tools when includedTools is provided', async () => { - TestAgent.options.req.app.locals.includedTools = ['tool2', 'tool4']; - const response = await TestAgent.sendMessage('Test message'); - expect(response.tools).toHaveLength(2); - expect(response.tools).toEqual( - expect.arrayContaining([ - expect.objectContaining({ name: 'tool2' }), - expect.objectContaining({ name: 'tool4' }), - ]), - ); - }); - - test('should prioritize includedTools over filteredTools', async () => { - 
TestAgent.options.req.app.locals.filteredTools = ['tool1', 'tool3']; - TestAgent.options.req.app.locals.includedTools = ['tool1', 'tool2']; - const response = await TestAgent.sendMessage('Test message'); - expect(response.tools).toHaveLength(2); - expect(response.tools).toEqual( - expect.arrayContaining([ - expect.objectContaining({ name: 'tool1' }), - expect.objectContaining({ name: 'tool2' }), - ]), - ); - }); - - test('should not modify tools when no filters are provided', async () => { - const response = await TestAgent.sendMessage('Test message'); - expect(response.tools).toHaveLength(4); - expect(response.tools).toEqual(expect.arrayContaining(mockTools)); - }); - }); -}); diff --git a/api/package.json b/api/package.json index 6633a99c3f..893baddd5d 100644 --- a/api/package.json +++ b/api/package.json @@ -55,7 +55,6 @@ "@waylaidwanderer/fetch-event-source": "^3.0.1", "axios": "^1.8.2", "bcryptjs": "^2.4.3", - "cohere-ai": "^7.9.1", "compression": "^1.7.4", "connect-redis": "^7.1.0", "cookie": "^0.7.2", diff --git a/api/server/cleanup.js b/api/server/cleanup.js index de7450cea0..84164eb641 100644 --- a/api/server/cleanup.js +++ b/api/server/cleanup.js @@ -169,9 +169,6 @@ function disposeClient(client) { client.isGenerativeModel = null; } // Properties specific to OpenAIClient - if (client.ChatGPTClient) { - client.ChatGPTClient = null; - } if (client.completionsUrl) { client.completionsUrl = null; } diff --git a/api/server/controllers/agents/llm.js b/api/server/controllers/agents/llm.js deleted file mode 100644 index 438a38b6cb..0000000000 --- a/api/server/controllers/agents/llm.js +++ /dev/null @@ -1,106 +0,0 @@ -const { HttpsProxyAgent } = require('https-proxy-agent'); -const { resolveHeaders } = require('librechat-data-provider'); -const { createLLM } = require('~/app/clients/llm'); - -/** - * Initializes and returns a Language Learning Model (LLM) instance. - * - * @param {Object} options - Configuration options for the LLM. 
- * @param {string} options.model - The model identifier. - * @param {string} options.modelName - The specific name of the model. - * @param {number} options.temperature - The temperature setting for the model. - * @param {number} options.presence_penalty - The presence penalty for the model. - * @param {number} options.frequency_penalty - The frequency penalty for the model. - * @param {number} options.max_tokens - The maximum number of tokens for the model output. - * @param {boolean} options.streaming - Whether to use streaming for the model output. - * @param {Object} options.context - The context for the conversation. - * @param {number} options.tokenBuffer - The token buffer size. - * @param {number} options.initialMessageCount - The initial message count. - * @param {string} options.conversationId - The ID of the conversation. - * @param {string} options.user - The user identifier. - * @param {string} options.langchainProxy - The langchain proxy URL. - * @param {boolean} options.useOpenRouter - Whether to use OpenRouter. - * @param {Object} options.options - Additional options. - * @param {Object} options.options.headers - Custom headers for the request. - * @param {string} options.options.proxy - Proxy URL. - * @param {Object} options.options.req - The request object. - * @param {Object} options.options.res - The response object. - * @param {boolean} options.options.debug - Whether to enable debug mode. - * @param {string} options.apiKey - The API key for authentication. - * @param {Object} options.azure - Azure-specific configuration. - * @param {Object} options.abortController - The AbortController instance. - * @returns {Object} The initialized LLM instance. 
- */ -function initializeLLM(options) { - const { - model, - modelName, - temperature, - presence_penalty, - frequency_penalty, - max_tokens, - streaming, - user, - langchainProxy, - useOpenRouter, - options: { headers, proxy }, - apiKey, - azure, - } = options; - - const modelOptions = { - modelName: modelName || model, - temperature, - presence_penalty, - frequency_penalty, - user, - }; - - if (max_tokens) { - modelOptions.max_tokens = max_tokens; - } - - const configOptions = {}; - - if (langchainProxy) { - configOptions.basePath = langchainProxy; - } - - if (useOpenRouter) { - configOptions.basePath = 'https://openrouter.ai/api/v1'; - configOptions.baseOptions = { - headers: { - 'HTTP-Referer': 'https://librechat.ai', - 'X-Title': 'LibreChat', - }, - }; - } - - if (headers && typeof headers === 'object' && !Array.isArray(headers)) { - configOptions.baseOptions = { - headers: resolveHeaders({ - ...headers, - ...configOptions?.baseOptions?.headers, - }), - }; - } - - if (proxy) { - configOptions.httpAgent = new HttpsProxyAgent(proxy); - configOptions.httpsAgent = new HttpsProxyAgent(proxy); - } - - const llm = createLLM({ - modelOptions, - configOptions, - openAIApiKey: apiKey, - azure, - streaming, - }); - - return llm; -} - -module.exports = { - initializeLLM, -}; diff --git a/api/server/middleware/buildEndpointOption.js b/api/server/middleware/buildEndpointOption.js index f3138bf6e9..d302bf8743 100644 --- a/api/server/middleware/buildEndpointOption.js +++ b/api/server/middleware/buildEndpointOption.js @@ -7,7 +7,6 @@ const { } = require('librechat-data-provider'); const azureAssistants = require('~/server/services/Endpoints/azureAssistants'); const assistants = require('~/server/services/Endpoints/assistants'); -const gptPlugins = require('~/server/services/Endpoints/gptPlugins'); const { processFiles } = require('~/server/services/Files/process'); const anthropic = require('~/server/services/Endpoints/anthropic'); const bedrock = 
require('~/server/services/Endpoints/bedrock'); @@ -25,7 +24,6 @@ const buildFunction = { [EModelEndpoint.bedrock]: bedrock.buildOptions, [EModelEndpoint.azureOpenAI]: openAI.buildOptions, [EModelEndpoint.anthropic]: anthropic.buildOptions, - [EModelEndpoint.gptPlugins]: gptPlugins.buildOptions, [EModelEndpoint.assistants]: assistants.buildOptions, [EModelEndpoint.azureAssistants]: azureAssistants.buildOptions, }; @@ -60,15 +58,6 @@ async function buildEndpointOption(req, res, next) { return handleError(res, { text: 'Model spec mismatch' }); } - if ( - currentModelSpec.preset.endpoint !== EModelEndpoint.gptPlugins && - currentModelSpec.preset.tools - ) { - return handleError(res, { - text: `Only the "${EModelEndpoint.gptPlugins}" endpoint can have tools defined in the preset`, - }); - } - try { currentModelSpec.preset.spec = spec; if (currentModelSpec.iconURL != null && currentModelSpec.iconURL !== '') { diff --git a/api/server/routes/edit/gptPlugins.js b/api/server/routes/edit/gptPlugins.js deleted file mode 100644 index 94d9b91d0b..0000000000 --- a/api/server/routes/edit/gptPlugins.js +++ /dev/null @@ -1,207 +0,0 @@ -const express = require('express'); -const { getResponseSender } = require('librechat-data-provider'); -const { - setHeaders, - moderateText, - validateModel, - handleAbortError, - validateEndpoint, - buildEndpointOption, - createAbortController, -} = require('~/server/middleware'); -const { sendMessage, createOnProgress, formatSteps, formatAction } = require('~/server/utils'); -const { initializeClient } = require('~/server/services/Endpoints/gptPlugins'); -const { saveMessage, updateMessage } = require('~/models'); -const { validateTools } = require('~/app'); -const { logger } = require('~/config'); - -const router = express.Router(); - -router.use(moderateText); - -router.post( - '/', - validateEndpoint, - validateModel, - buildEndpointOption, - setHeaders, - async (req, res) => { - let { - text, - generation, - endpointOption, - conversationId, - 
responseMessageId, - isContinued = false, - parentMessageId = null, - overrideParentMessageId = null, - } = req.body; - - logger.debug('[/edit/gptPlugins]', { - text, - generation, - isContinued, - conversationId, - ...endpointOption, - }); - - let userMessage; - let userMessagePromise; - let promptTokens; - const sender = getResponseSender({ - ...endpointOption, - model: endpointOption.modelOptions.model, - }); - const userMessageId = parentMessageId; - const user = req.user.id; - - const plugin = { - loading: true, - inputs: [], - latest: null, - outputs: null, - }; - - const getReqData = (data = {}) => { - for (let key in data) { - if (key === 'userMessage') { - userMessage = data[key]; - } else if (key === 'userMessagePromise') { - userMessagePromise = data[key]; - } else if (key === 'responseMessageId') { - responseMessageId = data[key]; - } else if (key === 'promptTokens') { - promptTokens = data[key]; - } - } - }; - - const { - onProgress: progressCallback, - sendIntermediateMessage, - getPartialText, - } = createOnProgress({ - generation, - onProgress: () => { - if (plugin.loading === true) { - plugin.loading = false; - } - }, - }); - - const onChainEnd = (data) => { - let { intermediateSteps: steps } = data; - plugin.outputs = steps && steps[0].action ? formatSteps(steps) : 'An error occurred.'; - plugin.loading = false; - saveMessage( - req, - { ...userMessage, user }, - { context: 'api/server/routes/ask/gptPlugins.js - onChainEnd' }, - ); - sendIntermediateMessage(res, { - plugin, - parentMessageId: userMessage.messageId, - messageId: responseMessageId, - }); - // logger.debug('CHAIN END', plugin.outputs); - }; - - const getAbortData = () => ({ - sender, - conversationId, - userMessagePromise, - messageId: responseMessageId, - parentMessageId: overrideParentMessageId ?? 
userMessageId, - text: getPartialText(), - plugin: { ...plugin, loading: false }, - userMessage, - promptTokens, - }); - const { abortController, onStart } = createAbortController(req, res, getAbortData, getReqData); - - try { - endpointOption.tools = await validateTools(user, endpointOption.tools); - const { client } = await initializeClient({ req, res, endpointOption }); - - const onAgentAction = (action, start = false) => { - const formattedAction = formatAction(action); - plugin.inputs.push(formattedAction); - plugin.latest = formattedAction.plugin; - if (!start && !client.skipSaveUserMessage) { - saveMessage( - req, - { ...userMessage, user }, - { context: 'api/server/routes/ask/gptPlugins.js - onAgentAction' }, - ); - } - sendIntermediateMessage(res, { - plugin, - parentMessageId: userMessage.messageId, - messageId: responseMessageId, - }); - // logger.debug('PLUGIN ACTION', formattedAction); - }; - - let response = await client.sendMessage(text, { - user, - generation, - isContinued, - isEdited: true, - conversationId, - parentMessageId, - responseMessageId, - overrideParentMessageId, - getReqData, - onAgentAction, - onChainEnd, - onStart, - ...endpointOption, - progressCallback, - progressOptions: { - res, - plugin, - // parentMessageId: overrideParentMessageId || userMessageId, - }, - abortController, - }); - - if (overrideParentMessageId) { - response.parentMessageId = overrideParentMessageId; - } - - logger.debug('[/edit/gptPlugins] CLIENT RESPONSE', response); - - const { conversation = {} } = await response.databasePromise; - delete response.databasePromise; - conversation.title = - conversation && !conversation.title ? 
null : conversation?.title || 'New Chat'; - - sendMessage(res, { - title: conversation.title, - final: true, - conversation, - requestMessage: userMessage, - responseMessage: response, - }); - res.end(); - - response.plugin = { ...plugin, loading: false }; - await updateMessage( - req, - { ...response, user }, - { context: 'api/server/routes/edit/gptPlugins.js' }, - ); - } catch (error) { - const partialText = getPartialText(); - handleAbortError(res, req, error, { - partialText, - conversationId, - sender, - messageId: responseMessageId, - parentMessageId: userMessageId ?? parentMessageId, - }); - } - }, -); - -module.exports = router; diff --git a/api/server/routes/edit/index.js b/api/server/routes/edit/index.js index f1d47af3f9..92a1e63f63 100644 --- a/api/server/routes/edit/index.js +++ b/api/server/routes/edit/index.js @@ -3,7 +3,6 @@ const openAI = require('./openAI'); const custom = require('./custom'); const google = require('./google'); const anthropic = require('./anthropic'); -const gptPlugins = require('./gptPlugins'); const { isEnabled } = require('~/server/utils'); const { EModelEndpoint } = require('librechat-data-provider'); const { @@ -39,7 +38,6 @@ if (isEnabled(LIMIT_MESSAGE_USER)) { router.use(validateConvoAccess); router.use([`/${EModelEndpoint.azureOpenAI}`, `/${EModelEndpoint.openAI}`], openAI); -router.use(`/${EModelEndpoint.gptPlugins}`, gptPlugins); router.use(`/${EModelEndpoint.anthropic}`, anthropic); router.use(`/${EModelEndpoint.google}`, google); router.use(`/${EModelEndpoint.custom}`, custom); diff --git a/api/server/services/Endpoints/azureAssistants/initialize.js b/api/server/services/Endpoints/azureAssistants/initialize.js index 88acef23e5..132c123e7e 100644 --- a/api/server/services/Endpoints/azureAssistants/initialize.js +++ b/api/server/services/Endpoints/azureAssistants/initialize.js @@ -1,12 +1,7 @@ const OpenAI = require('openai'); const { HttpsProxyAgent } = require('https-proxy-agent'); -const { constructAzureURL, 
isUserProvided } = require('@librechat/api'); -const { - ErrorTypes, - EModelEndpoint, - resolveHeaders, - mapModelToAzureConfig, -} = require('librechat-data-provider'); +const { constructAzureURL, isUserProvided, resolveHeaders } = require('@librechat/api'); +const { ErrorTypes, EModelEndpoint, mapModelToAzureConfig } = require('librechat-data-provider'); const { getUserKeyValues, getUserKeyExpiry, @@ -114,11 +109,14 @@ const initializeClient = async ({ req, res, version, endpointOption, initAppClie apiKey = azureOptions.azureOpenAIApiKey; opts.defaultQuery = { 'api-version': azureOptions.azureOpenAIApiVersion }; - opts.defaultHeaders = resolveHeaders({ - ...headers, - 'api-key': apiKey, - 'OpenAI-Beta': `assistants=${version}`, - }); + opts.defaultHeaders = resolveHeaders( + { + ...headers, + 'api-key': apiKey, + 'OpenAI-Beta': `assistants=${version}`, + }, + req.user, + ); opts.model = azureOptions.azureOpenAIApiDeploymentName; if (initAppClient) { diff --git a/api/server/services/Endpoints/gptPlugins/build.js b/api/server/services/Endpoints/gptPlugins/build.js deleted file mode 100644 index 0d1ec097ad..0000000000 --- a/api/server/services/Endpoints/gptPlugins/build.js +++ /dev/null @@ -1,41 +0,0 @@ -const { removeNullishValues } = require('librechat-data-provider'); -const generateArtifactsPrompt = require('~/app/clients/prompts/artifacts'); - -const buildOptions = (endpoint, parsedBody) => { - const { - modelLabel, - chatGptLabel, - promptPrefix, - agentOptions, - tools = [], - iconURL, - greeting, - spec, - maxContextTokens, - artifacts, - ...modelOptions - } = parsedBody; - const endpointOption = removeNullishValues({ - endpoint, - tools: tools - .map((tool) => tool?.pluginKey ?? 
tool) - .filter((toolName) => typeof toolName === 'string'), - modelLabel, - chatGptLabel, - promptPrefix, - agentOptions, - iconURL, - greeting, - spec, - maxContextTokens, - modelOptions, - }); - - if (typeof artifacts === 'string') { - endpointOption.artifactsPrompt = generateArtifactsPrompt({ endpoint, artifacts }); - } - - return endpointOption; -}; - -module.exports = buildOptions; diff --git a/api/server/services/Endpoints/gptPlugins/index.js b/api/server/services/Endpoints/gptPlugins/index.js deleted file mode 100644 index 202cb0e4d7..0000000000 --- a/api/server/services/Endpoints/gptPlugins/index.js +++ /dev/null @@ -1,7 +0,0 @@ -const buildOptions = require('./build'); -const initializeClient = require('./initialize'); - -module.exports = { - buildOptions, - initializeClient, -}; diff --git a/api/server/services/Endpoints/gptPlugins/initialize.js b/api/server/services/Endpoints/gptPlugins/initialize.js deleted file mode 100644 index d2af6c757e..0000000000 --- a/api/server/services/Endpoints/gptPlugins/initialize.js +++ /dev/null @@ -1,134 +0,0 @@ -const { - EModelEndpoint, - resolveHeaders, - mapModelToAzureConfig, -} = require('librechat-data-provider'); -const { isEnabled, isUserProvided, getAzureCredentials } = require('@librechat/api'); -const { getUserKeyValues, checkUserKeyExpiry } = require('~/server/services/UserService'); -const { PluginsClient } = require('~/app'); - -const initializeClient = async ({ req, res, endpointOption }) => { - const { - PROXY, - OPENAI_API_KEY, - AZURE_API_KEY, - PLUGINS_USE_AZURE, - OPENAI_REVERSE_PROXY, - AZURE_OPENAI_BASEURL, - OPENAI_SUMMARIZE, - DEBUG_PLUGINS, - } = process.env; - - const { key: expiresAt, model: modelName } = req.body; - const contextStrategy = isEnabled(OPENAI_SUMMARIZE) ? 'summarize' : null; - - let useAzure = isEnabled(PLUGINS_USE_AZURE); - let endpoint = useAzure ? 
EModelEndpoint.azureOpenAI : EModelEndpoint.openAI; - - /** @type {false | TAzureConfig} */ - const azureConfig = req.app.locals[EModelEndpoint.azureOpenAI]; - useAzure = useAzure || azureConfig?.plugins; - - if (useAzure && endpoint !== EModelEndpoint.azureOpenAI) { - endpoint = EModelEndpoint.azureOpenAI; - } - - const credentials = { - [EModelEndpoint.openAI]: OPENAI_API_KEY, - [EModelEndpoint.azureOpenAI]: AZURE_API_KEY, - }; - - const baseURLOptions = { - [EModelEndpoint.openAI]: OPENAI_REVERSE_PROXY, - [EModelEndpoint.azureOpenAI]: AZURE_OPENAI_BASEURL, - }; - - const userProvidesKey = isUserProvided(credentials[endpoint]); - const userProvidesURL = isUserProvided(baseURLOptions[endpoint]); - - let userValues = null; - if (expiresAt && (userProvidesKey || userProvidesURL)) { - checkUserKeyExpiry(expiresAt, endpoint); - userValues = await getUserKeyValues({ userId: req.user.id, name: endpoint }); - } - - let apiKey = userProvidesKey ? userValues?.apiKey : credentials[endpoint]; - let baseURL = userProvidesURL ? userValues?.baseURL : baseURLOptions[endpoint]; - - const clientOptions = { - contextStrategy, - debug: isEnabled(DEBUG_PLUGINS), - reverseProxyUrl: baseURL ? baseURL : null, - proxy: PROXY ?? null, - req, - res, - ...endpointOption, - }; - - if (useAzure && azureConfig) { - const { modelGroupMap, groupMap } = azureConfig; - const { - azureOptions, - baseURL, - headers = {}, - serverless, - } = mapModelToAzureConfig({ - modelName, - modelGroupMap, - groupMap, - }); - - clientOptions.reverseProxyUrl = baseURL ?? clientOptions.reverseProxyUrl; - clientOptions.headers = resolveHeaders({ ...headers, ...(clientOptions.headers ?? {}) }); - - clientOptions.titleConvo = azureConfig.titleConvo; - clientOptions.titleModel = azureConfig.titleModel; - clientOptions.titleMethod = azureConfig.titleMethod ?? 'completion'; - - const azureRate = modelName.includes('gpt-4') ? 30 : 17; - clientOptions.streamRate = azureConfig.streamRate ?? 
azureRate; - - const groupName = modelGroupMap[modelName].group; - clientOptions.addParams = azureConfig.groupMap[groupName].addParams; - clientOptions.dropParams = azureConfig.groupMap[groupName].dropParams; - clientOptions.forcePrompt = azureConfig.groupMap[groupName].forcePrompt; - - apiKey = azureOptions.azureOpenAIApiKey; - clientOptions.azure = !serverless && azureOptions; - if (serverless === true) { - clientOptions.defaultQuery = azureOptions.azureOpenAIApiVersion - ? { 'api-version': azureOptions.azureOpenAIApiVersion } - : undefined; - clientOptions.headers['api-key'] = apiKey; - } - } else if (useAzure || (apiKey && apiKey.includes('{"azure') && !clientOptions.azure)) { - clientOptions.azure = userProvidesKey ? JSON.parse(userValues.apiKey) : getAzureCredentials(); - apiKey = clientOptions.azure.azureOpenAIApiKey; - } - - /** @type {undefined | TBaseEndpoint} */ - const pluginsConfig = req.app.locals[EModelEndpoint.gptPlugins]; - - if (!useAzure && pluginsConfig) { - clientOptions.streamRate = pluginsConfig.streamRate; - } - - /** @type {undefined | TBaseEndpoint} */ - const allConfig = req.app.locals.all; - if (allConfig) { - clientOptions.streamRate = allConfig.streamRate; - } - - if (!apiKey) { - throw new Error(`${endpoint} API key not provided. 
Please provide it again.`); - } - - const client = new PluginsClient(apiKey, clientOptions); - return { - client, - azure: clientOptions.azure, - openAIApiKey: apiKey, - }; -}; - -module.exports = initializeClient; diff --git a/api/server/services/Endpoints/gptPlugins/initialize.spec.js b/api/server/services/Endpoints/gptPlugins/initialize.spec.js deleted file mode 100644 index f9cb2750a4..0000000000 --- a/api/server/services/Endpoints/gptPlugins/initialize.spec.js +++ /dev/null @@ -1,410 +0,0 @@ -// gptPlugins/initializeClient.spec.js -jest.mock('~/cache/getLogStores'); -const { EModelEndpoint, ErrorTypes, validateAzureGroups } = require('librechat-data-provider'); -const { getUserKey, getUserKeyValues } = require('~/server/services/UserService'); -const initializeClient = require('./initialize'); -const { PluginsClient } = require('~/app'); - -// Mock getUserKey since it's the only function we want to mock -jest.mock('~/server/services/UserService', () => ({ - getUserKey: jest.fn(), - getUserKeyValues: jest.fn(), - checkUserKeyExpiry: jest.requireActual('~/server/services/UserService').checkUserKeyExpiry, -})); - -describe('gptPlugins/initializeClient', () => { - // Set up environment variables - const originalEnvironment = process.env; - const app = { - locals: {}, - }; - - const validAzureConfigs = [ - { - group: 'librechat-westus', - apiKey: 'WESTUS_API_KEY', - instanceName: 'librechat-westus', - version: '2023-12-01-preview', - models: { - 'gpt-4-vision-preview': { - deploymentName: 'gpt-4-vision-preview', - version: '2024-02-15-preview', - }, - 'gpt-3.5-turbo': { - deploymentName: 'gpt-35-turbo', - }, - 'gpt-3.5-turbo-1106': { - deploymentName: 'gpt-35-turbo-1106', - }, - 'gpt-4': { - deploymentName: 'gpt-4', - }, - 'gpt-4-1106-preview': { - deploymentName: 'gpt-4-1106-preview', - }, - }, - }, - { - group: 'librechat-eastus', - apiKey: 'EASTUS_API_KEY', - instanceName: 'librechat-eastus', - deploymentName: 'gpt-4-turbo', - version: '2024-02-15-preview', - 
models: { - 'gpt-4-turbo': true, - }, - baseURL: 'https://eastus.example.com', - additionalHeaders: { - 'x-api-key': 'x-api-key-value', - }, - }, - { - group: 'mistral-inference', - apiKey: 'AZURE_MISTRAL_API_KEY', - baseURL: - 'https://Mistral-large-vnpet-serverless.region.inference.ai.azure.com/v1/chat/completions', - serverless: true, - models: { - 'mistral-large': true, - }, - }, - { - group: 'llama-70b-chat', - apiKey: 'AZURE_LLAMA2_70B_API_KEY', - baseURL: - 'https://Llama-2-70b-chat-qmvyb-serverless.region.inference.ai.azure.com/v1/chat/completions', - serverless: true, - models: { - 'llama-70b-chat': true, - }, - }, - ]; - - const { modelNames, modelGroupMap, groupMap } = validateAzureGroups(validAzureConfigs); - - beforeEach(() => { - jest.resetModules(); // Clears the cache - process.env = { ...originalEnvironment }; // Make a copy - }); - - afterAll(() => { - process.env = originalEnvironment; // Restore original env vars - }); - - test('should initialize PluginsClient with OpenAI API key and default options', async () => { - process.env.OPENAI_API_KEY = 'test-openai-api-key'; - process.env.PLUGINS_USE_AZURE = 'false'; - process.env.DEBUG_PLUGINS = 'false'; - process.env.OPENAI_SUMMARIZE = 'false'; - - const req = { - body: { key: null }, - user: { id: '123' }, - app, - }; - const res = {}; - const endpointOption = { modelOptions: { model: 'default-model' } }; - - const { client, openAIApiKey } = await initializeClient({ req, res, endpointOption }); - - expect(openAIApiKey).toBe('test-openai-api-key'); - expect(client).toBeInstanceOf(PluginsClient); - }); - - test('should initialize PluginsClient with Azure credentials when PLUGINS_USE_AZURE is true', async () => { - process.env.AZURE_API_KEY = 'test-azure-api-key'; - (process.env.AZURE_OPENAI_API_INSTANCE_NAME = 'some-value'), - (process.env.AZURE_OPENAI_API_DEPLOYMENT_NAME = 'some-value'), - (process.env.AZURE_OPENAI_API_VERSION = 'some-value'), - 
(process.env.AZURE_OPENAI_API_COMPLETIONS_DEPLOYMENT_NAME = 'some-value'), - (process.env.AZURE_OPENAI_API_EMBEDDINGS_DEPLOYMENT_NAME = 'some-value'), - (process.env.PLUGINS_USE_AZURE = 'true'); - process.env.DEBUG_PLUGINS = 'false'; - process.env.OPENAI_SUMMARIZE = 'false'; - - const req = { - body: { key: null }, - user: { id: '123' }, - app, - }; - const res = {}; - const endpointOption = { modelOptions: { model: 'test-model' } }; - - const { client, azure } = await initializeClient({ req, res, endpointOption }); - - expect(azure.azureOpenAIApiKey).toBe('test-azure-api-key'); - expect(client).toBeInstanceOf(PluginsClient); - }); - - test('should use the debug option when DEBUG_PLUGINS is enabled', async () => { - process.env.OPENAI_API_KEY = 'test-openai-api-key'; - process.env.DEBUG_PLUGINS = 'true'; - - const req = { - body: { key: null }, - user: { id: '123' }, - app, - }; - const res = {}; - const endpointOption = { modelOptions: { model: 'default-model' } }; - - const { client } = await initializeClient({ req, res, endpointOption }); - - expect(client.options.debug).toBe(true); - }); - - test('should set contextStrategy to summarize when OPENAI_SUMMARIZE is enabled', async () => { - process.env.OPENAI_API_KEY = 'test-openai-api-key'; - process.env.OPENAI_SUMMARIZE = 'true'; - - const req = { - body: { key: null }, - user: { id: '123' }, - app, - }; - const res = {}; - const endpointOption = { modelOptions: { model: 'default-model' } }; - - const { client } = await initializeClient({ req, res, endpointOption }); - - expect(client.options.contextStrategy).toBe('summarize'); - }); - - // ... additional tests for reverseProxyUrl, proxy, user-provided keys, etc. 
- - test('should throw an error if no API keys are provided in the environment', async () => { - // Clear the environment variables for API keys - delete process.env.OPENAI_API_KEY; - delete process.env.AZURE_API_KEY; - - const req = { - body: { key: null }, - user: { id: '123' }, - app, - }; - const res = {}; - const endpointOption = { modelOptions: { model: 'default-model' } }; - - await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow( - `${EModelEndpoint.openAI} API key not provided.`, - ); - }); - - // Additional tests for gptPlugins/initializeClient.spec.js - - // ... (previous test setup code) - - test('should handle user-provided OpenAI keys and check expiry', async () => { - process.env.OPENAI_API_KEY = 'user_provided'; - process.env.PLUGINS_USE_AZURE = 'false'; - - const futureDate = new Date(Date.now() + 10000).toISOString(); - const req = { - body: { key: futureDate }, - user: { id: '123' }, - app, - }; - const res = {}; - const endpointOption = { modelOptions: { model: 'default-model' } }; - - getUserKeyValues.mockResolvedValue({ apiKey: 'test-user-provided-openai-api-key' }); - - const { openAIApiKey } = await initializeClient({ req, res, endpointOption }); - - expect(openAIApiKey).toBe('test-user-provided-openai-api-key'); - }); - - test('should handle user-provided Azure keys and check expiry', async () => { - process.env.AZURE_API_KEY = 'user_provided'; - process.env.PLUGINS_USE_AZURE = 'true'; - - const futureDate = new Date(Date.now() + 10000).toISOString(); - const req = { - body: { key: futureDate }, - user: { id: '123' }, - app, - }; - const res = {}; - const endpointOption = { modelOptions: { model: 'test-model' } }; - - getUserKeyValues.mockResolvedValue({ - apiKey: JSON.stringify({ - azureOpenAIApiKey: 'test-user-provided-azure-api-key', - azureOpenAIApiDeploymentName: 'test-deployment', - }), - }); - - const { azure } = await initializeClient({ req, res, endpointOption }); - - 
expect(azure.azureOpenAIApiKey).toBe('test-user-provided-azure-api-key'); - }); - - test('should throw an error if the user-provided key has expired', async () => { - process.env.OPENAI_API_KEY = 'user_provided'; - process.env.PLUGINS_USE_AZURE = 'FALSE'; - const expiresAt = new Date(Date.now() - 10000).toISOString(); // Expired - const req = { - body: { key: expiresAt }, - user: { id: '123' }, - app, - }; - const res = {}; - const endpointOption = { modelOptions: { model: 'default-model' } }; - - await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow( - /expired_user_key/, - ); - }); - - test('should throw an error if the user-provided Azure key is invalid JSON', async () => { - process.env.AZURE_API_KEY = 'user_provided'; - process.env.PLUGINS_USE_AZURE = 'true'; - - const req = { - body: { key: new Date(Date.now() + 10000).toISOString() }, - user: { id: '123' }, - app, - }; - const res = {}; - const endpointOption = { modelOptions: { model: 'default-model' } }; - - // Simulate an invalid JSON string returned from getUserKey - getUserKey.mockResolvedValue('invalid-json'); - getUserKeyValues.mockImplementation(() => { - let userValues = getUserKey(); - try { - userValues = JSON.parse(userValues); - } catch (e) { - throw new Error( - JSON.stringify({ - type: ErrorTypes.INVALID_USER_KEY, - }), - ); - } - return userValues; - }); - - await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow( - /invalid_user_key/, - ); - }); - - test('should correctly handle the presence of a reverse proxy', async () => { - process.env.OPENAI_REVERSE_PROXY = 'http://reverse.proxy'; - process.env.PROXY = 'http://proxy'; - process.env.OPENAI_API_KEY = 'test-openai-api-key'; - - const req = { - body: { key: null }, - user: { id: '123' }, - app, - }; - const res = {}; - const endpointOption = { modelOptions: { model: 'default-model' } }; - - const { client } = await initializeClient({ req, res, endpointOption }); - - 
expect(client.options.reverseProxyUrl).toBe('http://reverse.proxy'); - expect(client.options.proxy).toBe('http://proxy'); - }); - - test('should throw an error when user-provided values are not valid JSON', async () => { - process.env.OPENAI_API_KEY = 'user_provided'; - const req = { - body: { key: new Date(Date.now() + 10000).toISOString(), endpoint: 'openAI' }, - user: { id: '123' }, - app, - }; - const res = {}; - const endpointOption = {}; - - // Mock getUserKey to return a non-JSON string - getUserKey.mockResolvedValue('not-a-json'); - getUserKeyValues.mockImplementation(() => { - let userValues = getUserKey(); - try { - userValues = JSON.parse(userValues); - } catch (e) { - throw new Error( - JSON.stringify({ - type: ErrorTypes.INVALID_USER_KEY, - }), - ); - } - return userValues; - }); - - await expect(initializeClient({ req, res, endpointOption })).rejects.toThrow( - /invalid_user_key/, - ); - }); - - test('should initialize client correctly for Azure OpenAI with valid configuration', async () => { - const req = { - body: { - key: null, - endpoint: EModelEndpoint.gptPlugins, - model: modelNames[0], - }, - user: { id: '123' }, - app: { - locals: { - [EModelEndpoint.azureOpenAI]: { - plugins: true, - modelNames, - modelGroupMap, - groupMap, - }, - }, - }, - }; - const res = {}; - const endpointOption = {}; - - const client = await initializeClient({ req, res, endpointOption }); - expect(client.client.options.azure).toBeDefined(); - }); - - test('should initialize client with default options when certain env vars are not set', async () => { - delete process.env.OPENAI_SUMMARIZE; - process.env.OPENAI_API_KEY = 'some-api-key'; - - const req = { - body: { key: null, endpoint: EModelEndpoint.gptPlugins }, - user: { id: '123' }, - app, - }; - const res = {}; - const endpointOption = {}; - - const client = await initializeClient({ req, res, endpointOption }); - expect(client.client.options.contextStrategy).toBe(null); - }); - - test('should correctly use 
user-provided apiKey and baseURL when provided', async () => { - process.env.OPENAI_API_KEY = 'user_provided'; - process.env.OPENAI_REVERSE_PROXY = 'user_provided'; - const req = { - body: { - key: new Date(Date.now() + 10000).toISOString(), - endpoint: 'openAI', - }, - user: { - id: '123', - }, - app, - }; - const res = {}; - const endpointOption = {}; - - getUserKeyValues.mockResolvedValue({ - apiKey: 'test', - baseURL: 'https://user-provided-url.com', - }); - - const result = await initializeClient({ req, res, endpointOption }); - - expect(result.openAIApiKey).toBe('test'); - expect(result.client.options.reverseProxyUrl).toBe('https://user-provided-url.com'); - }); -}); diff --git a/api/server/services/Endpoints/openAI/initialize.js b/api/server/services/Endpoints/openAI/initialize.js index bc0907b3de..0e9a07789e 100644 --- a/api/server/services/Endpoints/openAI/initialize.js +++ b/api/server/services/Endpoints/openAI/initialize.js @@ -1,11 +1,7 @@ -const { - ErrorTypes, - EModelEndpoint, - resolveHeaders, - mapModelToAzureConfig, -} = require('librechat-data-provider'); +const { ErrorTypes, EModelEndpoint, mapModelToAzureConfig } = require('librechat-data-provider'); const { isEnabled, + resolveHeaders, isUserProvided, getOpenAIConfig, getAzureCredentials, @@ -84,7 +80,10 @@ const initializeClient = async ({ }); clientOptions.reverseProxyUrl = baseURL ?? clientOptions.reverseProxyUrl; - clientOptions.headers = resolveHeaders({ ...headers, ...(clientOptions.headers ?? {}) }); + clientOptions.headers = resolveHeaders( + { ...headers, ...(clientOptions.headers ?? 
{}) }, + req.user, + ); clientOptions.titleConvo = azureConfig.titleConvo; clientOptions.titleModel = azureConfig.titleModel; diff --git a/api/server/services/initializeMCP.js b/api/server/services/initializeMCP.js index d7c5ab7d8a..98b87d156e 100644 --- a/api/server/services/initializeMCP.js +++ b/api/server/services/initializeMCP.js @@ -1,9 +1,9 @@ const { logger } = require('@librechat/data-schemas'); -const { CacheKeys, processMCPEnv } = require('librechat-data-provider'); +const { CacheKeys } = require('librechat-data-provider'); +const { findToken, updateToken, createToken, deleteTokens } = require('~/models'); const { getMCPManager, getFlowStateManager } = require('~/config'); const { getCachedTools, setCachedTools } = require('./Config'); const { getLogStores } = require('~/cache'); -const { findToken, updateToken, createToken, deleteTokens } = require('~/models'); /** * Initialize MCP servers @@ -30,7 +30,6 @@ async function initializeMCP(app) { createToken, deleteTokens, }, - processMCPEnv, }); delete app.locals.mcpConfig; diff --git a/api/typedefs.js b/api/typedefs.js index 58cd802425..c0e0dd5f46 100644 --- a/api/typedefs.js +++ b/api/typedefs.js @@ -1503,7 +1503,6 @@ * @property {boolean|{userProvide: boolean}} [anthropic] - Flag to indicate if Anthropic endpoint is user provided, or its configuration. * @property {boolean|{userProvide: boolean}} [google] - Flag to indicate if Google endpoint is user provided, or its configuration. * @property {boolean|{userProvide: boolean, userProvideURL: boolean, name: string}} [custom] - Custom Endpoint configuration. - * @property {boolean|GptPlugins} [gptPlugins] - Configuration for GPT plugins. 
* @memberof typedefs */ diff --git a/package-lock.json b/package-lock.json index b9718bb56c..39e41975e3 100644 --- a/package-lock.json +++ b/package-lock.json @@ -71,7 +71,6 @@ "@waylaidwanderer/fetch-event-source": "^3.0.1", "axios": "^1.8.2", "bcryptjs": "^2.4.3", - "cohere-ai": "^7.9.1", "compression": "^1.7.4", "connect-redis": "^7.1.0", "cookie": "^0.7.2", @@ -28101,6 +28100,8 @@ "version": "7.9.1", "resolved": "https://registry.npmjs.org/cohere-ai/-/cohere-ai-7.9.1.tgz", "integrity": "sha512-shMz0Bs3p6/Nw5Yi+6Wc9tZ7DCGTtEnf1eAcuesnlyeKoFuZ7+bzeiHkt5E8SvTgAHxN1GCP3UkIoW85QhHKTA==", + "optional": true, + "peer": true, "dependencies": { "form-data": "4.0.0", "js-base64": "3.7.2", @@ -28113,6 +28114,8 @@ "version": "6.11.2", "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.2.tgz", "integrity": "sha512-tDNIz22aBzCDxLtVH++VnTfzxlfeK5CbqohpSqpJgj1Wg/cQbStNAz3NuqCs5vV+pjBsK4x4pN9HlVh7rcYRiA==", + "optional": true, + "peer": true, "dependencies": { "side-channel": "^1.0.4" }, @@ -34441,7 +34444,9 @@ "node_modules/js-base64": { "version": "3.7.2", "resolved": "https://registry.npmjs.org/js-base64/-/js-base64-3.7.2.tgz", - "integrity": "sha512-NnRs6dsyqUXejqk/yv2aiXlAvOs56sLkX6nUdeaNezI5LFFLlsZjOThmwnrcwh5ZZRwZlCMnVAY3CvhIhoVEKQ==" + "integrity": "sha512-NnRs6dsyqUXejqk/yv2aiXlAvOs56sLkX6nUdeaNezI5LFFLlsZjOThmwnrcwh5ZZRwZlCMnVAY3CvhIhoVEKQ==", + "optional": true, + "peer": true }, "node_modules/js-cookie": { "version": "3.0.5", @@ -44808,7 +44813,9 @@ "node_modules/url-join": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/url-join/-/url-join-4.0.1.tgz", - "integrity": "sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA==" + "integrity": "sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA==", + "optional": true, + "peer": true }, "node_modules/url-parse": { "version": "1.5.10", @@ -46575,10 +46582,11 @@ } }, "packages/api/node_modules/brace-expansion": { - "version": 
"2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", "dev": true, + "license": "MIT", "dependencies": { "balanced-match": "^1.0.0" } @@ -46832,9 +46840,9 @@ } }, "packages/data-schemas/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", "dev": true, "license": "MIT", "dependencies": { diff --git a/packages/api/src/endpoints/openai/initialize.ts b/packages/api/src/endpoints/openai/initialize.ts index d921af781d..91e92db85a 100644 --- a/packages/api/src/endpoints/openai/initialize.ts +++ b/packages/api/src/endpoints/openai/initialize.ts @@ -1,9 +1,4 @@ -import { - ErrorTypes, - EModelEndpoint, - resolveHeaders, - mapModelToAzureConfig, -} from 'librechat-data-provider'; +import { ErrorTypes, EModelEndpoint, mapModelToAzureConfig } from 'librechat-data-provider'; import type { LLMConfigOptions, UserKeyValues, @@ -13,6 +8,7 @@ import type { import { createHandleLLMNewToken } from '~/utils/generators'; import { getAzureCredentials } from '~/utils/azure'; import { isUserProvided } from '~/utils/common'; +import { resolveHeaders } from '~/utils/env'; import { getOpenAIConfig } from './llm'; /** @@ -91,7 +87,10 @@ export const initializeOpenAI = async ({ }); 
clientOptions.reverseProxyUrl = configBaseURL ?? clientOptions.reverseProxyUrl; - clientOptions.headers = resolveHeaders({ ...headers, ...(clientOptions.headers ?? {}) }); + clientOptions.headers = resolveHeaders( + { ...headers, ...(clientOptions.headers ?? {}) }, + req.user, + ); const groupName = modelGroupMap[modelName || '']?.group; if (groupName && groupMap[groupName]) { diff --git a/packages/api/src/mcp/manager.ts b/packages/api/src/mcp/manager.ts index 19d4d4e72b..8b51b3cf8a 100644 --- a/packages/api/src/mcp/manager.ts +++ b/packages/api/src/mcp/manager.ts @@ -2,7 +2,7 @@ import { logger } from '@librechat/data-schemas'; import { CallToolResultSchema, ErrorCode, McpError } from '@modelcontextprotocol/sdk/types.js'; import type { RequestOptions } from '@modelcontextprotocol/sdk/shared/protocol.js'; import type { OAuthClientInformation } from '@modelcontextprotocol/sdk/shared/auth.js'; -import type { JsonSchemaType, MCPOptions, TUser } from 'librechat-data-provider'; +import type { JsonSchemaType, TUser } from 'librechat-data-provider'; import type { TokenMethods } from '@librechat/data-schemas'; import type { FlowStateManager } from '~/flow/manager'; import type { MCPOAuthTokens, MCPOAuthFlowMetadata } from './oauth/types'; @@ -13,6 +13,7 @@ import { MCPOAuthHandler } from './oauth/handler'; import { MCPTokenStorage } from './oauth/tokens'; import { formatToolContent } from './parsers'; import { MCPConnection } from './connection'; +import { processMCPEnv } from '~/utils/env'; export class MCPManager { private static instance: MCPManager | null = null; @@ -24,11 +25,6 @@ export class MCPManager { private userLastActivity: Map = new Map(); private readonly USER_CONNECTION_IDLE_TIMEOUT = 15 * 60 * 1000; // 15 minutes (TODO: make configurable) private mcpConfigs: t.MCPServers = {}; - private processMCPEnv?: ( - obj: MCPOptions, - user?: TUser, - customUserVars?: Record, - ) => MCPOptions; // Store the processing function /** Store MCP server instructions */ 
private serverInstructions: Map = new Map(); @@ -46,14 +42,11 @@ export class MCPManager { mcpServers, flowManager, tokenMethods, - processMCPEnv, }: { mcpServers: t.MCPServers; flowManager: FlowStateManager; tokenMethods?: TokenMethods; - processMCPEnv?: (obj: MCPOptions) => MCPOptions; }): Promise { - this.processMCPEnv = processMCPEnv; // Store the function this.mcpConfigs = mcpServers; if (!flowManager) { @@ -68,7 +61,7 @@ export class MCPManager { const connectionResults = await Promise.allSettled( entries.map(async ([serverName, _config], i) => { /** Process env for app-level connections */ - const config = this.processMCPEnv ? this.processMCPEnv(_config) : _config; + const config = processMCPEnv(_config); /** Existing tokens for system-level connections */ let tokens: MCPOAuthTokens | null = null; @@ -444,9 +437,7 @@ export class MCPManager { ); } - if (this.processMCPEnv) { - config = { ...(this.processMCPEnv(config, user, customUserVars) ?? {}) }; - } + config = { ...(processMCPEnv(config, user, customUserVars) ?? 
{}) }; /** If no in-memory tokens, tokens from persistent storage */ let tokens: MCPOAuthTokens | null = null; if (tokenMethods?.findToken) { diff --git a/packages/data-provider/specs/mcp.spec.ts b/packages/api/src/mcp/mcp.spec.ts similarity index 99% rename from packages/data-provider/specs/mcp.spec.ts rename to packages/api/src/mcp/mcp.spec.ts index 37493f1bbc..a9f5b7db1d 100644 --- a/packages/data-provider/specs/mcp.spec.ts +++ b/packages/api/src/mcp/mcp.spec.ts @@ -1,10 +1,10 @@ -import type { TUser } from 'librechat-data-provider'; import { - StreamableHTTPOptionsSchema, - StdioOptionsSchema, - processMCPEnv, MCPOptions, -} from '../src/mcp'; + StdioOptionsSchema, + StreamableHTTPOptionsSchema, +} from 'librechat-data-provider'; +import type { TUser } from 'librechat-data-provider'; +import { processMCPEnv } from '~/utils/env'; // Helper function to create test user objects function createTestUser( diff --git a/packages/api/src/utils/env.spec.ts b/packages/api/src/utils/env.spec.ts new file mode 100644 index 0000000000..35f3f13272 --- /dev/null +++ b/packages/api/src/utils/env.spec.ts @@ -0,0 +1,317 @@ +import { resolveHeaders } from './env'; +import type { TUser } from 'librechat-data-provider'; + +// Helper function to create test user objects +function createTestUser(overrides: Partial = {}): TUser { + return { + id: 'test-user-id', + username: 'testuser', + email: 'test@example.com', + name: 'Test User', + avatar: 'https://example.com/avatar.png', + provider: 'email', + role: 'user', + createdAt: new Date('2021-01-01').toISOString(), + updatedAt: new Date('2021-01-01').toISOString(), + ...overrides, + }; +} + +describe('resolveHeaders', () => { + beforeEach(() => { + // Set up test environment variables + process.env.TEST_API_KEY = 'test-api-key-value'; + process.env.ANOTHER_SECRET = 'another-secret-value'; + }); + + afterEach(() => { + // Clean up environment variables + delete process.env.TEST_API_KEY; + delete process.env.ANOTHER_SECRET; + }); + + 
it('should return empty object when headers is undefined', () => { + const result = resolveHeaders(undefined); + expect(result).toEqual({}); + }); + + it('should return empty object when headers is null', () => { + const result = resolveHeaders(null as unknown as Record | undefined); + expect(result).toEqual({}); + }); + + it('should return empty object when headers is empty', () => { + const result = resolveHeaders({}); + expect(result).toEqual({}); + }); + + it('should process environment variables in headers', () => { + const headers = { + Authorization: '${TEST_API_KEY}', + 'X-Secret': '${ANOTHER_SECRET}', + 'Content-Type': 'application/json', + }; + + const result = resolveHeaders(headers); + + expect(result).toEqual({ + Authorization: 'test-api-key-value', + 'X-Secret': 'another-secret-value', + 'Content-Type': 'application/json', + }); + }); + + it('should process user ID placeholder when user has id', () => { + const user = { id: 'test-user-123' }; + const headers = { + 'User-Id': '{{LIBRECHAT_USER_ID}}', + 'Content-Type': 'application/json', + }; + + const result = resolveHeaders(headers, user); + + expect(result).toEqual({ + 'User-Id': 'test-user-123', + 'Content-Type': 'application/json', + }); + }); + + it('should not process user ID placeholder when user is undefined', () => { + const headers = { + 'User-Id': '{{LIBRECHAT_USER_ID}}', + 'Content-Type': 'application/json', + }; + + const result = resolveHeaders(headers); + + expect(result).toEqual({ + 'User-Id': '{{LIBRECHAT_USER_ID}}', + 'Content-Type': 'application/json', + }); + }); + + it('should not process user ID placeholder when user has no id', () => { + const user = { id: '' }; + const headers = { + 'User-Id': '{{LIBRECHAT_USER_ID}}', + 'Content-Type': 'application/json', + }; + + const result = resolveHeaders(headers, user); + + expect(result).toEqual({ + 'User-Id': '{{LIBRECHAT_USER_ID}}', + 'Content-Type': 'application/json', + }); + }); + + it('should process full user object placeholders', 
() => { + const user = createTestUser({ + id: 'user-123', + email: 'test@example.com', + username: 'testuser', + name: 'Test User', + role: 'admin', + }); + + const headers = { + 'User-Email': '{{LIBRECHAT_USER_EMAIL}}', + 'User-Name': '{{LIBRECHAT_USER_NAME}}', + 'User-Username': '{{LIBRECHAT_USER_USERNAME}}', + 'User-Role': '{{LIBRECHAT_USER_ROLE}}', + 'User-Id': '{{LIBRECHAT_USER_ID}}', + 'Content-Type': 'application/json', + }; + + const result = resolveHeaders(headers, user); + + expect(result).toEqual({ + 'User-Email': 'test@example.com', + 'User-Name': 'Test User', + 'User-Username': 'testuser', + 'User-Role': 'admin', + 'User-Id': 'user-123', + 'Content-Type': 'application/json', + }); + }); + + it('should handle missing user fields gracefully', () => { + const user = createTestUser({ + id: 'user-123', + email: 'test@example.com', + username: undefined, // explicitly set to undefined + }); + + const headers = { + 'User-Email': '{{LIBRECHAT_USER_EMAIL}}', + 'User-Username': '{{LIBRECHAT_USER_USERNAME}}', + 'Non-Existent': '{{LIBRECHAT_USER_NONEXISTENT}}', + }; + + const result = resolveHeaders(headers, user); + + expect(result).toEqual({ + 'User-Email': 'test@example.com', + 'User-Username': '', // Empty string for missing field + 'Non-Existent': '{{LIBRECHAT_USER_NONEXISTENT}}', // Unchanged for non-existent field + }); + }); + + it('should process custom user variables', () => { + const user = { id: 'user-123' }; + const customUserVars = { + CUSTOM_TOKEN: 'user-specific-token', + REGION: 'us-west-1', + }; + + const headers = { + Authorization: 'Bearer {{CUSTOM_TOKEN}}', + 'X-Region': '{{REGION}}', + 'X-System-Key': '${TEST_API_KEY}', + 'X-User-Id': '{{LIBRECHAT_USER_ID}}', + }; + + const result = resolveHeaders(headers, user, customUserVars); + + expect(result).toEqual({ + Authorization: 'Bearer user-specific-token', + 'X-Region': 'us-west-1', + 'X-System-Key': 'test-api-key-value', + 'X-User-Id': 'user-123', + }); + }); + + it('should prioritize custom 
user variables over user fields', () => { + const user = createTestUser({ + id: 'user-123', + email: 'user-email@example.com', + }); + const customUserVars = { + LIBRECHAT_USER_EMAIL: 'custom-email@example.com', + }; + + const headers = { + 'Test-Email': '{{LIBRECHAT_USER_EMAIL}}', + }; + + const result = resolveHeaders(headers, user, customUserVars); + + expect(result).toEqual({ + 'Test-Email': 'custom-email@example.com', + }); + }); + + it('should handle boolean user fields', () => { + const user = createTestUser({ + id: 'user-123', + // Note: TUser doesn't have these boolean fields, so we'll test with string fields + role: 'admin', + }); + + const headers = { + 'User-Role': '{{LIBRECHAT_USER_ROLE}}', + 'User-Id': '{{LIBRECHAT_USER_ID}}', + }; + + const result = resolveHeaders(headers, user); + + expect(result).toEqual({ + 'User-Role': 'admin', + 'User-Id': 'user-123', + }); + }); + + it('should handle multiple occurrences of the same placeholder', () => { + const user = createTestUser({ + id: 'user-123', + email: 'test@example.com', + }); + + const headers = { + 'Primary-Email': '{{LIBRECHAT_USER_EMAIL}}', + 'Secondary-Email': '{{LIBRECHAT_USER_EMAIL}}', + 'Backup-Email': '{{LIBRECHAT_USER_EMAIL}}', + }; + + const result = resolveHeaders(headers, user); + + expect(result).toEqual({ + 'Primary-Email': 'test@example.com', + 'Secondary-Email': 'test@example.com', + 'Backup-Email': 'test@example.com', + }); + }); + + it('should handle mixed variable types in the same headers object', () => { + const user = createTestUser({ + id: 'user-123', + email: 'test@example.com', + }); + const customUserVars = { + CUSTOM_TOKEN: 'secret-token', + }; + + const headers = { + Authorization: 'Bearer {{CUSTOM_TOKEN}}', + 'X-User-Id': '{{LIBRECHAT_USER_ID}}', + 'X-System-Key': '${TEST_API_KEY}', + 'X-User-Email': '{{LIBRECHAT_USER_EMAIL}}', + 'Content-Type': 'application/json', + }; + + const result = resolveHeaders(headers, user, customUserVars); + + expect(result).toEqual({ + 
Authorization: 'Bearer secret-token', + 'X-User-Id': 'user-123', + 'X-System-Key': 'test-api-key-value', + 'X-User-Email': 'test@example.com', + 'Content-Type': 'application/json', + }); + }); + + it('should not modify the original headers object', () => { + const originalHeaders = { + Authorization: '${TEST_API_KEY}', + 'User-Id': '{{LIBRECHAT_USER_ID}}', + }; + const user = { id: 'user-123' }; + + const result = resolveHeaders(originalHeaders, user); + + // Verify the result is processed + expect(result).toEqual({ + Authorization: 'test-api-key-value', + 'User-Id': 'user-123', + }); + + // Verify the original object is unchanged + expect(originalHeaders).toEqual({ + Authorization: '${TEST_API_KEY}', + 'User-Id': '{{LIBRECHAT_USER_ID}}', + }); + }); + + it('should handle special characters in custom variable names', () => { + const user = { id: 'user-123' }; + const customUserVars = { + 'CUSTOM-VAR': 'dash-value', + CUSTOM_VAR: 'underscore-value', + 'CUSTOM.VAR': 'dot-value', + }; + + const headers = { + 'Dash-Header': '{{CUSTOM-VAR}}', + 'Underscore-Header': '{{CUSTOM_VAR}}', + 'Dot-Header': '{{CUSTOM.VAR}}', + }; + + const result = resolveHeaders(headers, user, customUserVars); + + expect(result).toEqual({ + 'Dash-Header': 'dash-value', + 'Underscore-Header': 'underscore-value', + 'Dot-Header': 'dot-value', + }); + }); +}); diff --git a/packages/api/src/utils/env.ts b/packages/api/src/utils/env.ts new file mode 100644 index 0000000000..83530b93e4 --- /dev/null +++ b/packages/api/src/utils/env.ts @@ -0,0 +1,170 @@ +import { extractEnvVariable } from 'librechat-data-provider'; +import type { TUser, MCPOptions } from 'librechat-data-provider'; + +/** + * List of allowed user fields that can be used in MCP environment variables. + * These are non-sensitive string/boolean fields from the IUser interface. 
+ */ +const ALLOWED_USER_FIELDS = [ + 'id', + 'name', + 'username', + 'email', + 'provider', + 'role', + 'googleId', + 'facebookId', + 'openidId', + 'samlId', + 'ldapId', + 'githubId', + 'discordId', + 'appleId', + 'emailVerified', + 'twoFactorEnabled', + 'termsAccepted', +] as const; + +/** + * Processes a string value to replace user field placeholders + * @param value - The string value to process + * @param user - The user object + * @returns The processed string with placeholders replaced + */ +function processUserPlaceholders(value: string, user?: TUser): string { + if (!user || typeof value !== 'string') { + return value; + } + + for (const field of ALLOWED_USER_FIELDS) { + const placeholder = `{{LIBRECHAT_USER_${field.toUpperCase()}}}`; + if (!value.includes(placeholder)) { + continue; + } + + const fieldValue = user[field as keyof TUser]; + + // Skip replacement if field doesn't exist in user object + if (!(field in user)) { + continue; + } + + // Special case for 'id' field: skip if undefined or empty + if (field === 'id' && (fieldValue === undefined || fieldValue === '')) { + continue; + } + + const replacementValue = fieldValue == null ? '' : String(fieldValue); + value = value.replace(new RegExp(placeholder, 'g'), replacementValue); + } + + return value; +} + +/** + * Processes a single string value by replacing various types of placeholders + * @param originalValue - The original string value to process + * @param customUserVars - Optional custom user variables to replace placeholders + * @param user - Optional user object for replacing user field placeholders + * @returns The processed string with all placeholders replaced + */ +function processSingleValue({ + originalValue, + customUserVars, + user, +}: { + originalValue: string; + customUserVars?: Record; + user?: TUser; +}): string { + let value = originalValue; + + // 1. 
Replace custom user variables + if (customUserVars) { + for (const [varName, varVal] of Object.entries(customUserVars)) { + /** Escaped varName for use in regex to avoid issues with special characters */ + const escapedVarName = varName.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); + const placeholderRegex = new RegExp(`\\{\\{${escapedVarName}\\}\\}`, 'g'); + value = value.replace(placeholderRegex, varVal); + } + } + + // 2. Replace user field placeholders (e.g., {{LIBRECHAT_USER_EMAIL}}, {{LIBRECHAT_USER_ID}}) + value = processUserPlaceholders(value, user); + + // 3. Replace system environment variables + value = extractEnvVariable(value); + + return value; +} + +/** + * Recursively processes an object to replace environment variables in string values + * @param obj - The object to process + * @param user - The user object containing all user fields + * @param customUserVars - vars that user set in settings + * @returns - The processed object with environment variables replaced + */ +export function processMCPEnv( + obj: Readonly, + user?: TUser, + customUserVars?: Record, +): MCPOptions { + if (obj === null || obj === undefined) { + return obj; + } + + const newObj: MCPOptions = structuredClone(obj); + + if ('env' in newObj && newObj.env) { + const processedEnv: Record = {}; + for (const [key, originalValue] of Object.entries(newObj.env)) { + processedEnv[key] = processSingleValue({ originalValue, customUserVars, user }); + } + newObj.env = processedEnv; + } + + // Process headers if they exist (for WebSocket, SSE, StreamableHTTP types) + // Note: `env` and `headers` are on different branches of the MCPOptions union type. 
+ if ('headers' in newObj && newObj.headers) { + const processedHeaders: Record = {}; + for (const [key, originalValue] of Object.entries(newObj.headers)) { + processedHeaders[key] = processSingleValue({ originalValue, customUserVars, user }); + } + newObj.headers = processedHeaders; + } + + // Process URL if it exists (for WebSocket, SSE, StreamableHTTP types) + if ('url' in newObj && newObj.url) { + newObj.url = processSingleValue({ originalValue: newObj.url, customUserVars, user }); + } + + return newObj; +} + +/** + * Resolves header values by replacing user placeholders, custom variables, and environment variables + * @param headers - The headers object to process + * @param user - Optional user object for replacing user field placeholders (can be partial with just id) + * @param customUserVars - Optional custom user variables to replace placeholders + * @returns - The processed headers with all placeholders replaced + */ +export function resolveHeaders( + headers: Record | undefined, + user?: Partial | { id: string }, + customUserVars?: Record, +) { + const resolvedHeaders = { ...(headers ?? 
{}) }; + + if (headers && typeof headers === 'object' && !Array.isArray(headers)) { + Object.keys(headers).forEach((key) => { + resolvedHeaders[key] = processSingleValue({ + originalValue: headers[key], + customUserVars, + user: user as TUser, + }); + }); + } + + return resolvedHeaders; +} diff --git a/packages/api/src/utils/index.ts b/packages/api/src/utils/index.ts index e2cc1ab51b..807686ca44 100644 --- a/packages/api/src/utils/index.ts +++ b/packages/api/src/utils/index.ts @@ -1,6 +1,7 @@ export * from './axios'; export * from './azure'; export * from './common'; +export * from './env'; export * from './events'; export * from './files'; export * from './generators'; diff --git a/packages/data-provider/src/mcp.ts b/packages/data-provider/src/mcp.ts index 05b37115fc..696777131d 100644 --- a/packages/data-provider/src/mcp.ts +++ b/packages/data-provider/src/mcp.ts @@ -1,7 +1,6 @@ import { z } from 'zod'; -import type { TUser } from './types'; -import { extractEnvVariable } from './utils'; import { TokenExchangeMethodEnum } from './types/agents'; +import { extractEnvVariable } from './utils'; const BaseOptionsSchema = z.object({ iconPath: z.string().optional(), @@ -153,130 +152,3 @@ export const MCPOptionsSchema = z.union([ export const MCPServersSchema = z.record(z.string(), MCPOptionsSchema); export type MCPOptions = z.infer; - -/** - * List of allowed user fields that can be used in MCP environment variables. - * These are non-sensitive string/boolean fields from the IUser interface. 
- */ -const ALLOWED_USER_FIELDS = [ - 'name', - 'username', - 'email', - 'provider', - 'role', - 'googleId', - 'facebookId', - 'openidId', - 'samlId', - 'ldapId', - 'githubId', - 'discordId', - 'appleId', - 'emailVerified', - 'twoFactorEnabled', - 'termsAccepted', -] as const; - -/** - * Processes a string value to replace user field placeholders - * @param value - The string value to process - * @param user - The user object - * @returns The processed string with placeholders replaced - */ -function processUserPlaceholders(value: string, user?: TUser): string { - if (!user || typeof value !== 'string') { - return value; - } - - for (const field of ALLOWED_USER_FIELDS) { - const placeholder = `{{LIBRECHAT_USER_${field.toUpperCase()}}}`; - if (value.includes(placeholder)) { - const fieldValue = user[field as keyof TUser]; - const replacementValue = fieldValue != null ? String(fieldValue) : ''; - value = value.replace(new RegExp(placeholder, 'g'), replacementValue); - } - } - - return value; -} - -function processSingleValue({ - originalValue, - customUserVars, - user, -}: { - originalValue: string; - customUserVars?: Record; - user?: TUser; -}): string { - let value = originalValue; - - // 1. Replace custom user variables - if (customUserVars) { - for (const [varName, varVal] of Object.entries(customUserVars)) { - /** Escaped varName for use in regex to avoid issues with special characters */ - const escapedVarName = varName.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); - const placeholderRegex = new RegExp(`\\{\\{${escapedVarName}\\}\\}`, 'g'); - value = value.replace(placeholderRegex, varVal); - } - } - - // 2.A. Special handling for LIBRECHAT_USER_ID placeholder - // This ensures {{LIBRECHAT_USER_ID}} is replaced only if user.id is available. - // If user.id is null/undefined, the placeholder remains - if (user && user.id != null && value.includes('{{LIBRECHAT_USER_ID}}')) { - value = value.replace(/\{\{LIBRECHAT_USER_ID\}\}/g, String(user.id)); - } - - // 2.B. 
Replace other standard user field placeholders (e.g., {{LIBRECHAT_USER_EMAIL}}) - value = processUserPlaceholders(value, user); - - // 3. Replace system environment variables - value = extractEnvVariable(value); - - return value; -} - -/** - * Recursively processes an object to replace environment variables in string values - * @param obj - The object to process - * @param user - The user object containing all user fields - * @param customUserVars - vars that user set in settings - * @returns - The processed object with environment variables replaced - */ -export function processMCPEnv( - obj: Readonly, - user?: TUser, - customUserVars?: Record, -): MCPOptions { - if (obj === null || obj === undefined) { - return obj; - } - - const newObj: MCPOptions = structuredClone(obj); - - if ('env' in newObj && newObj.env) { - const processedEnv: Record = {}; - for (const [key, originalValue] of Object.entries(newObj.env)) { - processedEnv[key] = processSingleValue({ originalValue, customUserVars, user }); - } - newObj.env = processedEnv; - } - - // Process headers if they exist (for WebSocket, SSE, StreamableHTTP types) - // Note: `env` and `headers` are on different branches of the MCPOptions union type. 
- if ('headers' in newObj && newObj.headers) { - const processedHeaders: Record = {}; - for (const [key, originalValue] of Object.entries(newObj.headers)) { - processedHeaders[key] = processSingleValue({ originalValue, customUserVars, user }); - } - newObj.headers = processedHeaders; - } - - // Process URL if it exists (for WebSocket, SSE, StreamableHTTP types) - if ('url' in newObj && newObj.url) { - newObj.url = processSingleValue({ originalValue: newObj.url, customUserVars, user }); - } - - return newObj; -} diff --git a/packages/data-provider/src/parsers.ts b/packages/data-provider/src/parsers.ts index 774bc56173..42379ad02e 100644 --- a/packages/data-provider/src/parsers.ts +++ b/packages/data-provider/src/parsers.ts @@ -122,19 +122,6 @@ export function errorsToString(errors: ZodIssue[]) { .join(' '); } -/** Resolves header values to env variables if detected */ -export function resolveHeaders(headers: Record | undefined) { - const resolvedHeaders = { ...(headers ?? {}) }; - - if (headers && typeof headers === 'object' && !Array.isArray(headers)) { - Object.keys(headers).forEach((key) => { - resolvedHeaders[key] = extractEnvVariable(headers[key]); - }); - } - - return resolvedHeaders; -} - export function getFirstDefinedValue(possibleValues: string[]) { let returnValue; for (const value of possibleValues) { From 2b2f7fe28926900f4681154d244e43ceca13f1c7 Mon Sep 17 00:00:00 2001 From: Dustin Healy <54083382+dustinhealy@users.noreply.github.com> Date: Mon, 23 Jun 2025 10:21:01 -0700 Subject: [PATCH 03/65] =?UTF-8?q?=E2=9C=A8=20feat:=20Configurable=20MCP=20?= =?UTF-8?q?Dropdown=20Placeholder=20(#7988)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * new env variable for mcp label * 🔄 refactor: Update MCPSelect placeholderText to draw from interface section of librechat.yaml rather than .env * 🧹 chore: extract mcpServers schema for better maintainability * 🔄 refactor: Update MCPSelect and useMCPSelect to utilize TPlugin 
type for better type consistency * 🔄 refactor: Pass placeholder from startupConfig to MCPSubMenu for improved localization * 🔄 refactor: Integrate startupConfig into BadgeRowContext and related components for enhanced configuration management --------- Co-authored-by: mwbrandao Co-authored-by: Danny Avila --- api/server/services/start/interface.js | 1 + client/src/Providers/BadgeRowContext.tsx | 6 ++++++ client/src/components/Chat/Input/MCPSelect.tsx | 11 ++++++----- client/src/components/Chat/Input/MCPSubMenu.tsx | 4 +++- .../src/components/Chat/Input/ToolsDropdown.tsx | 17 ++++++++++++++--- client/src/hooks/Plugins/useMCPSelect.ts | 15 ++++----------- librechat.example.yaml | 3 +++ packages/data-provider/src/config.ts | 8 ++++++++ packages/data-provider/src/schemas.ts | 2 +- 9 files changed, 46 insertions(+), 21 deletions(-) diff --git a/api/server/services/start/interface.js b/api/server/services/start/interface.js index c98fdb60bc..5c08b1af2e 100644 --- a/api/server/services/start/interface.js +++ b/api/server/services/start/interface.js @@ -41,6 +41,7 @@ async function loadDefaultInterface(config, configDefaults, roleName = SystemRol sidePanel: interfaceConfig?.sidePanel ?? defaults.sidePanel, privacyPolicy: interfaceConfig?.privacyPolicy ?? defaults.privacyPolicy, termsOfService: interfaceConfig?.termsOfService ?? defaults.termsOfService, + mcpServers: interfaceConfig?.mcpServers ?? defaults.mcpServers, bookmarks: interfaceConfig?.bookmarks ?? defaults.bookmarks, memories: shouldDisableMemories ? false : (interfaceConfig?.memories ?? defaults.memories), prompts: interfaceConfig?.prompts ?? 
defaults.prompts, diff --git a/client/src/Providers/BadgeRowContext.tsx b/client/src/Providers/BadgeRowContext.tsx index 860c59da46..01590b1948 100644 --- a/client/src/Providers/BadgeRowContext.tsx +++ b/client/src/Providers/BadgeRowContext.tsx @@ -1,6 +1,7 @@ import React, { createContext, useContext } from 'react'; import { Tools, LocalStorageKeys } from 'librechat-data-provider'; import { useMCPSelect, useToolToggle, useCodeApiKeyForm, useSearchApiKeyForm } from '~/hooks'; +import { useGetStartupConfig } from '~/data-provider'; interface BadgeRowContextType { conversationId?: string | null; @@ -10,6 +11,7 @@ interface BadgeRowContextType { fileSearch: ReturnType; codeApiKeyForm: ReturnType; searchApiKeyForm: ReturnType; + startupConfig: ReturnType['data']; } const BadgeRowContext = createContext(undefined); @@ -28,6 +30,9 @@ interface BadgeRowProviderProps { } export default function BadgeRowProvider({ children, conversationId }: BadgeRowProviderProps) { + /** Startup config */ + const { data: startupConfig } = useGetStartupConfig(); + /** MCPSelect hook */ const mcpSelect = useMCPSelect({ conversationId }); @@ -73,6 +78,7 @@ export default function BadgeRowProvider({ children, conversationId }: BadgeRowP mcpSelect, webSearch, fileSearch, + startupConfig, conversationId, codeApiKeyForm, codeInterpreter, diff --git a/client/src/components/Chat/Input/MCPSelect.tsx b/client/src/components/Chat/Input/MCPSelect.tsx index 13c1a4a26a..0a03decd53 100644 --- a/client/src/components/Chat/Input/MCPSelect.tsx +++ b/client/src/components/Chat/Input/MCPSelect.tsx @@ -2,8 +2,7 @@ import React, { memo, useCallback, useState } from 'react'; import { SettingsIcon } from 'lucide-react'; import { Constants } from 'librechat-data-provider'; import { useUpdateUserPluginsMutation } from 'librechat-data-provider/react-query'; -import type { TUpdateUserPlugins } from 'librechat-data-provider'; -import type { McpServerInfo } from '~/hooks/Plugins/useMCPSelect'; +import type { 
TUpdateUserPlugins, TPlugin } from 'librechat-data-provider'; import MCPConfigDialog, { type ConfigFieldDetail } from '~/components/ui/MCPConfigDialog'; import { useToastContext, useBadgeRowContext } from '~/Providers'; import MultiSelect from '~/components/ui/MultiSelect'; @@ -18,11 +17,11 @@ const getBaseMCPPluginKey = (fullPluginKey: string): string => { function MCPSelect() { const localize = useLocalize(); const { showToast } = useToastContext(); - const { mcpSelect } = useBadgeRowContext(); + const { mcpSelect, startupConfig } = useBadgeRowContext(); const { mcpValues, setMCPValues, mcpServerNames, mcpToolDetails, isPinned } = mcpSelect; const [isConfigModalOpen, setIsConfigModalOpen] = useState(false); - const [selectedToolForConfig, setSelectedToolForConfig] = useState(null); + const [selectedToolForConfig, setSelectedToolForConfig] = useState(null); const updateUserPluginsMutation = useUpdateUserPluginsMutation({ onSuccess: () => { @@ -129,6 +128,8 @@ function MCPSelect() { return null; } + const placeholderText = + startupConfig?.interface?.mcpServers?.placeholder || localize('com_ui_mcp_servers'); return ( <> } diff --git a/client/src/components/Chat/Input/MCPSubMenu.tsx b/client/src/components/Chat/Input/MCPSubMenu.tsx index a955f2bd90..16772acd02 100644 --- a/client/src/components/Chat/Input/MCPSubMenu.tsx +++ b/client/src/components/Chat/Input/MCPSubMenu.tsx @@ -11,6 +11,7 @@ interface MCPSubMenuProps { mcpValues?: string[]; mcpServerNames: string[]; handleMCPToggle: (serverName: string) => void; + placeholder?: string; } const MCPSubMenu = ({ @@ -19,6 +20,7 @@ const MCPSubMenu = ({ mcpServerNames, setIsMCPPinned, handleMCPToggle, + placeholder, ...props }: MCPSubMenuProps) => { const localize = useLocalize(); @@ -38,7 +40,7 @@ const MCPSubMenu = ({ >
- {localize('com_ui_mcp_servers')} + {placeholder || localize('com_ui_mcp_servers')}
{ const mcpPlaceholder = startupConfig?.interface?.mcpServers?.placeholder; const dropdownItems = useMemo(() => { - const items: MenuItemProps[] = [ - { - render: () => ( -
- {localize('com_ui_tools')} -
- ), - hideOnClick: false, - }, - ]; - + const items: MenuItemProps[] = []; items.push({ onClick: handleFileSearchToggle, hideOnClick: false, From 1b7e044bf52cdcaafbe5650860b0cce3e149efb1 Mon Sep 17 00:00:00 2001 From: Marco Beretta <81851188+berry-13@users.noreply.github.com> Date: Mon, 23 Jun 2025 20:30:15 +0200 Subject: [PATCH 05/65] =?UTF-8?q?=F0=9F=A4=A9=20style:=20DialogImage,=20Up?= =?UTF-8?q?date=20Stylesheet,=20and=20Improve=20Accessibility=20(#8014)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * 🔧 fix: Adjust typography and border styles for improved readability in markdown components * 🔧 fix: Enhance code block styling in markdown for better visibility and consistency * 🔧 fix: Adjust margins and line heights for improved readability in markdown elements * 🔧 fix: Adjust spacing for horizontal rules in markdown for improved consistency * 🔧 fix: Refactor DialogImage component for improved quality styling and layout consistency * 🔧 fix: Enhance zoom and pan functionality in DialogImage component with improved controls and user experience * 🔧 fix: Improve zoom and pan functionality in DialogImage component with enhanced controls and reset zoom feature --- .../Chat/Messages/Content/DialogImage.tsx | 246 +++++++++++++++--- client/src/locales/en/translation.json | 3 +- client/src/style.css | 146 +++++++---- 3 files changed, 314 insertions(+), 81 deletions(-) diff --git a/client/src/components/Chat/Messages/Content/DialogImage.tsx b/client/src/components/Chat/Messages/Content/DialogImage.tsx index 907902f4ed..0711757df1 100644 --- a/client/src/components/Chat/Messages/Content/DialogImage.tsx +++ b/client/src/components/Chat/Messages/Content/DialogImage.tsx @@ -1,14 +1,33 @@ -import { useState, useEffect } from 'react'; -import { X, ArrowDownToLine, PanelLeftOpen, PanelLeftClose } from 'lucide-react'; +import { useState, useEffect, useCallback, useRef } from 'react'; +import { X, ArrowDownToLine, PanelLeftOpen, 
PanelLeftClose, RotateCcw } from 'lucide-react'; import { Button, OGDialog, OGDialogContent, TooltipAnchor } from '~/components'; import { useLocalize } from '~/hooks'; +const getQualityStyles = (quality: string): string => { + if (quality === 'high') { + return 'bg-green-100 text-green-800'; + } + if (quality === 'low') { + return 'bg-orange-100 text-orange-800'; + } + return 'bg-gray-100 text-gray-800'; +}; + export default function DialogImage({ isOpen, onOpenChange, src = '', downloadImage, args }) { const localize = useLocalize(); const [isPromptOpen, setIsPromptOpen] = useState(false); const [imageSize, setImageSize] = useState(null); - const getImageSize = async (url: string) => { + // Zoom and pan state + const [zoom, setZoom] = useState(1); + const [panX, setPanX] = useState(0); + const [panY, setPanY] = useState(0); + const [isDragging, setIsDragging] = useState(false); + const [dragStart, setDragStart] = useState({ x: 0, y: 0 }); + + const containerRef = useRef(null); + + const getImageSize = useCallback(async (url: string) => { try { const response = await fetch(url, { method: 'HEAD' }); const contentLength = response.headers.get('Content-Length'); @@ -25,7 +44,7 @@ export default function DialogImage({ isOpen, onOpenChange, src = '', downloadIm console.error('Error getting image size:', error); return null; } - }; + }, []); const formatFileSize = (bytes: number): string => { if (bytes === 0) return '0 Bytes'; @@ -37,11 +56,129 @@ export default function DialogImage({ isOpen, onOpenChange, src = '', downloadIm return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i]; }; + const getImageMaxWidth = () => { + // On mobile (when panel overlays), use full width minus padding + // On desktop, account for the side panel width + if (isPromptOpen) { + return window.innerWidth >= 640 ? 
'calc(100vw - 22rem)' : 'calc(100vw - 2rem)'; + } + return 'calc(100vw - 2rem)'; + }; + + const resetZoom = useCallback(() => { + setZoom(1); + setPanX(0); + setPanY(0); + }, []); + + const getCursor = () => { + if (zoom <= 1) return 'default'; + return isDragging ? 'grabbing' : 'grab'; + }; + + const handleDoubleClick = useCallback(() => { + if (zoom > 1) { + resetZoom(); + } else { + // Zoom in to 2x on double click when at normal zoom + setZoom(2); + } + }, [zoom, resetZoom]); + + const handleWheel = useCallback( + (e: React.WheelEvent) => { + e.preventDefault(); + if (!containerRef.current) return; + + const rect = containerRef.current.getBoundingClientRect(); + const mouseX = e.clientX - rect.left; + const mouseY = e.clientY - rect.top; + + // Calculate zoom factor + const zoomFactor = e.deltaY > 0 ? 0.9 : 1.1; + const newZoom = Math.min(Math.max(zoom * zoomFactor, 1), 5); + + if (newZoom === zoom) return; + + // If zooming back to 1, reset pan to center the image + if (newZoom === 1) { + setZoom(1); + setPanX(0); + setPanY(0); + return; + } + + // Calculate the zoom center relative to the current viewport + const containerCenterX = rect.width / 2; + const containerCenterY = rect.height / 2; + + // Calculate new pan position to zoom towards mouse cursor + const zoomRatio = newZoom / zoom; + const deltaX = (mouseX - containerCenterX - panX) * (zoomRatio - 1); + const deltaY = (mouseY - containerCenterY - panY) * (zoomRatio - 1); + + setZoom(newZoom); + setPanX(panX - deltaX); + setPanY(panY - deltaY); + }, + [zoom, panX, panY], + ); + + const handleMouseDown = useCallback( + (e: React.MouseEvent) => { + e.preventDefault(); + if (zoom <= 1) return; + setIsDragging(true); + setDragStart({ + x: e.clientX - panX, + y: e.clientY - panY, + }); + }, + [zoom, panX, panY], + ); + + const handleMouseMove = useCallback( + (e: React.MouseEvent) => { + if (!isDragging || zoom <= 1) return; + const newPanX = e.clientX - dragStart.x; + const newPanY = e.clientY - dragStart.y; 
+ setPanX(newPanX); + setPanY(newPanY); + }, + [isDragging, dragStart, zoom], + ); + const handleMouseUp = useCallback(() => { + setIsDragging(false); + }, []); + + useEffect(() => { + const onKey = (e: KeyboardEvent) => e.key === 'Escape' && resetZoom(); + document.addEventListener('keydown', onKey); + return () => document.removeEventListener('keydown', onKey); + }, [resetZoom]); + useEffect(() => { if (isOpen && src) { getImageSize(src).then(setImageSize); + resetZoom(); } - }, [isOpen, src]); + }, [isOpen, src, getImageSize, resetZoom]); + + // Ensure image is centered when zoom changes to 1 + useEffect(() => { + if (zoom === 1) { + setPanX(0); + setPanY(0); + } + }, [zoom]); + + // Reset pan when panel opens/closes to maintain centering + useEffect(() => { + if (zoom === 1) { + setPanX(0); + setPanY(0); + } + }, [isPromptOpen, zoom]); return ( @@ -52,7 +189,7 @@ export default function DialogImage({ isOpen, onOpenChange, src = '', downloadIm overlayClassName="bg-surface-primary opacity-95 z-50" >
- + } /> -
+
+ {zoom > 1 && ( + + + + } + /> + )} {isPromptOpen ? ( - + ) : ( - + )} } @@ -100,36 +247,81 @@ export default function DialogImage({ isOpen, onOpenChange, src = '', downloadIm {/* Main content area with image */}
-
- Image 1 ? 'hidden' : 'visible', + minHeight: 0, // Allow flexbox to shrink + }} + > +
+ > + Image +
{/* Side Panel */}
-
-
+ {/* Mobile pull handle - removed for cleaner look */} + +
+ {/* Mobile close button */} +
+

+ {localize('com_ui_image_details')} +

+ +
+ +

{localize('com_ui_image_details')}

-
+
{/* Prompt Section */}

@@ -157,13 +349,7 @@ export default function DialogImage({ isOpen, onOpenChange, src = '', downloadIm
{localize('com_ui_quality')}: {args?.quality || 'Standard'} diff --git a/client/src/locales/en/translation.json b/client/src/locales/en/translation.json index 34ef2a7448..b875644a20 100644 --- a/client/src/locales/en/translation.json +++ b/client/src/locales/en/translation.json @@ -1054,6 +1054,7 @@ "com_ui_x_selected": "{{0}} selected", "com_ui_yes": "Yes", "com_ui_zoom": "Zoom", + "com_ui_reset_zoom": "Reset Zoom", "com_user_message": "You", "com_warning_resubmit_unsupported": "Resubmitting the AI message is not supported for this endpoint." -} \ No newline at end of file +} diff --git a/client/src/style.css b/client/src/style.css index 438f197a55..4139dfd043 100644 --- a/client/src/style.css +++ b/client/src/style.css @@ -818,14 +818,14 @@ pre { max-width: 65ch; font-size: var(--markdown-font-size, var(--font-size-base)); line-height: calc( - 28px * var(--markdown-font-size, var(--font-size-base)) / var(--font-size-base) + 22px * var(--markdown-font-size, var(--font-size-base)) / var(--font-size-base) ); } .prose :where([class~='lead']):not(:where([class~='not-prose'] *)) { color: var(--tw-prose-lead); font-size: 1.25em; - line-height: 1.6; + line-height: 1.3; margin-bottom: 1.2em; margin-top: 1.2em; } @@ -853,8 +853,8 @@ pre { .prose :where(hr):not(:where([class~='not-prose'] *)) { border-color: var(--tw-prose-hr); border-top-width: 1px; - margin-bottom: 3em; - margin-top: 3em; + margin-bottom: 0.8em; + margin-top: 0.8em; } .prose :where(blockquote):not(:where([class~='not-prose'] *)) { border-left-color: var(--tw-prose-quote-borders); @@ -878,9 +878,9 @@ pre { color: var(--tw-prose-headings); font-size: 2.25em; font-weight: 800; - line-height: 1.1111111; - margin-bottom: 0.8888889em; - margin-top: 0; + line-height: 1; + margin-bottom: 0.4em; + margin-top: 0.6em; } .prose :where(h1 strong):not(:where([class~='not-prose'] *)) { color: inherit; @@ -890,9 +890,9 @@ pre { color: var(--tw-prose-headings); font-size: 1.5em; font-weight: 700; - line-height: 1.3333333; 
- margin-bottom: 1em; - margin-top: 2em; + line-height: 1.1; + margin-bottom: 0.4em; + margin-top: 0.8em; } .prose :where(h2 strong):not(:where([class~='not-prose'] *)) { color: inherit; @@ -902,9 +902,9 @@ pre { color: var(--tw-prose-headings); font-size: 1.25em; font-weight: 600; - line-height: 1.6; - margin-bottom: 0.6em; - margin-top: 1.6em; + line-height: 1.3; + margin-bottom: 0.3em; + margin-top: 0.6em; } .prose :where(h3 strong):not(:where([class~='not-prose'] *)) { color: inherit; @@ -913,9 +913,9 @@ pre { .prose :where(h4):not(:where([class~='not-prose'] *)) { color: var(--tw-prose-headings); font-weight: 600; - line-height: 1.5; - margin-bottom: 0.5em; - margin-top: 1.5em; + line-height: 1.2; + margin-bottom: 0.3em; + margin-top: 0.5em; } .prose :where(h4 strong):not(:where([class~='not-prose'] *)) { color: inherit; @@ -932,19 +932,19 @@ pre { .prose :where(figcaption):not(:where([class~='not-prose'] *)) { color: var(--tw-prose-captions); font-size: 0.875em; - line-height: 1.4285714; + line-height: 1.2; margin-top: 0.8571429em; } .prose :where(code):not(:where([class~='not-prose'] *)) { color: var(--tw-prose-code); font-size: 0.875em; font-weight: 600; + background-color: var(--gray-200); + padding: 0.125rem 0.25rem; + border-radius: 0.35rem; } -.prose :where(code):not(:where([class~='not-prose'] *)):before { - content: '`'; -} -.prose :where(code):not(:where([class~='not-prose'] *)):after { - content: '`'; +.dark .prose :where(code):not(:where([class~='not-prose'] *)):not(:where(pre *)) { + background-color: var(--gray-600); } .prose :where(a code):not(:where([class~='not-prose'] *)) { color: inherit; @@ -971,11 +971,11 @@ pre { } .prose :where(pre):not(:where([class~='not-prose'] *)) { background-color: transparent; - border-radius: 0.375rem; + border-radius: 0.75rem; color: currentColor; font-size: 0.875em; font-weight: 400; - line-height: 1.7142857; + line-height: 1.4; margin: 0; overflow-x: auto; padding: 0; @@ -999,7 +999,7 @@ pre { } .prose 
:where(table):not(:where([class~='not-prose'] *)) { font-size: 0.875em; - line-height: 1.7142857; + line-height: 1.4; margin-bottom: 2em; margin-top: 2em; table-layout: auto; @@ -1036,14 +1036,14 @@ pre { vertical-align: top; } .prose { - --tw-prose-body: #374151; + --tw-prose-body: #424242; --tw-prose-headings: #111827; --tw-prose-lead: #4b5563; --tw-prose-links: #0066cc; --tw-prose-bold: #111827; --tw-prose-counters: #6b7280; --tw-prose-bullets: #d1d5db; - --tw-prose-hr: #e5e7eb; + --tw-prose-hr: #cdcdcd; --tw-prose-quotes: #111827; --tw-prose-quote-borders: #e5e7eb; --tw-prose-captions: #6b7280; @@ -1059,17 +1059,17 @@ pre { --tw-prose-invert-bold: #fff; --tw-prose-invert-counters: #9ca3af; --tw-prose-invert-bullets: #4b5563; - --tw-prose-invert-hr: #374151; + --tw-prose-invert-hr: #424242; --tw-prose-invert-quotes: #f3f4f6; - --tw-prose-invert-quote-borders: #374151; + --tw-prose-invert-quote-borders: #424242; --tw-prose-invert-captions: #9ca3af; --tw-prose-invert-code: #fff; --tw-prose-invert-pre-code: #d1d5db; --tw-prose-invert-pre-bg: rgba(0, 0, 0, 0.5); --tw-prose-invert-th-borders: #4b5563; - --tw-prose-invert-td-borders: #374151; + --tw-prose-invert-td-borders: #424242; font-size: 1rem; - line-height: 1.75; + line-height: 1.4; } .prose :where(p):not(:where([class~='not-prose'] *)) { margin-bottom: 1.25em; @@ -1112,6 +1112,13 @@ pre { .prose :where(h4 + *):not(:where([class~='not-prose'] *)) { margin-top: 0; } +/* Ensure symmetrical spacing around hr */ +.prose :where(* + hr):not(:where([class~='not-prose'] *)) { + margin-top: 0.8em; +} +.prose :where(hr + h1, hr + h2, hr + h3, hr + h4):not(:where([class~='not-prose'] *)) { + margin-top: 0.4em; +} .prose :where(thead th:first-child):not(:where([class~='not-prose'] *)) { padding-left: 0; } @@ -1213,6 +1220,14 @@ pre { .prose-2xl :where(.prose > :last-child):not(:where([class~='not-prose'] *)) { margin-bottom: 0; } +.prose :where(ul > li):has(input[type='checkbox']):not(:where([class~='not-prose'] *)) { + 
margin-bottom: 0; + margin-top: 0; +} +.prose :where(ul > li):has(input[type='checkbox']) p:not(:where([class~='not-prose'] *)) { + margin-bottom: 0; + margin-top: 0; +} code, pre { @@ -1484,7 +1499,7 @@ html { max-width: none; font-size: var(--markdown-font-size, var(--font-size-base)); line-height: calc( - 28px * var(--markdown-font-size, var(--font-size-base)) / var(--font-size-base) + 22px * var(--markdown-font-size, var(--font-size-base)) / var(--font-size-base) ); } @@ -1496,8 +1511,8 @@ html { } .markdown h2 { - margin-bottom: 1rem; - margin-top: 2rem; + margin-bottom: 0.4rem; + margin-top: 0.8rem; } .markdown h3 { @@ -1507,8 +1522,8 @@ html { .markdown h3, .markdown h4 { - margin-bottom: 0.5rem; - margin-top: 1rem; + margin-bottom: 0.3rem; + margin-top: 0.6rem; } .markdown h4 { @@ -1523,7 +1538,7 @@ html { .markdown blockquote { --tw-border-opacity: 1; - border-color: rgba(142, 142, 160, var(--tw-border-opacity)); + border-color: var(--gray-400); border-left-width: 2px; line-height: 1rem; padding-left: 1rem; @@ -1551,6 +1566,7 @@ html { .markdown th:last-child { border-right-width: 1px; + border-color: #d1d5db; border-top-right-radius: 0.375rem; } @@ -1751,16 +1767,16 @@ html { font-weight: 600; } .markdown h2 { - margin-bottom: 1rem; - margin-top: 2rem; + margin-bottom: 0.4rem; + margin-top: 0.8rem; } .markdown h3 { font-weight: 600; } .markdown h3, .markdown h4 { - margin-bottom: 0.5rem; - margin-top: 1rem; + margin-bottom: 0.3rem; + margin-top: 0.6rem; } .markdown h4 { font-weight: 400; @@ -1770,45 +1786,63 @@ html { } .markdown blockquote { --tw-border-opacity: 1; - border-color: rgba(142, 142, 160, var(--tw-border-opacity)); + border-color: var(--gray-300); border-left-width: 2px; line-height: 1rem; padding-left: 1rem; } +.dark .markdown blockquote { + border-color: var(--gray-600); +} .markdown table { --tw-border-spacing-x: 0px; --tw-border-spacing-y: 0px; border-collapse: separate; border-spacing: var(--tw-border-spacing-x) 
var(--tw-border-spacing-y); width: 100%; + border-color: var(--gray-300); } .markdown th { - background-color: rgba(236, 236, 241, 0.2); + background-color: var(--gray-100); border-bottom-width: 1px; border-left-width: 1px; border-top-width: 1px; + border-color: var(--gray-300); padding: 0.25rem 0.75rem; + font-weight: 600; +} +.dark .markdown th { + border-color: var(--gray-600); + background-color: var(--gray-600); } .markdown th:first-child { - border-top-left-radius: 0.375rem; + border-top-left-radius: 0.75rem; } .markdown th:last-child { border-right-width: 1px; - border-top-right-radius: 0.375rem; + border-top-right-radius: 0.75rem; } .markdown td { border-bottom-width: 1px; border-left-width: 1px; + border-color: var(--gray-300); padding: 0.25rem 0.75rem; } .markdown td:last-child { border-right-width: 1px; + border-color: var(--gray-300); +} +.dark .markdown td { + border-color: var(--gray-600); +} +.dark .markdown td:last-child { + border-color: var(--gray-600); } .markdown tbody tr:last-child td:first-child { - border-bottom-left-radius: 0.375rem; + border-bottom-left-radius: 0.75rem; } .markdown tbody tr:last-child td:last-child { - border-bottom-right-radius: 0.375rem; + border-bottom-right-radius: 0.75rem; } .markdown a { text-decoration-line: underline; @@ -2011,7 +2045,7 @@ html { .dark .assistant-item:after { --tw-shadow: inset 0 0 0 1px rgba(0, 0, 0, 0.25); - --tw-shadow-colored: inset 0 0 0 1px var(--tw-shadow-color); + --tw-shadow-colored: inset 0 0 0 0 1px var(--tw-shadow-color); } .result-streaming > :not(ol):not(ul):not(pre):last-child:after, @@ -2248,7 +2282,13 @@ html { /* Nested unordered lists */ .prose ul ul, .markdown ul ul { - list-style-type: circle; + list-style-type: disc; +} + +.prose ul ul > li::marker, +.markdown ul ul > li::marker { + color: var(--tw-prose-bullets); + font-size: 0.8em; } .prose ul ul ul, @@ -2256,6 +2296,12 @@ html { list-style-type: square; } +.prose ul ul ul > li::marker, +.markdown ul ul ul > li::marker { + 
color: var(--tw-prose-bullets); + font-size: 0.7em; +} + /* Nested lists */ .prose ol ol, .prose ul ul, @@ -2450,7 +2496,7 @@ html { .message-content { font-size: var(--markdown-font-size, var(--font-size-base)); - line-height: 1.75; + line-height: 1.4; } .message-content pre code { From d39b99971fb29d7a0ec51f2060fddbb8ce7f82f5 Mon Sep 17 00:00:00 2001 From: Danny Avila Date: Mon, 23 Jun 2025 19:44:24 -0400 Subject: [PATCH 06/65] =?UTF-8?q?=F0=9F=A7=A0=20fix:=20Agent=20Title=20Con?= =?UTF-8?q?fig=20&=20Resource=20Handling=20(#8028)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * 🔧 fix: enhance client options handling in AgentClient and set default recursion limit - Updated the recursion limit to default to 25 if not specified in agentsEConfig. - Enhanced client options in AgentClient to include model parameters such as apiKey and anthropicApiUrl from agentModelParams. - Updated requestOptions in the anthropic endpoint to use reverseProxyUrl as anthropicApiUrl. * Enhance LLM configuration tests with edge case handling * chore add return type annotation for getCustomEndpointConfig function * fix: update modelOptions handling to use optional chaining and default to empty object in multiple endpoint initializations * chore: update @librechat/agents to version 2.4.42 * refactor: streamline agent endpoint configuration and enhance client options handling for title generations - Introduced a new `getProviderConfig` function to centralize provider configuration logic. - Updated `AgentClient` to utilize the new provider configuration, improving clarity and maintainability. - Removed redundant code related to endpoint initialization and model parameter handling. - Enhanced error logging for missing endpoint configurations. 
* fix: add abort handling for image generation and editing in OpenAIImageTools * ci: enhance getLLMConfig tests to verify fetchOptions and dispatcher properties * fix: use optional chaining for endpointOption properties in getOptions * fix: increase title generation timeout from 25s to 45s, pass `endpointOption` to `getOptions` * fix: update file filtering logic in getToolFilesByIds to ensure text field is properly checked * fix: add error handling for empty OCR results in uploadMistralOCR and uploadAzureMistralOCR * fix: enhance error handling in file upload to include 'No OCR result' message * chore: update error messages in uploadMistralOCR and uploadAzureMistralOCR * fix: enhance filtering logic in getToolFilesByIds to include context checks for OCR resources to only include files directly attached to agent --------- Co-authored-by: Matt Burnett --- .../tools/structured/OpenAIImageTools.js | 41 +++- api/models/File.js | 10 +- api/package.json | 2 +- api/server/controllers/agents/client.js | 84 ++++---- api/server/routes/files/files.js | 5 +- api/server/services/Config/getCustomConfig.js | 1 + api/server/services/Endpoints/agents/agent.js | 33 +-- api/server/services/Endpoints/agents/title.js | 2 +- .../Endpoints/anthropic/initialize.js | 2 +- .../services/Endpoints/anthropic/llm.js | 1 + .../services/Endpoints/anthropic/llm.spec.js | 203 +++++++++++++++++- .../services/Endpoints/bedrock/options.js | 4 +- .../services/Endpoints/custom/initialize.js | 2 +- .../services/Endpoints/google/initialize.js | 4 +- api/server/services/Endpoints/index.js | 58 +++++ .../services/Endpoints/openAI/initialize.js | 2 +- package-lock.json | 36 +++- packages/api/src/files/mistral/crud.ts | 16 +- 18 files changed, 415 insertions(+), 91 deletions(-) create mode 100644 api/server/services/Endpoints/index.js diff --git a/api/app/clients/tools/structured/OpenAIImageTools.js b/api/app/clients/tools/structured/OpenAIImageTools.js index 08e15a7fad..411db1edf9 100644 --- 
a/api/app/clients/tools/structured/OpenAIImageTools.js +++ b/api/app/clients/tools/structured/OpenAIImageTools.js @@ -107,6 +107,12 @@ const getImageEditPromptDescription = () => { return process.env.IMAGE_EDIT_OAI_PROMPT_DESCRIPTION || DEFAULT_IMAGE_EDIT_PROMPT_DESCRIPTION; }; +function createAbortHandler() { + return function () { + logger.debug('[ImageGenOAI] Image generation aborted'); + }; +} + /** * Creates OpenAI Image tools (generation and editing) * @param {Object} fields - Configuration fields @@ -201,10 +207,18 @@ function createOpenAIImageTools(fields = {}) { } let resp; + /** @type {AbortSignal} */ + let derivedSignal = null; + /** @type {() => void} */ + let abortHandler = null; + try { - const derivedSignal = runnableConfig?.signal - ? AbortSignal.any([runnableConfig.signal]) - : undefined; + if (runnableConfig?.signal) { + derivedSignal = AbortSignal.any([runnableConfig.signal]); + abortHandler = createAbortHandler(); + derivedSignal.addEventListener('abort', abortHandler, { once: true }); + } + resp = await openai.images.generate( { model: 'gpt-image-1', @@ -228,6 +242,10 @@ function createOpenAIImageTools(fields = {}) { logAxiosError({ error, message }); return returnValue(`Something went wrong when trying to generate the image. The OpenAI API may be unavailable: Error Message: ${error.message}`); + } finally { + if (abortHandler && derivedSignal) { + derivedSignal.removeEventListener('abort', abortHandler); + } } if (!resp) { @@ -409,10 +427,17 @@ Error Message: ${error.message}`); headers['Authorization'] = `Bearer ${apiKey}`; } + /** @type {AbortSignal} */ + let derivedSignal = null; + /** @type {() => void} */ + let abortHandler = null; + try { - const derivedSignal = runnableConfig?.signal - ? 
AbortSignal.any([runnableConfig.signal]) - : undefined; + if (runnableConfig?.signal) { + derivedSignal = AbortSignal.any([runnableConfig.signal]); + abortHandler = createAbortHandler(); + derivedSignal.addEventListener('abort', abortHandler, { once: true }); + } /** @type {import('axios').AxiosRequestConfig} */ const axiosConfig = { @@ -467,6 +492,10 @@ Error Message: ${error.message}`); logAxiosError({ error, message }); return returnValue(`Something went wrong when trying to edit the image. The OpenAI API may be unavailable: Error Message: ${error.message || 'Unknown error'}`); + } finally { + if (abortHandler && derivedSignal) { + derivedSignal.removeEventListener('abort', abortHandler); + } } }, { diff --git a/api/models/File.js b/api/models/File.js index ff509539e3..1ee943131d 100644 --- a/api/models/File.js +++ b/api/models/File.js @@ -1,5 +1,5 @@ const { logger } = require('@librechat/data-schemas'); -const { EToolResources } = require('librechat-data-provider'); +const { EToolResources, FileContext } = require('librechat-data-provider'); const { File } = require('~/db/models'); /** @@ -32,19 +32,19 @@ const getFiles = async (filter, _sortOptions, selectFields = { text: 0 }) => { * @returns {Promise>} Files that match the criteria */ const getToolFilesByIds = async (fileIds, toolResourceSet) => { - if (!fileIds || !fileIds.length) { + if (!fileIds || !fileIds.length || !toolResourceSet?.size) { return []; } try { const filter = { file_id: { $in: fileIds }, + $or: [], }; - if (toolResourceSet.size) { - filter.$or = []; + if (toolResourceSet.has(EToolResources.ocr)) { + filter.$or.push({ text: { $exists: true, $ne: null }, context: FileContext.agents }); } - if (toolResourceSet.has(EToolResources.file_search)) { filter.$or.push({ embedded: true }); } diff --git a/api/package.json b/api/package.json index 893baddd5d..7b0e064369 100644 --- a/api/package.json +++ b/api/package.json @@ -48,7 +48,7 @@ "@langchain/google-genai": "^0.2.13", 
"@langchain/google-vertexai": "^0.2.13", "@langchain/textsplitters": "^0.1.0", - "@librechat/agents": "^2.4.41", + "@librechat/agents": "^2.4.42", "@librechat/api": "*", "@librechat/data-schemas": "*", "@node-saml/passport-saml": "^5.0.0", diff --git a/api/server/controllers/agents/client.js b/api/server/controllers/agents/client.js index 6769348d95..f9d4e16a94 100644 --- a/api/server/controllers/agents/client.js +++ b/api/server/controllers/agents/client.js @@ -9,6 +9,7 @@ const { } = require('@librechat/api'); const { Callback, + Providers, GraphEvents, formatMessage, formatAgentMessages, @@ -31,17 +32,13 @@ const { } = require('librechat-data-provider'); const { DynamicStructuredTool } = require('@langchain/core/tools'); const { getBufferString, HumanMessage } = require('@langchain/core/messages'); -const { - getCustomEndpointConfig, - createGetMCPAuthMap, - checkCapability, -} = require('~/server/services/Config'); +const { createGetMCPAuthMap, checkCapability } = require('~/server/services/Config'); const { addCacheControl, createContextHandlers } = require('~/app/clients/prompts'); const { initializeAgent } = require('~/server/services/Endpoints/agents/agent'); const { spendTokens, spendStructuredTokens } = require('~/models/spendTokens'); const { getFormattedMemories, deleteMemory, setMemory } = require('~/models'); const { encodeAndFormat } = require('~/server/services/Files/images/encode'); -const initOpenAI = require('~/server/services/Endpoints/openAI/initialize'); +const { getProviderConfig } = require('~/server/services/Endpoints'); const { checkAccess } = require('~/server/middleware/roles/access'); const BaseClient = require('~/app/clients/BaseClient'); const { loadAgent } = require('~/models/Agent'); @@ -677,7 +674,7 @@ class AgentClient extends BaseClient { hide_sequential_outputs: this.options.agent.hide_sequential_outputs, user: this.options.req.user, }, - recursionLimit: agentsEConfig?.recursionLimit, + recursionLimit: 
agentsEConfig?.recursionLimit ?? 25, signal: abortController.signal, streamMode: 'values', version: 'v2', @@ -983,23 +980,26 @@ class AgentClient extends BaseClient { throw new Error('Run not initialized'); } const { handleLLMEnd, collected: collectedMetadata } = createMetadataAggregator(); - const endpoint = this.options.agent.endpoint; - const { req, res } = this.options; + const { req, res, agent } = this.options; + const endpoint = agent.endpoint; + /** @type {import('@librechat/agents').ClientOptions} */ let clientOptions = { maxTokens: 75, + model: agent.model_parameters.model, }; - let endpointConfig = req.app.locals[endpoint]; + + const { getOptions, overrideProvider, customEndpointConfig } = + await getProviderConfig(endpoint); + + /** @type {TEndpoint | undefined} */ + const endpointConfig = req.app.locals[endpoint] ?? customEndpointConfig; if (!endpointConfig) { - try { - endpointConfig = await getCustomEndpointConfig(endpoint); - } catch (err) { - logger.error( - '[api/server/controllers/agents/client.js #titleConvo] Error getting custom endpoint config', - err, - ); - } + logger.warn( + '[api/server/controllers/agents/client.js #titleConvo] Error getting endpoint config', + ); } + if ( endpointConfig && endpointConfig.titleModel && @@ -1007,30 +1007,40 @@ class AgentClient extends BaseClient { ) { clientOptions.model = endpointConfig.titleModel; } + + const options = await getOptions({ + req, + res, + optionsOnly: true, + overrideEndpoint: endpoint, + overrideModel: clientOptions.model, + endpointOption: { model_parameters: clientOptions }, + }); + + let provider = options.provider ?? overrideProvider ?? 
agent.provider; if ( endpoint === EModelEndpoint.azureOpenAI && - clientOptions.model && - this.options.agent.model_parameters.model !== clientOptions.model + options.llmConfig?.azureOpenAIApiInstanceName == null ) { - clientOptions = - ( - await initOpenAI({ - req, - res, - optionsOnly: true, - overrideModel: clientOptions.model, - overrideEndpoint: endpoint, - endpointOption: { - model_parameters: clientOptions, - }, - }) - )?.llmConfig ?? clientOptions; + provider = Providers.OPENAI; } - if (/\b(o\d)\b/i.test(clientOptions.model) && clientOptions.maxTokens != null) { + + /** @type {import('@librechat/agents').ClientOptions} */ + clientOptions = { ...options.llmConfig }; + if (options.configOptions) { + clientOptions.configuration = options.configOptions; + } + + // Ensure maxTokens is set for non-o1 models + if (!/\b(o\d)\b/i.test(clientOptions.model) && !clientOptions.maxTokens) { + clientOptions.maxTokens = 75; + } else if (/\b(o\d)\b/i.test(clientOptions.model) && clientOptions.maxTokens != null) { delete clientOptions.maxTokens; } + try { const titleResult = await this.run.generateTitle({ + provider, inputText: text, contentParts: this.contentParts, clientOptions, @@ -1048,8 +1058,10 @@ class AgentClient extends BaseClient { let input_tokens, output_tokens; if (item.usage) { - input_tokens = item.usage.input_tokens || item.usage.inputTokens; - output_tokens = item.usage.output_tokens || item.usage.outputTokens; + input_tokens = + item.usage.prompt_tokens || item.usage.input_tokens || item.usage.inputTokens; + output_tokens = + item.usage.completion_tokens || item.usage.output_tokens || item.usage.outputTokens; } else if (item.tokenUsage) { input_tokens = item.tokenUsage.promptTokens; output_tokens = item.tokenUsage.completionTokens; diff --git a/api/server/routes/files/files.js b/api/server/routes/files/files.js index bb2ae0bbe5..bdfdca65cf 100644 --- a/api/server/routes/files/files.js +++ b/api/server/routes/files/files.js @@ -283,7 +283,10 @@ 
router.post('/', async (req, res) => { message += ': ' + error.message; } - if (error.message?.includes('Invalid file format')) { + if ( + error.message?.includes('Invalid file format') || + error.message?.includes('No OCR result') + ) { message = error.message; } diff --git a/api/server/services/Config/getCustomConfig.js b/api/server/services/Config/getCustomConfig.js index d1ee5c3278..f3fb6f26b4 100644 --- a/api/server/services/Config/getCustomConfig.js +++ b/api/server/services/Config/getCustomConfig.js @@ -40,6 +40,7 @@ async function getBalanceConfig() { /** * * @param {string | EModelEndpoint} endpoint + * @returns {Promise} */ const getCustomEndpointConfig = async (endpoint) => { const customConfig = await getCustomConfig(); diff --git a/api/server/services/Endpoints/agents/agent.js b/api/server/services/Endpoints/agents/agent.js index 506670ecad..a3523605db 100644 --- a/api/server/services/Endpoints/agents/agent.js +++ b/api/server/services/Endpoints/agents/agent.js @@ -11,30 +11,13 @@ const { replaceSpecialVars, providerEndpointMap, } = require('librechat-data-provider'); -const initAnthropic = require('~/server/services/Endpoints/anthropic/initialize'); -const getBedrockOptions = require('~/server/services/Endpoints/bedrock/options'); -const initOpenAI = require('~/server/services/Endpoints/openAI/initialize'); -const initCustom = require('~/server/services/Endpoints/custom/initialize'); -const initGoogle = require('~/server/services/Endpoints/google/initialize'); +const { getProviderConfig } = require('~/server/services/Endpoints'); const generateArtifactsPrompt = require('~/app/clients/prompts/artifacts'); -const { getCustomEndpointConfig } = require('~/server/services/Config'); const { processFiles } = require('~/server/services/Files/process'); const { getFiles, getToolFilesByIds } = require('~/models/File'); const { getConvoFiles } = require('~/models/Conversation'); const { getModelMaxTokens } = require('~/utils'); -const providerConfigMap = { - 
[Providers.XAI]: initCustom, - [Providers.OLLAMA]: initCustom, - [Providers.DEEPSEEK]: initCustom, - [Providers.OPENROUTER]: initCustom, - [EModelEndpoint.openAI]: initOpenAI, - [EModelEndpoint.google]: initGoogle, - [EModelEndpoint.azureOpenAI]: initOpenAI, - [EModelEndpoint.anthropic]: initAnthropic, - [EModelEndpoint.bedrock]: getBedrockOptions, -}; - /** * @param {object} params * @param {ServerRequest} params.req @@ -114,17 +97,9 @@ const initializeAgent = async ({ })) ?? {}; agent.endpoint = provider; - let getOptions = providerConfigMap[provider]; - if (!getOptions && providerConfigMap[provider.toLowerCase()] != null) { - agent.provider = provider.toLowerCase(); - getOptions = providerConfigMap[agent.provider]; - } else if (!getOptions) { - const customEndpointConfig = await getCustomEndpointConfig(provider); - if (!customEndpointConfig) { - throw new Error(`Provider ${provider} not supported`); - } - getOptions = initCustom; - agent.provider = Providers.OPENAI; + const { getOptions, overrideProvider } = await getProviderConfig(provider); + if (overrideProvider) { + agent.provider = overrideProvider; } const _endpointOption = diff --git a/api/server/services/Endpoints/agents/title.js b/api/server/services/Endpoints/agents/title.js index ab171bc79d..2e5f00ecd0 100644 --- a/api/server/services/Endpoints/agents/title.js +++ b/api/server/services/Endpoints/agents/title.js @@ -23,7 +23,7 @@ const addTitle = async (req, { text, response, client }) => { let timeoutId; try { const timeoutPromise = new Promise((_, reject) => { - timeoutId = setTimeout(() => reject(new Error('Title generation timeout')), 25000); + timeoutId = setTimeout(() => reject(new Error('Title generation timeout')), 45000); }).catch((error) => { logger.error('Title error:', error); }); diff --git a/api/server/services/Endpoints/anthropic/initialize.js b/api/server/services/Endpoints/anthropic/initialize.js index d4c6dd1795..4546fc634c 100644 --- 
a/api/server/services/Endpoints/anthropic/initialize.js +++ b/api/server/services/Endpoints/anthropic/initialize.js @@ -41,7 +41,7 @@ const initializeClient = async ({ req, res, endpointOption, overrideModel, optio { reverseProxyUrl: ANTHROPIC_REVERSE_PROXY ?? null, proxy: PROXY ?? null, - modelOptions: endpointOption.model_parameters, + modelOptions: endpointOption?.model_parameters ?? {}, }, clientOptions, ); diff --git a/api/server/services/Endpoints/anthropic/llm.js b/api/server/services/Endpoints/anthropic/llm.js index 66496f00fd..a14960ccd5 100644 --- a/api/server/services/Endpoints/anthropic/llm.js +++ b/api/server/services/Endpoints/anthropic/llm.js @@ -75,6 +75,7 @@ function getLLMConfig(apiKey, options = {}) { if (options.reverseProxyUrl) { requestOptions.clientOptions.baseURL = options.reverseProxyUrl; + requestOptions.anthropicApiUrl = options.reverseProxyUrl; } return { diff --git a/api/server/services/Endpoints/anthropic/llm.spec.js b/api/server/services/Endpoints/anthropic/llm.spec.js index f3f77ee897..cd29975e0a 100644 --- a/api/server/services/Endpoints/anthropic/llm.spec.js +++ b/api/server/services/Endpoints/anthropic/llm.spec.js @@ -1,11 +1,45 @@ -const { anthropicSettings } = require('librechat-data-provider'); +const { anthropicSettings, removeNullishValues } = require('librechat-data-provider'); const { getLLMConfig } = require('~/server/services/Endpoints/anthropic/llm'); +const { checkPromptCacheSupport, getClaudeHeaders, configureReasoning } = require('./helpers'); jest.mock('https-proxy-agent', () => ({ HttpsProxyAgent: jest.fn().mockImplementation((proxy) => ({ proxy })), })); +jest.mock('./helpers', () => ({ + checkPromptCacheSupport: jest.fn(), + getClaudeHeaders: jest.fn(), + configureReasoning: jest.fn((requestOptions) => requestOptions), +})); + +jest.mock('librechat-data-provider', () => ({ + anthropicSettings: { + model: { default: 'claude-3-opus-20240229' }, + maxOutputTokens: { default: 4096, reset: jest.fn(() => 4096) }, + 
thinking: { default: false }, + promptCache: { default: false }, + thinkingBudget: { default: null }, + }, + removeNullishValues: jest.fn((obj) => { + const result = {}; + for (const key in obj) { + if (obj[key] !== null && obj[key] !== undefined) { + result[key] = obj[key]; + } + } + return result; + }), +})); + describe('getLLMConfig', () => { + beforeEach(() => { + jest.clearAllMocks(); + checkPromptCacheSupport.mockReturnValue(false); + getClaudeHeaders.mockReturnValue(undefined); + configureReasoning.mockImplementation((requestOptions) => requestOptions); + anthropicSettings.maxOutputTokens.reset.mockReturnValue(4096); + }); + it('should create a basic configuration with default values', () => { const result = getLLMConfig('test-api-key', { modelOptions: {} }); @@ -36,6 +70,7 @@ describe('getLLMConfig', () => { }); expect(result.llmConfig.clientOptions).toHaveProperty('baseURL', 'http://reverse-proxy'); + expect(result.llmConfig).toHaveProperty('anthropicApiUrl', 'http://reverse-proxy'); }); it('should include topK and topP for non-Claude-3.7 models', () => { @@ -65,6 +100,11 @@ describe('getLLMConfig', () => { }); it('should NOT include topK and topP for Claude-3-7 models (hyphen notation)', () => { + configureReasoning.mockImplementation((requestOptions) => { + requestOptions.thinking = { type: 'enabled' }; + return requestOptions; + }); + const result = getLLMConfig('test-api-key', { modelOptions: { model: 'claude-3-7-sonnet', @@ -78,6 +118,11 @@ describe('getLLMConfig', () => { }); it('should NOT include topK and topP for Claude-3.7 models (decimal notation)', () => { + configureReasoning.mockImplementation((requestOptions) => { + requestOptions.thinking = { type: 'enabled' }; + return requestOptions; + }); + const result = getLLMConfig('test-api-key', { modelOptions: { model: 'claude-3.7-sonnet', @@ -154,4 +199,160 @@ describe('getLLMConfig', () => { expect(result3.llmConfig).toHaveProperty('topK', 10); expect(result3.llmConfig).toHaveProperty('topP', 
0.9); }); + + describe('Edge cases', () => { + it('should handle missing apiKey', () => { + const result = getLLMConfig(undefined, { modelOptions: {} }); + expect(result.llmConfig).not.toHaveProperty('apiKey'); + }); + + it('should handle empty modelOptions', () => { + expect(() => { + getLLMConfig('test-api-key', {}); + }).toThrow("Cannot read properties of undefined (reading 'thinking')"); + }); + + it('should handle no options parameter', () => { + expect(() => { + getLLMConfig('test-api-key'); + }).toThrow("Cannot read properties of undefined (reading 'thinking')"); + }); + + it('should handle temperature, stop sequences, and stream settings', () => { + const result = getLLMConfig('test-api-key', { + modelOptions: { + temperature: 0.7, + stop: ['\n\n', 'END'], + stream: false, + }, + }); + + expect(result.llmConfig).toHaveProperty('temperature', 0.7); + expect(result.llmConfig).toHaveProperty('stopSequences', ['\n\n', 'END']); + expect(result.llmConfig).toHaveProperty('stream', false); + }); + + it('should handle maxOutputTokens when explicitly set to falsy value', () => { + anthropicSettings.maxOutputTokens.reset.mockReturnValue(8192); + const result = getLLMConfig('test-api-key', { + modelOptions: { + model: 'claude-3-opus', + maxOutputTokens: null, + }, + }); + + expect(anthropicSettings.maxOutputTokens.reset).toHaveBeenCalledWith('claude-3-opus'); + expect(result.llmConfig).toHaveProperty('maxTokens', 8192); + }); + + it('should handle both proxy and reverseProxyUrl', () => { + const result = getLLMConfig('test-api-key', { + modelOptions: {}, + proxy: 'http://proxy:8080', + reverseProxyUrl: 'https://reverse-proxy.com', + }); + + expect(result.llmConfig.clientOptions).toHaveProperty('fetchOptions'); + expect(result.llmConfig.clientOptions.fetchOptions).toHaveProperty('dispatcher'); + expect(result.llmConfig.clientOptions.fetchOptions.dispatcher).toBeDefined(); + expect(result.llmConfig.clientOptions.fetchOptions.dispatcher.constructor.name).toBe( + 
'ProxyAgent', + ); + expect(result.llmConfig.clientOptions).toHaveProperty('baseURL', 'https://reverse-proxy.com'); + expect(result.llmConfig).toHaveProperty('anthropicApiUrl', 'https://reverse-proxy.com'); + }); + + it('should handle prompt cache with supported model', () => { + checkPromptCacheSupport.mockReturnValue(true); + getClaudeHeaders.mockReturnValue({ 'anthropic-beta': 'prompt-caching-2024-07-31' }); + + const result = getLLMConfig('test-api-key', { + modelOptions: { + model: 'claude-3-5-sonnet', + promptCache: true, + }, + }); + + expect(checkPromptCacheSupport).toHaveBeenCalledWith('claude-3-5-sonnet'); + expect(getClaudeHeaders).toHaveBeenCalledWith('claude-3-5-sonnet', true); + expect(result.llmConfig.clientOptions.defaultHeaders).toEqual({ + 'anthropic-beta': 'prompt-caching-2024-07-31', + }); + }); + + it('should handle thinking and thinkingBudget options', () => { + configureReasoning.mockImplementation((requestOptions, systemOptions) => { + if (systemOptions.thinking) { + requestOptions.thinking = { type: 'enabled' }; + } + if (systemOptions.thinkingBudget) { + requestOptions.thinking = { + ...requestOptions.thinking, + budget_tokens: systemOptions.thinkingBudget, + }; + } + return requestOptions; + }); + + getLLMConfig('test-api-key', { + modelOptions: { + model: 'claude-3-7-sonnet', + thinking: true, + thinkingBudget: 5000, + }, + }); + + expect(configureReasoning).toHaveBeenCalledWith( + expect.any(Object), + expect.objectContaining({ + thinking: true, + promptCache: false, + thinkingBudget: 5000, + }), + ); + }); + + it('should remove system options from modelOptions', () => { + const modelOptions = { + model: 'claude-3-opus', + thinking: true, + promptCache: true, + thinkingBudget: 1000, + temperature: 0.5, + }; + + getLLMConfig('test-api-key', { modelOptions }); + + expect(modelOptions).not.toHaveProperty('thinking'); + expect(modelOptions).not.toHaveProperty('promptCache'); + expect(modelOptions).not.toHaveProperty('thinkingBudget'); + 
expect(modelOptions).toHaveProperty('temperature', 0.5); + }); + + it('should handle all nullish values removal', () => { + removeNullishValues.mockImplementation((obj) => { + const cleaned = {}; + Object.entries(obj).forEach(([key, value]) => { + if (value !== null && value !== undefined) { + cleaned[key] = value; + } + }); + return cleaned; + }); + + const result = getLLMConfig('test-api-key', { + modelOptions: { + temperature: null, + topP: undefined, + topK: 0, + stop: [], + }, + }); + + expect(result.llmConfig).not.toHaveProperty('temperature'); + expect(result.llmConfig).not.toHaveProperty('topP'); + expect(result.llmConfig).toHaveProperty('topK', 0); + expect(result.llmConfig).toHaveProperty('stopSequences', []); + }); + }); }); diff --git a/api/server/services/Endpoints/bedrock/options.js b/api/server/services/Endpoints/bedrock/options.js index fc5536abbf..a31d6e10c4 100644 --- a/api/server/services/Endpoints/bedrock/options.js +++ b/api/server/services/Endpoints/bedrock/options.js @@ -64,7 +64,7 @@ const getOptions = async ({ req, overrideModel, endpointOption }) => { /** @type {BedrockClientOptions} */ const requestOptions = { - model: overrideModel ?? endpointOption.model, + model: overrideModel ?? endpointOption?.model, region: BEDROCK_AWS_DEFAULT_REGION, }; @@ -76,7 +76,7 @@ const getOptions = async ({ req, overrideModel, endpointOption }) => { const llmConfig = bedrockOutputParser( bedrockInputParser.parse( - removeNullishValues(Object.assign(requestOptions, endpointOption.model_parameters)), + removeNullishValues(Object.assign(requestOptions, endpointOption?.model_parameters ?? 
{})), ), ); diff --git a/api/server/services/Endpoints/custom/initialize.js b/api/server/services/Endpoints/custom/initialize.js index 754abef5a8..7225f05b25 100644 --- a/api/server/services/Endpoints/custom/initialize.js +++ b/api/server/services/Endpoints/custom/initialize.js @@ -134,7 +134,7 @@ const initializeClient = async ({ req, res, endpointOption, optionsOnly, overrid }; if (optionsOnly) { - const modelOptions = endpointOption.model_parameters; + const modelOptions = endpointOption?.model_parameters ?? {}; if (endpoint !== Providers.OLLAMA) { clientOptions = Object.assign( { diff --git a/api/server/services/Endpoints/google/initialize.js b/api/server/services/Endpoints/google/initialize.js index b6bc2d6a79..fe58a1fa87 100644 --- a/api/server/services/Endpoints/google/initialize.js +++ b/api/server/services/Endpoints/google/initialize.js @@ -18,7 +18,7 @@ const initializeClient = async ({ req, res, endpointOption, overrideModel, optio let serviceKey = {}; try { serviceKey = require('~/data/auth.json'); - } catch (e) { + } catch (_e) { // Do nothing } @@ -58,7 +58,7 @@ const initializeClient = async ({ req, res, endpointOption, overrideModel, optio if (optionsOnly) { clientOptions = Object.assign( { - modelOptions: endpointOption.model_parameters, + modelOptions: endpointOption?.model_parameters ?? 
{}, }, clientOptions, ); diff --git a/api/server/services/Endpoints/index.js b/api/server/services/Endpoints/index.js new file mode 100644 index 0000000000..b6e398366b --- /dev/null +++ b/api/server/services/Endpoints/index.js @@ -0,0 +1,58 @@ +const { Providers } = require('@librechat/agents'); +const { EModelEndpoint } = require('librechat-data-provider'); +const initAnthropic = require('~/server/services/Endpoints/anthropic/initialize'); +const getBedrockOptions = require('~/server/services/Endpoints/bedrock/options'); +const initOpenAI = require('~/server/services/Endpoints/openAI/initialize'); +const initCustom = require('~/server/services/Endpoints/custom/initialize'); +const initGoogle = require('~/server/services/Endpoints/google/initialize'); +const { getCustomEndpointConfig } = require('~/server/services/Config'); + +const providerConfigMap = { + [Providers.XAI]: initCustom, + [Providers.OLLAMA]: initCustom, + [Providers.DEEPSEEK]: initCustom, + [Providers.OPENROUTER]: initCustom, + [EModelEndpoint.openAI]: initOpenAI, + [EModelEndpoint.google]: initGoogle, + [EModelEndpoint.azureOpenAI]: initOpenAI, + [EModelEndpoint.anthropic]: initAnthropic, + [EModelEndpoint.bedrock]: getBedrockOptions, +}; + +/** + * Get the provider configuration and override endpoint based on the provider string + * @param {string} provider - The provider string + * @returns {Promise<{ + * getOptions: Function, + * overrideProvider?: string, + * customEndpointConfig?: TEndpoint + * }>} + */ +async function getProviderConfig(provider) { + let getOptions = providerConfigMap[provider]; + let overrideProvider; + /** @type {TEndpoint | undefined} */ + let customEndpointConfig; + + if (!getOptions && providerConfigMap[provider.toLowerCase()] != null) { + overrideProvider = provider.toLowerCase(); + getOptions = providerConfigMap[overrideProvider]; + } else if (!getOptions) { + customEndpointConfig = await getCustomEndpointConfig(provider); + if (!customEndpointConfig) { + throw new 
Error(`Provider ${provider} not supported`); + } + getOptions = initCustom; + overrideProvider = Providers.OPENAI; + } + + return { + getOptions, + overrideProvider, + customEndpointConfig, + }; +} + +module.exports = { + getProviderConfig, +}; diff --git a/api/server/services/Endpoints/openAI/initialize.js b/api/server/services/Endpoints/openAI/initialize.js index 0e9a07789e..e86596181a 100644 --- a/api/server/services/Endpoints/openAI/initialize.js +++ b/api/server/services/Endpoints/openAI/initialize.js @@ -138,7 +138,7 @@ const initializeClient = async ({ } if (optionsOnly) { - const modelOptions = endpointOption.model_parameters; + const modelOptions = endpointOption?.model_parameters ?? {}; modelOptions.model = modelName; clientOptions = Object.assign({ modelOptions }, clientOptions); clientOptions.modelOptions.user = req.user.id; diff --git a/package-lock.json b/package-lock.json index 39e41975e3..39e52db331 100644 --- a/package-lock.json +++ b/package-lock.json @@ -64,7 +64,7 @@ "@langchain/google-genai": "^0.2.13", "@langchain/google-vertexai": "^0.2.13", "@langchain/textsplitters": "^0.1.0", - "@librechat/agents": "^2.4.41", + "@librechat/agents": "^2.4.42", "@librechat/api": "*", "@librechat/data-schemas": "*", "@node-saml/passport-saml": "^5.0.0", @@ -1351,6 +1351,33 @@ } } }, + "api/node_modules/@librechat/agents": { + "version": "2.4.42", + "resolved": "https://registry.npmjs.org/@librechat/agents/-/agents-2.4.42.tgz", + "integrity": "sha512-52ux2PeEAV79yr6/h6GN3omlpqX6H0FYl6qwjJ6gT04MMko/imnLd3bQrX0gm3i0KL5ygHbRjQeonONKjJayHw==", + "license": "MIT", + "dependencies": { + "@langchain/anthropic": "^0.3.23", + "@langchain/aws": "^0.1.11", + "@langchain/community": "^0.3.47", + "@langchain/core": "^0.3.60", + "@langchain/deepseek": "^0.0.2", + "@langchain/google-genai": "^0.2.13", + "@langchain/google-vertexai": "^0.2.13", + "@langchain/langgraph": "^0.3.4", + "@langchain/mistralai": "^0.2.1", + "@langchain/ollama": "^0.2.3", + "@langchain/openai": 
"^0.5.14", + "@langchain/xai": "^0.0.3", + "cheerio": "^1.0.0", + "dotenv": "^16.4.7", + "https-proxy-agent": "^7.0.6", + "nanoid": "^3.3.7" + }, + "engines": { + "node": ">=14.0.0" + } + }, "api/node_modules/@smithy/abort-controller": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-4.0.2.tgz", @@ -19440,6 +19467,7 @@ "resolved": "https://registry.npmjs.org/@librechat/agents/-/agents-2.4.41.tgz", "integrity": "sha512-kYmdk5WVRp0qZxTx6BuGCs4l0Ir9iBLLx4ZY4/1wxr80al5/vq3P8wbgGdKMeO2qTu4ZaT4RyWRQYWBg5HDkUQ==", "license": "MIT", + "peer": true, "dependencies": { "@langchain/anthropic": "^0.3.23", "@langchain/aws": "^0.1.11", @@ -19467,6 +19495,7 @@ "resolved": "https://registry.npmjs.org/@langchain/community/-/community-0.3.47.tgz", "integrity": "sha512-Vo42kAfkXpTFSevhEkeqqE55az8NyQgDktCbitXYuhipNbFYx08XVvqEDkFkB20MM/Z7u+cvLb+DxCqnKuH0CQ==", "license": "MIT", + "peer": true, "dependencies": { "@langchain/openai": ">=0.2.0 <0.6.0", "@langchain/weaviate": "^0.2.0", @@ -19992,6 +20021,7 @@ "resolved": "https://registry.npmjs.org/@langchain/openai/-/openai-0.5.14.tgz", "integrity": "sha512-0GEj5K/qi1MRuZ4nE7NvyI4jTG+RSewLZqsExUwRukWdeqmkPNHGrogTa5ZDt7eaJxAaY7EgLC5ZnvCM3L1oug==", "license": "MIT", + "peer": true, "dependencies": { "js-tiktoken": "^1.0.12", "openai": "^5.3.0", @@ -20009,6 +20039,7 @@ "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.3.tgz", "integrity": "sha512-jRR5wdylq8CkOe6hei19GGZnxM6rBGwFl3Bg0YItGDimvjGtAvdZk4Pu6Cl4u4Igsws4a1fd1Vq3ezrhn4KmFw==", "license": "MIT", + "peer": true, "engines": { "node": ">= 14" } @@ -20018,6 +20049,7 @@ "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz", "integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==", "license": "MIT", + "peer": true, "dependencies": { "agent-base": "^7.1.2", "debug": "4" @@ -20031,6 +20063,7 @@ "resolved": 
"https://registry.npmjs.org/openai/-/openai-5.5.1.tgz", "integrity": "sha512-5i19097mGotHA1eFsM6Tjd/tJ8uo9sa5Ysv4Q6bKJ2vtN6rc0MzMrUefXnLXYAJcmMQrC1Efhj0AvfIkXrQamw==", "license": "Apache-2.0", + "peer": true, "bin": { "openai": "bin/cli" }, @@ -20056,6 +20089,7 @@ "https://github.com/sponsors/ctavan" ], "license": "MIT", + "peer": true, "bin": { "uuid": "dist/bin/uuid" } diff --git a/packages/api/src/files/mistral/crud.ts b/packages/api/src/files/mistral/crud.ts index 1d36459886..d89be8f14d 100644 --- a/packages/api/src/files/mistral/crud.ts +++ b/packages/api/src/files/mistral/crud.ts @@ -353,7 +353,11 @@ export const uploadMistralOCR = async (context: OCRContext): Promise Date: Tue, 24 Jun 2025 05:21:14 -0700 Subject: [PATCH 07/65] =?UTF-8?q?=F0=9F=94=A7=20fix:=20User=20Placeholders?= =?UTF-8?q?=20in=20Headers=20for=20Custom=20Endpoints=20(#8030)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * hotfix(custom-endpoints): fix user placeholder resolution in headers * fix: import --------- Co-authored-by: Danny Avila --- api/server/services/Endpoints/custom/initialize.js | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/api/server/services/Endpoints/custom/initialize.js b/api/server/services/Endpoints/custom/initialize.js index 7225f05b25..4fcbe76ea6 100644 --- a/api/server/services/Endpoints/custom/initialize.js +++ b/api/server/services/Endpoints/custom/initialize.js @@ -6,7 +6,7 @@ const { extractEnvVariable, } = require('librechat-data-provider'); const { Providers } = require('@librechat/agents'); -const { getOpenAIConfig, createHandleLLMNewToken } = require('@librechat/api'); +const { getOpenAIConfig, createHandleLLMNewToken, resolveHeaders } = require('@librechat/api'); const { getUserKeyValues, checkUserKeyExpiry } = require('~/server/services/UserService'); const { getCustomEndpointConfig } = require('~/server/services/Config'); const { fetchModels } = 
require('~/server/services/ModelService'); @@ -28,12 +28,7 @@ const initializeClient = async ({ req, res, endpointOption, optionsOnly, overrid const CUSTOM_API_KEY = extractEnvVariable(endpointConfig.apiKey); const CUSTOM_BASE_URL = extractEnvVariable(endpointConfig.baseURL); - let resolvedHeaders = {}; - if (endpointConfig.headers && typeof endpointConfig.headers === 'object') { - Object.keys(endpointConfig.headers).forEach((key) => { - resolvedHeaders[key] = extractEnvVariable(endpointConfig.headers[key]); - }); - } + let resolvedHeaders = resolveHeaders(endpointConfig.headers, req.user); if (CUSTOM_API_KEY.match(envVarRegex)) { throw new Error(`Missing API Key for ${endpoint}.`); From 42977ac0d049bde5dcd054eabc7902ec62e7fdf5 Mon Sep 17 00:00:00 2001 From: Rakshit Tiwari Date: Tue, 24 Jun 2025 20:13:29 +0530 Subject: [PATCH 08/65] =?UTF-8?q?=F0=9F=96=BC=EF=B8=8F=20feat:=20Add=20Opt?= =?UTF-8?q?ional=20Client-Side=20Image=20Resizing=20to=20Prevent=20Upload?= =?UTF-8?q?=20Errors=20(#7909)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: Add optional client-side image resizing to prevent upload errors * Addressing comments from author * Addressing eslint errors * Fixing the naming to clientresize from clientsideresize --- client/src/hooks/Files/useClientResize.ts | 84 +++++++ client/src/hooks/Files/useFileHandling.ts | 49 +++- .../src/utils/__tests__/imageResize.test.ts | 108 +++++++++ client/src/utils/imageResize.ts | 215 ++++++++++++++++++ librechat.example.yaml | 6 + packages/data-provider/src/file-config.ts | 22 ++ packages/data-provider/src/types/files.ts | 6 + 7 files changed, 480 insertions(+), 10 deletions(-) create mode 100644 client/src/hooks/Files/useClientResize.ts create mode 100644 client/src/utils/__tests__/imageResize.test.ts create mode 100644 client/src/utils/imageResize.ts diff --git a/client/src/hooks/Files/useClientResize.ts b/client/src/hooks/Files/useClientResize.ts new file mode 100644 index 
0000000000..1da3848aeb --- /dev/null +++ b/client/src/hooks/Files/useClientResize.ts @@ -0,0 +1,84 @@ +import { mergeFileConfig } from 'librechat-data-provider'; +import { useCallback } from 'react'; +import { useGetFileConfig } from '~/data-provider'; +import { + resizeImage, + shouldResizeImage, + supportsClientResize, + type ResizeOptions, + type ResizeResult, +} from '~/utils/imageResize'; + +/** + * Hook for client-side image resizing functionality + * Integrates with LibreChat's file configuration system + */ +export const useClientResize = () => { + const { data: fileConfig = null } = useGetFileConfig({ + select: (data) => mergeFileConfig(data), + }); + + // Safe access to clientImageResize config with fallbacks + // eslint-disable-next-line react-hooks/exhaustive-deps + const config = (fileConfig as any)?.clientImageResize ?? { + enabled: false, + maxWidth: 1900, + maxHeight: 1900, + quality: 0.92, + }; + const isEnabled = config?.enabled ?? false; + + /** + * Resizes an image if client-side resizing is enabled and supported + * @param file - The image file to resize + * @param options - Optional resize options to override defaults + * @returns Promise resolving to either the resized file result or original file + */ + const resizeImageIfNeeded = useCallback( + async ( + file: File, + options?: Partial, + ): Promise<{ file: File; resized: boolean; result?: ResizeResult }> => { + // Return original file if resizing is disabled + if (!isEnabled) { + return { file, resized: false }; + } + + // Return original file if browser doesn't support resizing + if (!supportsClientResize()) { + console.warn('Client-side image resizing not supported in this browser'); + return { file, resized: false }; + } + + // Return original file if it doesn't need resizing + if (!shouldResizeImage(file)) { + return { file, resized: false }; + } + + try { + const resizeOptions: Partial = { + maxWidth: config?.maxWidth, + maxHeight: config?.maxHeight, + quality: config?.quality, + 
...options, + }; + + const result = await resizeImage(file, resizeOptions); + return { file: result.file, resized: true, result }; + } catch (error) { + console.warn('Client-side image resizing failed:', error); + return { file, resized: false }; + } + }, + [isEnabled, config], + ); + + return { + isEnabled, + isSupported: supportsClientResize(), + config, + resizeImageIfNeeded, + }; +}; + +export default useClientResize; diff --git a/client/src/hooks/Files/useFileHandling.ts b/client/src/hooks/Files/useFileHandling.ts index 9e03f29334..7f74a02733 100644 --- a/client/src/hooks/Files/useFileHandling.ts +++ b/client/src/hooks/Files/useFileHandling.ts @@ -18,6 +18,7 @@ import useLocalize, { TranslationKeys } from '~/hooks/useLocalize'; import { useChatContext } from '~/Providers/ChatContext'; import { useToastContext } from '~/Providers/ToastContext'; import { logger, validateFiles } from '~/utils'; +import useClientResize from './useClientResize'; import { processFileForUpload } from '~/utils/heicConverter'; import { useDelayedUploadToast } from './useDelayedUploadToast'; import useUpdateFiles from './useUpdateFiles'; @@ -41,6 +42,7 @@ const useFileHandling = (params?: UseFileHandling) => { const { addFile, replaceFile, updateFileById, deleteFileById } = useUpdateFiles( params?.fileSetter ?? setFiles, ); + const { resizeImageIfNeeded } = useClientResize(); const agent_id = params?.additionalMetadata?.agent_id ?? ''; const assistant_id = params?.additionalMetadata?.assistant_id ?? 
''; @@ -298,7 +300,7 @@ const useFileHandling = (params?: UseFileHandling) => { } // Process file for HEIC conversion if needed - const processedFile = await processFileForUpload( + const heicProcessedFile = await processFileForUpload( originalFile, 0.9, (conversionProgress) => { @@ -311,23 +313,50 @@ const useFileHandling = (params?: UseFileHandling) => { }, ); - // If file was converted, update with new file and preview - if (processedFile !== originalFile) { + let finalProcessedFile = heicProcessedFile; + + // Apply client-side resizing if available and appropriate + if (heicProcessedFile.type.startsWith('image/')) { + try { + const resizeResult = await resizeImageIfNeeded(heicProcessedFile); + finalProcessedFile = resizeResult.file; + + // Show toast notification if image was resized + if (resizeResult.resized && resizeResult.result) { + const { originalSize, newSize, compressionRatio } = resizeResult.result; + const originalSizeMB = (originalSize / (1024 * 1024)).toFixed(1); + const newSizeMB = (newSize / (1024 * 1024)).toFixed(1); + const savedPercent = Math.round((1 - compressionRatio) * 100); + + showToast({ + message: `Image resized: ${originalSizeMB}MB → ${newSizeMB}MB (${savedPercent}% smaller)`, + status: 'success', + duration: 3000, + }); + } + } catch (resizeError) { + console.warn('Image resize failed, using original:', resizeError); + // Continue with HEIC processed file if resizing fails + } + } + + // If file was processed (HEIC converted or resized), update with new file and preview + if (finalProcessedFile !== originalFile) { URL.revokeObjectURL(initialPreview); // Clean up original preview - const newPreview = URL.createObjectURL(processedFile); + const newPreview = URL.createObjectURL(finalProcessedFile); const updatedExtendedFile: ExtendedFile = { ...initialExtendedFile, - file: processedFile, - type: processedFile.type, + file: finalProcessedFile, + type: finalProcessedFile.type, preview: newPreview, - progress: 0.5, // Conversion complete, 
ready for upload - size: processedFile.size, + progress: 0.5, // Processing complete, ready for upload + size: finalProcessedFile.size, }; replaceFile(updatedExtendedFile); - const isImage = processedFile.type.split('/')[0] === 'image'; + const isImage = finalProcessedFile.type.split('/')[0] === 'image'; if (isImage) { loadImage(updatedExtendedFile, newPreview); continue; @@ -335,7 +364,7 @@ const useFileHandling = (params?: UseFileHandling) => { await startUpload(updatedExtendedFile); } else { - // File wasn't converted, proceed with original + // File wasn't processed, proceed with original const isImage = originalFile.type.split('/')[0] === 'image'; const tool_resource = initialExtendedFile.tool_resource ?? params?.additionalMetadata?.tool_resource; diff --git a/client/src/utils/__tests__/imageResize.test.ts b/client/src/utils/__tests__/imageResize.test.ts new file mode 100644 index 0000000000..c09d2293d1 --- /dev/null +++ b/client/src/utils/__tests__/imageResize.test.ts @@ -0,0 +1,108 @@ +/** + * Tests for client-side image resizing utility + */ + +import { shouldResizeImage, supportsClientResize } from '../imageResize'; + +// Mock browser APIs for testing +Object.defineProperty(global, 'HTMLCanvasElement', { + value: function () { + return { + getContext: () => ({ + drawImage: jest.fn(), + }), + toBlob: jest.fn(), + }; + }, + writable: true, +}); + +Object.defineProperty(global, 'FileReader', { + value: function () { + return { + readAsDataURL: jest.fn(), + }; + }, + writable: true, +}); + +Object.defineProperty(global, 'Image', { + value: function () { + return {}; + }, + writable: true, +}); + +describe('imageResize utility', () => { + describe('supportsClientResize', () => { + it('should return true when all required APIs are available', () => { + const result = supportsClientResize(); + expect(result).toBe(true); + }); + + it('should return false when HTMLCanvasElement is not available', () => { + const originalCanvas = global.HTMLCanvasElement; + // 
@ts-ignore + delete global.HTMLCanvasElement; + + const result = supportsClientResize(); + expect(result).toBe(false); + + global.HTMLCanvasElement = originalCanvas; + }); + }); + + describe('shouldResizeImage', () => { + it('should return true for large image files', () => { + const largeImageFile = new File([''], 'test.jpg', { + type: 'image/jpeg', + lastModified: Date.now(), + }); + + // Mock large file size + Object.defineProperty(largeImageFile, 'size', { + value: 100 * 1024 * 1024, // 100MB + writable: false, + }); + + const result = shouldResizeImage(largeImageFile, 50 * 1024 * 1024); // 50MB limit + expect(result).toBe(true); + }); + + it('should return false for small image files', () => { + const smallImageFile = new File([''], 'test.jpg', { + type: 'image/jpeg', + lastModified: Date.now(), + }); + + // Mock small file size + Object.defineProperty(smallImageFile, 'size', { + value: 1024, // 1KB + writable: false, + }); + + const result = shouldResizeImage(smallImageFile, 50 * 1024 * 1024); // 50MB limit + expect(result).toBe(false); + }); + + it('should return false for non-image files', () => { + const textFile = new File([''], 'test.txt', { + type: 'text/plain', + lastModified: Date.now(), + }); + + const result = shouldResizeImage(textFile); + expect(result).toBe(false); + }); + + it('should return false for GIF files', () => { + const gifFile = new File([''], 'test.gif', { + type: 'image/gif', + lastModified: Date.now(), + }); + + const result = shouldResizeImage(gifFile); + expect(result).toBe(false); + }); + }); +}); diff --git a/client/src/utils/imageResize.ts b/client/src/utils/imageResize.ts new file mode 100644 index 0000000000..3be6e8d8c0 --- /dev/null +++ b/client/src/utils/imageResize.ts @@ -0,0 +1,215 @@ +/** + * Client-side image resizing utility for LibreChat + * Resizes images to prevent backend upload errors while maintaining quality + */ + +export interface ResizeOptions { + maxWidth?: number; + maxHeight?: number; + quality?: number; + 
format?: 'jpeg' | 'png' | 'webp'; +} + +export interface ResizeResult { + file: File; + originalSize: number; + newSize: number; + originalDimensions: { width: number; height: number }; + newDimensions: { width: number; height: number }; + compressionRatio: number; +} + +/** + * Default resize options based on backend 'high' resolution settings + * Backend 'high' uses maxShortSide=768, maxLongSide=2000 + * We use slightly smaller values to ensure no backend resizing is triggered + */ +const DEFAULT_RESIZE_OPTIONS: ResizeOptions = { + maxWidth: 1900, // Slightly less than backend maxLongSide=2000 + maxHeight: 1900, // Slightly less than backend maxLongSide=2000 + quality: 0.92, // High quality while reducing file size + format: 'jpeg', // Most compatible format +}; + +/** + * Checks if the browser supports canvas-based image resizing + */ +export function supportsClientResize(): boolean { + try { + // Check for required APIs + if (typeof HTMLCanvasElement === 'undefined') return false; + if (typeof FileReader === 'undefined') return false; + if (typeof Image === 'undefined') return false; + + // Test canvas creation + const canvas = document.createElement('canvas'); + const ctx = canvas.getContext('2d'); + + return !!(ctx && ctx.drawImage && canvas.toBlob); + } catch { + return false; + } +} + +/** + * Calculates new dimensions while maintaining aspect ratio + */ +function calculateDimensions( + originalWidth: number, + originalHeight: number, + maxWidth: number, + maxHeight: number, +): { width: number; height: number } { + const { width, height } = { width: originalWidth, height: originalHeight }; + + // If image is smaller than max dimensions, don't upscale + if (width <= maxWidth && height <= maxHeight) { + return { width, height }; + } + + // Calculate scaling factor + const widthRatio = maxWidth / width; + const heightRatio = maxHeight / height; + const scalingFactor = Math.min(widthRatio, heightRatio); + + return { + width: Math.round(width * scalingFactor), 
+ height: Math.round(height * scalingFactor), + }; +} + +/** + * Resizes an image file using canvas + */ +export function resizeImage( + file: File, + options: Partial = {}, +): Promise { + return new Promise((resolve, reject) => { + // Check browser support + if (!supportsClientResize()) { + reject(new Error('Browser does not support client-side image resizing')); + return; + } + + // Only process image files + if (!file.type.startsWith('image/')) { + reject(new Error('File is not an image')); + return; + } + + const opts = { ...DEFAULT_RESIZE_OPTIONS, ...options }; + const reader = new FileReader(); + + reader.onload = (event) => { + const img = new Image(); + + img.onload = () => { + try { + const originalDimensions = { width: img.width, height: img.height }; + const newDimensions = calculateDimensions( + img.width, + img.height, + opts.maxWidth!, + opts.maxHeight!, + ); + + // If no resizing needed, return original file + if ( + newDimensions.width === originalDimensions.width && + newDimensions.height === originalDimensions.height + ) { + resolve({ + file, + originalSize: file.size, + newSize: file.size, + originalDimensions, + newDimensions, + compressionRatio: 1, + }); + return; + } + + // Create canvas and resize + const canvas = document.createElement('canvas'); + const ctx = canvas.getContext('2d')!; + + canvas.width = newDimensions.width; + canvas.height = newDimensions.height; + + // Use high-quality image smoothing + ctx.imageSmoothingEnabled = true; + ctx.imageSmoothingQuality = 'high'; + + // Draw resized image + ctx.drawImage(img, 0, 0, newDimensions.width, newDimensions.height); + + // Convert to blob + canvas.toBlob( + (blob) => { + if (!blob) { + reject(new Error('Failed to create blob from canvas')); + return; + } + + // Create new file with same name but potentially different extension + const extension = opts.format === 'jpeg' ? 
'.jpg' : `.${opts.format}`; + const baseName = file.name.replace(/\.[^/.]+$/, ''); + const newFileName = `${baseName}${extension}`; + + const resizedFile = new File([blob], newFileName, { + type: `image/${opts.format}`, + lastModified: Date.now(), + }); + + resolve({ + file: resizedFile, + originalSize: file.size, + newSize: resizedFile.size, + originalDimensions, + newDimensions, + compressionRatio: resizedFile.size / file.size, + }); + }, + `image/${opts.format}`, + opts.quality, + ); + } catch (error) { + reject(error); + } + }; + + img.onerror = () => reject(new Error('Failed to load image')); + img.src = event.target?.result as string; + }; + + reader.onerror = () => reject(new Error('Failed to read file')); + reader.readAsDataURL(file); + }); +} + +/** + * Determines if an image should be resized based on size and dimensions + */ +export function shouldResizeImage( + file: File, + fileSizeLimit: number = 512 * 1024 * 1024, // 512MB default +): boolean { + // Don't resize if file is already small + if (file.size < fileSizeLimit * 0.1) { + // Less than 10% of limit + return false; + } + + // Don't process non-images + if (!file.type.startsWith('image/')) { + return false; + } + + // Don't process GIFs (they might be animated) + if (file.type === 'image/gif') { + return false; + } + + return true; +} diff --git a/librechat.example.yaml b/librechat.example.yaml index 17aeafd82d..de28dcc32e 100644 --- a/librechat.example.yaml +++ b/librechat.example.yaml @@ -300,6 +300,12 @@ endpoints: # imageGeneration: # Image Gen settings, either percentage or px # percentage: 100 # px: 1024 +# # Client-side image resizing to prevent upload errors +# clientImageResize: +# enabled: false # Enable/disable client-side image resizing (default: false) +# maxWidth: 1900 # Maximum width for resized images (default: 1900) +# maxHeight: 1900 # Maximum height for resized images (default: 1900) +# quality: 0.92 # JPEG quality for compression (0.0-1.0, default: 0.92) # # See the Custom 
Configuration Guide for more information on Assistants Config: # # https://www.librechat.ai/docs/configuration/librechat_yaml/object_structure/assistants_endpoint diff --git a/packages/data-provider/src/file-config.ts b/packages/data-provider/src/file-config.ts index f09fce7cc5..08cf99fbe6 100644 --- a/packages/data-provider/src/file-config.ts +++ b/packages/data-provider/src/file-config.ts @@ -192,6 +192,12 @@ export const fileConfig = { }, serverFileSizeLimit: defaultSizeLimit, avatarSizeLimit: mbToBytes(2), + clientImageResize: { + enabled: false, + maxWidth: 1900, + maxHeight: 1900, + quality: 0.92, + }, checkType: function (fileType: string, supportedTypes: RegExp[] = supportedMimeTypes) { return supportedTypes.some((regex) => regex.test(fileType)); }, @@ -232,6 +238,14 @@ export const fileConfigSchema = z.object({ px: z.number().min(0).optional(), }) .optional(), + clientImageResize: z + .object({ + enabled: z.boolean().optional(), + maxWidth: z.number().min(0).optional(), + maxHeight: z.number().min(0).optional(), + quality: z.number().min(0).max(1).optional(), + }) + .optional(), }); /** Helper function to safely convert string patterns to RegExp objects */ @@ -260,6 +274,14 @@ export function mergeFileConfig(dynamic: z.infer | unde mergedConfig.avatarSizeLimit = mbToBytes(dynamic.avatarSizeLimit); } + // Merge clientImageResize configuration + if (dynamic.clientImageResize !== undefined) { + mergedConfig.clientImageResize = { + ...mergedConfig.clientImageResize, + ...dynamic.clientImageResize, + }; + } + if (!dynamic.endpoints) { return mergedConfig; } diff --git a/packages/data-provider/src/types/files.ts b/packages/data-provider/src/types/files.ts index bae3e783bf..95b74a4216 100644 --- a/packages/data-provider/src/types/files.ts +++ b/packages/data-provider/src/types/files.ts @@ -48,6 +48,12 @@ export type FileConfig = { }; serverFileSizeLimit?: number; avatarSizeLimit?: number; + clientImageResize?: { + enabled?: boolean; + maxWidth?: number; + 
maxHeight?: number; + quality?: number; + }; checkType?: (fileType: string, supportedTypes: RegExp[]) => boolean; }; From b1693060960e399f5af81b07792bfc23c3ecc4c0 Mon Sep 17 00:00:00 2001 From: Dustin Healy <54083382+dustinhealy@users.noreply.github.com> Date: Tue, 24 Jun 2025 18:11:06 -0700 Subject: [PATCH 09/65] =?UTF-8?q?=F0=9F=A7=AA=20ci:=20Add=20Tests=20for=20?= =?UTF-8?q?Custom=20Endpoint=20Header=20Resolution=20(#8045)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Enhanced existing tests for the `resolveHeaders` function to cover all user field placeholders and messy scenarios. * Added basic integration tests for custom endpoints initialization file --- .../Endpoints/custom/initialize.spec.js | 93 +++++++++++++++ packages/api/src/utils/env.spec.ts | 112 ++++++++++++++++++ 2 files changed, 205 insertions(+) create mode 100644 api/server/services/Endpoints/custom/initialize.spec.js diff --git a/api/server/services/Endpoints/custom/initialize.spec.js b/api/server/services/Endpoints/custom/initialize.spec.js new file mode 100644 index 0000000000..7e28995127 --- /dev/null +++ b/api/server/services/Endpoints/custom/initialize.spec.js @@ -0,0 +1,93 @@ +const initializeClient = require('./initialize'); + +jest.mock('@librechat/api', () => ({ + resolveHeaders: jest.fn(), + getOpenAIConfig: jest.fn(), + createHandleLLMNewToken: jest.fn(), +})); + +jest.mock('librechat-data-provider', () => ({ + CacheKeys: { TOKEN_CONFIG: 'token_config' }, + ErrorTypes: { NO_USER_KEY: 'NO_USER_KEY', NO_BASE_URL: 'NO_BASE_URL' }, + envVarRegex: /\$\{([^}]+)\}/, + FetchTokenConfig: {}, + extractEnvVariable: jest.fn((value) => value), +})); + +jest.mock('@librechat/agents', () => ({ + Providers: { OLLAMA: 'ollama' }, +})); + +jest.mock('~/server/services/UserService', () => ({ + getUserKeyValues: jest.fn(), + checkUserKeyExpiry: jest.fn(), +})); + +jest.mock('~/server/services/Config', () => ({ + getCustomEndpointConfig: 
jest.fn().mockResolvedValue({ + apiKey: 'test-key', + baseURL: 'https://test.com', + headers: { 'x-user': '{{LIBRECHAT_USER_ID}}', 'x-email': '{{LIBRECHAT_USER_EMAIL}}' }, + models: { default: ['test-model'] }, + }), +})); + +jest.mock('~/server/services/ModelService', () => ({ + fetchModels: jest.fn(), +})); + +jest.mock('~/app/clients/OpenAIClient', () => { + return jest.fn().mockImplementation(() => ({ + options: {}, + })); +}); + +jest.mock('~/server/utils', () => ({ + isUserProvided: jest.fn().mockReturnValue(false), +})); + +jest.mock('~/cache/getLogStores', () => + jest.fn().mockReturnValue({ + get: jest.fn(), + }), +); + +describe('custom/initializeClient', () => { + const mockRequest = { + body: { endpoint: 'test-endpoint' }, + user: { id: 'user-123', email: 'test@example.com' }, + app: { locals: {} }, + }; + const mockResponse = {}; + + beforeEach(() => { + jest.clearAllMocks(); + }); + + it('calls resolveHeaders with headers and user', async () => { + const { resolveHeaders } = require('@librechat/api'); + await initializeClient({ req: mockRequest, res: mockResponse, optionsOnly: true }); + expect(resolveHeaders).toHaveBeenCalledWith( + { 'x-user': '{{LIBRECHAT_USER_ID}}', 'x-email': '{{LIBRECHAT_USER_EMAIL}}' }, + { id: 'user-123', email: 'test@example.com' }, + ); + }); + + it('throws if endpoint config is missing', async () => { + const { getCustomEndpointConfig } = require('~/server/services/Config'); + getCustomEndpointConfig.mockResolvedValueOnce(null); + await expect( + initializeClient({ req: mockRequest, res: mockResponse, optionsOnly: true }), + ).rejects.toThrow('Config not found for the test-endpoint custom endpoint.'); + }); + + it('throws if user is missing', async () => { + await expect( + initializeClient({ + req: { ...mockRequest, user: undefined }, + res: mockResponse, + optionsOnly: true, + }), + ).rejects.toThrow("Cannot read properties of undefined (reading 'id')"); + }); +}); diff --git a/packages/api/src/utils/env.spec.ts 
b/packages/api/src/utils/env.spec.ts index 35f3f13272..4cb8da0d6b 100644 --- a/packages/api/src/utils/env.spec.ts +++ b/packages/api/src/utils/env.spec.ts @@ -314,4 +314,116 @@ describe('resolveHeaders', () => { 'Dot-Header': 'dot-value', }); }); + + // Additional comprehensive tests for all user field placeholders + it('should replace all allowed user field placeholders', () => { + const user = { + id: 'abc', + name: 'Test User', + username: 'testuser', + email: 'me@example.com', + provider: 'google', + role: 'admin', + googleId: 'gid', + facebookId: 'fbid', + openidId: 'oid', + samlId: 'sid', + ldapId: 'lid', + githubId: 'ghid', + discordId: 'dcid', + appleId: 'aid', + emailVerified: true, + twoFactorEnabled: false, + termsAccepted: true, + }; + + const headers = { + 'X-User-ID': '{{LIBRECHAT_USER_ID}}', + 'X-User-Name': '{{LIBRECHAT_USER_NAME}}', + 'X-User-Username': '{{LIBRECHAT_USER_USERNAME}}', + 'X-User-Email': '{{LIBRECHAT_USER_EMAIL}}', + 'X-User-Provider': '{{LIBRECHAT_USER_PROVIDER}}', + 'X-User-Role': '{{LIBRECHAT_USER_ROLE}}', + 'X-User-GoogleId': '{{LIBRECHAT_USER_GOOGLEID}}', + 'X-User-FacebookId': '{{LIBRECHAT_USER_FACEBOOKID}}', + 'X-User-OpenIdId': '{{LIBRECHAT_USER_OPENIDID}}', + 'X-User-SamlId': '{{LIBRECHAT_USER_SAMLID}}', + 'X-User-LdapId': '{{LIBRECHAT_USER_LDAPID}}', + 'X-User-GithubId': '{{LIBRECHAT_USER_GITHUBID}}', + 'X-User-DiscordId': '{{LIBRECHAT_USER_DISCORDID}}', + 'X-User-AppleId': '{{LIBRECHAT_USER_APPLEID}}', + 'X-User-EmailVerified': '{{LIBRECHAT_USER_EMAILVERIFIED}}', + 'X-User-TwoFactorEnabled': '{{LIBRECHAT_USER_TWOFACTORENABLED}}', + 'X-User-TermsAccepted': '{{LIBRECHAT_USER_TERMSACCEPTED}}', + }; + + const result = resolveHeaders(headers, user); + + expect(result['X-User-ID']).toBe('abc'); + expect(result['X-User-Name']).toBe('Test User'); + expect(result['X-User-Username']).toBe('testuser'); + expect(result['X-User-Email']).toBe('me@example.com'); + expect(result['X-User-Provider']).toBe('google'); + 
expect(result['X-User-Role']).toBe('admin'); + expect(result['X-User-GoogleId']).toBe('gid'); + expect(result['X-User-FacebookId']).toBe('fbid'); + expect(result['X-User-OpenIdId']).toBe('oid'); + expect(result['X-User-SamlId']).toBe('sid'); + expect(result['X-User-LdapId']).toBe('lid'); + expect(result['X-User-GithubId']).toBe('ghid'); + expect(result['X-User-DiscordId']).toBe('dcid'); + expect(result['X-User-AppleId']).toBe('aid'); + expect(result['X-User-EmailVerified']).toBe('true'); + expect(result['X-User-TwoFactorEnabled']).toBe('false'); + expect(result['X-User-TermsAccepted']).toBe('true'); + }); + + it('should handle multiple placeholders in one value', () => { + const user = { id: 'abc', email: 'me@example.com' }; + const headers = { + 'X-Multi': 'User: {{LIBRECHAT_USER_ID}}, Env: ${TEST_API_KEY}, Custom: {{MY_CUSTOM}}', + }; + const customVars = { MY_CUSTOM: 'custom-value' }; + const result = resolveHeaders(headers, user, customVars); + expect(result['X-Multi']).toBe('User: abc, Env: test-api-key-value, Custom: custom-value'); + }); + + it('should leave unknown placeholders unchanged', () => { + const user = { id: 'abc' }; + const headers = { + 'X-Unknown': '{{SOMETHING_NOT_RECOGNIZED}}', + 'X-Known': '{{LIBRECHAT_USER_ID}}', + }; + const result = resolveHeaders(headers, user); + expect(result['X-Unknown']).toBe('{{SOMETHING_NOT_RECOGNIZED}}'); + expect(result['X-Known']).toBe('abc'); + }); + + it('should handle a mix of all types', () => { + const user = { + id: 'abc', + email: 'me@example.com', + emailVerified: true, + twoFactorEnabled: false, + }; + const headers = { + 'X-User': '{{LIBRECHAT_USER_ID}}', + 'X-Env': '${TEST_API_KEY}', + 'X-Custom': '{{MY_CUSTOM}}', + 'X-Multi': 'ID: {{LIBRECHAT_USER_ID}}, ENV: ${TEST_API_KEY}, CUSTOM: {{MY_CUSTOM}}', + 'X-Unknown': '{{NOT_A_REAL_PLACEHOLDER}}', + 'X-Empty': '', + 'X-Boolean': '{{LIBRECHAT_USER_EMAILVERIFIED}}', + }; + const customVars = { MY_CUSTOM: 'custom-value' }; + const result = 
resolveHeaders(headers, user, customVars); + + expect(result['X-User']).toBe('abc'); + expect(result['X-Env']).toBe('test-api-key-value'); + expect(result['X-Custom']).toBe('custom-value'); + expect(result['X-Multi']).toBe('ID: abc, ENV: test-api-key-value, CUSTOM: custom-value'); + expect(result['X-Unknown']).toBe('{{NOT_A_REAL_PLACEHOLDER}}'); + expect(result['X-Empty']).toBe(''); + expect(result['X-Boolean']).toBe('true'); + }); }); From c87422a1e063597268426869db0a992ad3deb05b Mon Sep 17 00:00:00 2001 From: Danny Avila Date: Wed, 25 Jun 2025 15:14:33 -0400 Subject: [PATCH 10/65] =?UTF-8?q?=F0=9F=A7=A0=20feat:=20Thinking=20Budget,?= =?UTF-8?q?=20Include=20Thoughts,=20and=20Dynamic=20Thinking=20for=20Gemin?= =?UTF-8?q?i=202.5=20(#8055)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: support thinking budget parameter for Gemini 2.5 series (#6949, #7542) https://ai.google.dev/gemini-api/docs/thinking#set-budget * refactor: update thinking budget minimum value to -1 for dynamic thinking - see: https://ai.google.dev/gemini-api/docs/thinking#set-budget * chore: bump @librechat/agents to v2.4.43 * refactor: rename LLMConfigOptions to OpenAIConfigOptions for clarity and consistency - Updated type definitions and references in initialize.ts, llm.ts, and openai.ts to reflect the new naming convention. - Ensured that the OpenAI configuration options are consistently used across the relevant files. 
* refactor: port Google LLM methods to TypeScript Package * chore: update @librechat/agents version to 2.4.43 in package-lock.json and package.json * refactor: update thinking budget description for clarity and adjust placeholder in parameter settings * refactor: enhance googleSettings default value for thinking budget to support dynamic adjustment * chore: update @librechat/agents to v2.4.44 for Vertex Dynamic Thinking workaround * refactor: rename google config function, update `createRun` types, use `reasoning` as `reasoningKey` for Google * refactor: simplify placeholder handling in DynamicInput component * refactor: enhance thinking budget description for clarity and allow automatic decision by setting to "-1" * refactor: update text styling in OptionHover component for improved readability * chore: update @librechat/agents dependency to v2.4.46 in package.json and package-lock.json * chore: update @librechat/api version to 1.2.5 in package.json and package-lock.json * refactor: enhance `clientOptions` handling by filtering `omitTitleOptions`, add `json` field for Google models --------- Co-authored-by: ciffelia <15273128+ciffelia@users.noreply.github.com> --- api/app/clients/GoogleClient.js | 14 ++- api/package.json | 2 +- api/server/controllers/agents/client.js | 21 +++++ .../services/Endpoints/google/initialize.js | 5 +- .../SidePanel/Parameters/DynamicInput.tsx | 10 +-- .../SidePanel/Parameters/OptionHover.tsx | 2 +- client/src/locales/en/translation.json | 3 + package-lock.json | 64 ++++---------- packages/api/package.json | 4 +- packages/api/src/agents/run.ts | 9 +- packages/api/src/endpoints/google/index.ts | 1 + .../api/src/endpoints/google/llm.ts | 87 +++++++++++-------- packages/api/src/endpoints/index.ts | 1 + .../api/src/endpoints/openai/initialize.ts | 8 +- packages/api/src/endpoints/openai/llm.ts | 2 +- packages/api/src/types/google.ts | 24 +++++ packages/api/src/types/index.ts | 1 + packages/api/src/types/openai.ts | 2 +- 
packages/api/src/types/run.ts | 5 +- .../data-provider/src/parameterSettings.ts | 35 ++++++++ packages/data-provider/src/schemas.ts | 20 +++++ 21 files changed, 212 insertions(+), 108 deletions(-) create mode 100644 packages/api/src/endpoints/google/index.ts rename api/server/services/Endpoints/google/llm.js => packages/api/src/endpoints/google/llm.ts (61%) create mode 100644 packages/api/src/types/google.ts diff --git a/api/app/clients/GoogleClient.js b/api/app/clients/GoogleClient.js index 817239d14f..2ec23a0a06 100644 --- a/api/app/clients/GoogleClient.js +++ b/api/app/clients/GoogleClient.js @@ -1,7 +1,7 @@ const { google } = require('googleapis'); -const { Tokenizer } = require('@librechat/api'); const { concat } = require('@langchain/core/utils/stream'); const { ChatVertexAI } = require('@langchain/google-vertexai'); +const { Tokenizer, getSafetySettings } = require('@librechat/api'); const { ChatGoogleGenerativeAI } = require('@langchain/google-genai'); const { GoogleGenerativeAI: GenAI } = require('@google/generative-ai'); const { HumanMessage, SystemMessage } = require('@langchain/core/messages'); @@ -12,13 +12,13 @@ const { endpointSettings, parseTextParts, EModelEndpoint, + googleSettings, ContentTypes, VisionModes, ErrorTypes, Constants, AuthKeys, } = require('librechat-data-provider'); -const { getSafetySettings } = require('~/server/services/Endpoints/google/llm'); const { encodeAndFormat } = require('~/server/services/Files/images'); const { spendTokens } = require('~/models/spendTokens'); const { getModelMaxTokens } = require('~/utils'); @@ -166,6 +166,16 @@ class GoogleClient extends BaseClient { ); } + // Add thinking configuration + this.modelOptions.thinkingConfig = { + thinkingBudget: + (this.modelOptions.thinking ?? googleSettings.thinking.default) + ? this.modelOptions.thinkingBudget + : 0, + }; + delete this.modelOptions.thinking; + delete this.modelOptions.thinkingBudget; + this.sender = this.options.sender ?? 
getResponseSender({ diff --git a/api/package.json b/api/package.json index 7b0e064369..571db53aa6 100644 --- a/api/package.json +++ b/api/package.json @@ -48,7 +48,7 @@ "@langchain/google-genai": "^0.2.13", "@langchain/google-vertexai": "^0.2.13", "@langchain/textsplitters": "^0.1.0", - "@librechat/agents": "^2.4.42", + "@librechat/agents": "^2.4.46", "@librechat/api": "*", "@librechat/data-schemas": "*", "@node-saml/passport-saml": "^5.0.0", diff --git a/api/server/controllers/agents/client.js b/api/server/controllers/agents/client.js index f9d4e16a94..f4395b4b32 100644 --- a/api/server/controllers/agents/client.js +++ b/api/server/controllers/agents/client.js @@ -44,6 +44,17 @@ const BaseClient = require('~/app/clients/BaseClient'); const { loadAgent } = require('~/models/Agent'); const { getMCPManager } = require('~/config'); +const omitTitleOptions = new Set([ + 'stream', + 'thinking', + 'streaming', + 'clientOptions', + 'thinkingConfig', + 'thinkingBudget', + 'includeThoughts', + 'maxOutputTokens', +]); + /** * @param {ServerRequest} req * @param {Agent} agent @@ -1038,6 +1049,16 @@ class AgentClient extends BaseClient { delete clientOptions.maxTokens; } + clientOptions = Object.assign( + Object.fromEntries( + Object.entries(clientOptions).filter(([key]) => !omitTitleOptions.has(key)), + ), + ); + + if (provider === Providers.GOOGLE) { + clientOptions.json = true; + } + try { const titleResult = await this.run.generateTitle({ provider, diff --git a/api/server/services/Endpoints/google/initialize.js b/api/server/services/Endpoints/google/initialize.js index fe58a1fa87..60e874d5b8 100644 --- a/api/server/services/Endpoints/google/initialize.js +++ b/api/server/services/Endpoints/google/initialize.js @@ -1,7 +1,6 @@ +const { getGoogleConfig, isEnabled } = require('@librechat/api'); const { EModelEndpoint, AuthKeys } = require('librechat-data-provider'); const { getUserKey, checkUserKeyExpiry } = require('~/server/services/UserService'); -const { getLLMConfig } = 
require('~/server/services/Endpoints/google/llm'); -const { isEnabled } = require('~/server/utils'); const { GoogleClient } = require('~/app'); const initializeClient = async ({ req, res, endpointOption, overrideModel, optionsOnly }) => { @@ -65,7 +64,7 @@ const initializeClient = async ({ req, res, endpointOption, overrideModel, optio if (overrideModel) { clientOptions.modelOptions.model = overrideModel; } - return getLLMConfig(credentials, clientOptions); + return getGoogleConfig(credentials, clientOptions); } const client = new GoogleClient(credentials, clientOptions); diff --git a/client/src/components/SidePanel/Parameters/DynamicInput.tsx b/client/src/components/SidePanel/Parameters/DynamicInput.tsx index 71714d050e..57e55d75ca 100644 --- a/client/src/components/SidePanel/Parameters/DynamicInput.tsx +++ b/client/src/components/SidePanel/Parameters/DynamicInput.tsx @@ -46,6 +46,10 @@ function DynamicInput({ setInputValue(e, !isNaN(Number(e.target.value))); }; + const placeholderText = placeholderCode + ? localize(placeholder as TranslationKeys) || placeholder + : placeholder; + return (
-

{text}

+

{text}

diff --git a/client/src/locales/en/translation.json b/client/src/locales/en/translation.json index b875644a20..4c6ab1df47 100644 --- a/client/src/locales/en/translation.json +++ b/client/src/locales/en/translation.json @@ -207,6 +207,8 @@ "com_endpoint_google_temp": "Higher values = more random, while lower values = more focused and deterministic. We recommend altering this or Top P but not both.", "com_endpoint_google_topk": "Top-k changes how the model selects tokens for output. A top-k of 1 means the selected token is the most probable among all tokens in the model's vocabulary (also called greedy decoding), while a top-k of 3 means that the next token is selected from among the 3 most probable tokens (using temperature).", "com_endpoint_google_topp": "Top-p changes how the model selects tokens for output. Tokens are selected from most K (see topK parameter) probable to least until the sum of their probabilities equals the top-p value.", + "com_endpoint_google_thinking": "Enables or disables reasoning. This setting is only supported by certain models (2.5 series). For older models, this setting may have no effect.", + "com_endpoint_google_thinking_budget": "Guides the number of thinking tokens the model uses. The actual amount may exceed or fall below this value depending on the prompt.\n\nThis setting is only supported by certain models (2.5 series). Gemini 2.5 Pro supports 128-32,768 tokens. Gemini 2.5 Flash supports 0-24,576 tokens. Gemini 2.5 Flash Lite supports 512-24,576 tokens.\n\nLeave blank or set to \"-1\" to let the model automatically decide when and how much to think. By default, Gemini 2.5 Flash Lite does not think.", "com_endpoint_instructions_assistants": "Override Instructions", "com_endpoint_instructions_assistants_placeholder": "Overrides the instructions of the assistant. 
This is useful for modifying the behavior on a per-run basis.", "com_endpoint_max_output_tokens": "Max Output Tokens", @@ -582,6 +584,7 @@ "com_ui_auth_url": "Authorization URL", "com_ui_authentication": "Authentication", "com_ui_authentication_type": "Authentication Type", + "com_ui_auto": "Auto", "com_ui_available_tools": "Available Tools", "com_ui_avatar": "Avatar", "com_ui_azure": "Azure", diff --git a/package-lock.json b/package-lock.json index 39e52db331..989883c0ec 100644 --- a/package-lock.json +++ b/package-lock.json @@ -64,7 +64,7 @@ "@langchain/google-genai": "^0.2.13", "@langchain/google-vertexai": "^0.2.13", "@langchain/textsplitters": "^0.1.0", - "@librechat/agents": "^2.4.42", + "@librechat/agents": "^2.4.46", "@librechat/api": "*", "@librechat/data-schemas": "*", "@node-saml/passport-saml": "^5.0.0", @@ -1351,33 +1351,6 @@ } } }, - "api/node_modules/@librechat/agents": { - "version": "2.4.42", - "resolved": "https://registry.npmjs.org/@librechat/agents/-/agents-2.4.42.tgz", - "integrity": "sha512-52ux2PeEAV79yr6/h6GN3omlpqX6H0FYl6qwjJ6gT04MMko/imnLd3bQrX0gm3i0KL5ygHbRjQeonONKjJayHw==", - "license": "MIT", - "dependencies": { - "@langchain/anthropic": "^0.3.23", - "@langchain/aws": "^0.1.11", - "@langchain/community": "^0.3.47", - "@langchain/core": "^0.3.60", - "@langchain/deepseek": "^0.0.2", - "@langchain/google-genai": "^0.2.13", - "@langchain/google-vertexai": "^0.2.13", - "@langchain/langgraph": "^0.3.4", - "@langchain/mistralai": "^0.2.1", - "@langchain/ollama": "^0.2.3", - "@langchain/openai": "^0.5.14", - "@langchain/xai": "^0.0.3", - "cheerio": "^1.0.0", - "dotenv": "^16.4.7", - "https-proxy-agent": "^7.0.6", - "nanoid": "^3.3.7" - }, - "engines": { - "node": ">=14.0.0" - } - }, "api/node_modules/@smithy/abort-controller": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-4.0.2.tgz", @@ -19153,9 +19126,9 @@ } }, "node_modules/@langchain/langgraph": { - "version": "0.3.4", - "resolved": 
"https://registry.npmjs.org/@langchain/langgraph/-/langgraph-0.3.4.tgz", - "integrity": "sha512-Vuja8Qtu3Zjx7k4fK7Cnw+p8gtvIRPciWp9btPhAs3aUo6aBgOJOZVcK5Ii3mHfEHK/aQmRElR0x/u/YwykOrg==", + "version": "0.3.5", + "resolved": "https://registry.npmjs.org/@langchain/langgraph/-/langgraph-0.3.5.tgz", + "integrity": "sha512-7astlgnp6BdMQJqmr+cbDgR10FYWNCaDLnbfEDHpqhKCCajU59m5snOdl4Vtu5UM6V2k3lgatNqWoflBtxhIyg==", "license": "MIT", "dependencies": { "@langchain/langgraph-checkpoint": "~0.0.18", @@ -19463,11 +19436,10 @@ } }, "node_modules/@librechat/agents": { - "version": "2.4.41", - "resolved": "https://registry.npmjs.org/@librechat/agents/-/agents-2.4.41.tgz", - "integrity": "sha512-kYmdk5WVRp0qZxTx6BuGCs4l0Ir9iBLLx4ZY4/1wxr80al5/vq3P8wbgGdKMeO2qTu4ZaT4RyWRQYWBg5HDkUQ==", + "version": "2.4.46", + "resolved": "https://registry.npmjs.org/@librechat/agents/-/agents-2.4.46.tgz", + "integrity": "sha512-zR27U19/WGF3HN64oBbiaFgjjWHaF7BjYzRFWzQKEkk+iEzCe59IpuEZUizQ54YcY02nhhh6S3MNUjhAJwMYVA==", "license": "MIT", - "peer": true, "dependencies": { "@langchain/anthropic": "^0.3.23", "@langchain/aws": "^0.1.11", @@ -19495,7 +19467,6 @@ "resolved": "https://registry.npmjs.org/@langchain/community/-/community-0.3.47.tgz", "integrity": "sha512-Vo42kAfkXpTFSevhEkeqqE55az8NyQgDktCbitXYuhipNbFYx08XVvqEDkFkB20MM/Z7u+cvLb+DxCqnKuH0CQ==", "license": "MIT", - "peer": true, "dependencies": { "@langchain/openai": ">=0.2.0 <0.6.0", "@langchain/weaviate": "^0.2.0", @@ -20017,11 +19988,10 @@ } }, "node_modules/@librechat/agents/node_modules/@langchain/openai": { - "version": "0.5.14", - "resolved": "https://registry.npmjs.org/@langchain/openai/-/openai-0.5.14.tgz", - "integrity": "sha512-0GEj5K/qi1MRuZ4nE7NvyI4jTG+RSewLZqsExUwRukWdeqmkPNHGrogTa5ZDt7eaJxAaY7EgLC5ZnvCM3L1oug==", + "version": "0.5.15", + "resolved": "https://registry.npmjs.org/@langchain/openai/-/openai-0.5.15.tgz", + "integrity": "sha512-ANadEHyAj5sufQpz+SOPpKbyoMcTLhnh8/d+afbSPUqWsIMPpEFX3HoSY3nrBPG6l4NQQNG5P5oHb4SdC8+YIg==", 
"license": "MIT", - "peer": true, "dependencies": { "js-tiktoken": "^1.0.12", "openai": "^5.3.0", @@ -20039,7 +20009,6 @@ "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.3.tgz", "integrity": "sha512-jRR5wdylq8CkOe6hei19GGZnxM6rBGwFl3Bg0YItGDimvjGtAvdZk4Pu6Cl4u4Igsws4a1fd1Vq3ezrhn4KmFw==", "license": "MIT", - "peer": true, "engines": { "node": ">= 14" } @@ -20049,7 +20018,6 @@ "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz", "integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==", "license": "MIT", - "peer": true, "dependencies": { "agent-base": "^7.1.2", "debug": "4" @@ -20059,11 +20027,10 @@ } }, "node_modules/@librechat/agents/node_modules/openai": { - "version": "5.5.1", - "resolved": "https://registry.npmjs.org/openai/-/openai-5.5.1.tgz", - "integrity": "sha512-5i19097mGotHA1eFsM6Tjd/tJ8uo9sa5Ysv4Q6bKJ2vtN6rc0MzMrUefXnLXYAJcmMQrC1Efhj0AvfIkXrQamw==", + "version": "5.7.0", + "resolved": "https://registry.npmjs.org/openai/-/openai-5.7.0.tgz", + "integrity": "sha512-zXWawZl6J/P5Wz57/nKzVT3kJQZvogfuyuNVCdEp4/XU2UNrjL7SsuNpWAyLZbo6HVymwmnfno9toVzBhelygA==", "license": "Apache-2.0", - "peer": true, "bin": { "openai": "bin/cli" }, @@ -20089,7 +20056,6 @@ "https://github.com/sponsors/ctavan" ], "license": "MIT", - "peer": true, "bin": { "uuid": "dist/bin/uuid" } @@ -46568,7 +46534,7 @@ }, "packages/api": { "name": "@librechat/api", - "version": "1.2.4", + "version": "1.2.5", "license": "ISC", "devDependencies": { "@babel/preset-env": "^7.21.5", @@ -46600,7 +46566,7 @@ "typescript": "^5.0.4" }, "peerDependencies": { - "@librechat/agents": "^2.4.41", + "@librechat/agents": "^2.4.46", "@librechat/data-schemas": "*", "@modelcontextprotocol/sdk": "^1.12.3", "axios": "^1.8.2", diff --git a/packages/api/package.json b/packages/api/package.json index aa4fc43772..4aaf0f793e 100644 --- a/packages/api/package.json +++ b/packages/api/package.json @@ -1,6 +1,6 @@ { 
"name": "@librechat/api", - "version": "1.2.4", + "version": "1.2.5", "type": "commonjs", "description": "MCP services for LibreChat", "main": "dist/index.js", @@ -69,7 +69,7 @@ "registry": "https://registry.npmjs.org/" }, "peerDependencies": { - "@librechat/agents": "^2.4.41", + "@librechat/agents": "^2.4.46", "@librechat/data-schemas": "*", "@modelcontextprotocol/sdk": "^1.12.3", "axios": "^1.8.2", diff --git a/packages/api/src/agents/run.ts b/packages/api/src/agents/run.ts index 41ec02d9b9..e12d2cf2b6 100644 --- a/packages/api/src/agents/run.ts +++ b/packages/api/src/agents/run.ts @@ -46,7 +46,10 @@ export async function createRun({ customHandlers?: Record; }): Promise> { const provider = - providerEndpointMap[agent.provider as keyof typeof providerEndpointMap] ?? agent.provider; + (providerEndpointMap[ + agent.provider as keyof typeof providerEndpointMap + ] as unknown as Providers) ?? agent.provider; + const llmConfig: t.RunLLMConfig = Object.assign( { provider, @@ -66,7 +69,9 @@ export async function createRun({ } let reasoningKey: 'reasoning_content' | 'reasoning' | undefined; - if ( + if (provider === Providers.GOOGLE) { + reasoningKey = 'reasoning'; + } else if ( llmConfig.configuration?.baseURL?.includes(KnownEndpoints.openrouter) || (agent.endpoint && agent.endpoint.toLowerCase().includes(KnownEndpoints.openrouter)) ) { diff --git a/packages/api/src/endpoints/google/index.ts b/packages/api/src/endpoints/google/index.ts new file mode 100644 index 0000000000..4045e8de0c --- /dev/null +++ b/packages/api/src/endpoints/google/index.ts @@ -0,0 +1 @@ +export * from './llm'; diff --git a/api/server/services/Endpoints/google/llm.js b/packages/api/src/endpoints/google/llm.ts similarity index 61% rename from api/server/services/Endpoints/google/llm.js rename to packages/api/src/endpoints/google/llm.ts index 235e1e3df9..0721acce29 100644 --- a/api/server/services/Endpoints/google/llm.js +++ b/packages/api/src/endpoints/google/llm.ts @@ -1,13 +1,15 @@ -const { 
Providers } = require('@librechat/agents'); -const { AuthKeys } = require('librechat-data-provider'); -const { isEnabled } = require('~/server/utils'); +import { Providers } from '@librechat/agents'; +import { googleSettings, AuthKeys } from 'librechat-data-provider'; +import type { GoogleClientOptions, VertexAIClientOptions } from '@librechat/agents'; +import type * as t from '~/types'; +import { isEnabled } from '~/utils'; -function getThresholdMapping(model) { +function getThresholdMapping(model: string) { const gemini1Pattern = /gemini-(1\.0|1\.5|pro$|1\.0-pro|1\.5-pro|1\.5-flash-001)/; const restrictedPattern = /(gemini-(1\.5-flash-8b|2\.0|exp)|learnlm)/; if (gemini1Pattern.test(model)) { - return (value) => { + return (value: string) => { if (value === 'OFF') { return 'BLOCK_NONE'; } @@ -16,7 +18,7 @@ function getThresholdMapping(model) { } if (restrictedPattern.test(model)) { - return (value) => { + return (value: string) => { if (value === 'OFF' || value === 'HARM_BLOCK_THRESHOLD_UNSPECIFIED') { return 'BLOCK_NONE'; } @@ -24,19 +26,16 @@ function getThresholdMapping(model) { }; } - return (value) => value; + return (value: string) => value; } -/** - * - * @param {string} model - * @returns {Array<{category: string, threshold: string}> | undefined} - */ -function getSafetySettings(model) { +export function getSafetySettings( + model?: string, +): Array<{ category: string; threshold: string }> | undefined { if (isEnabled(process.env.GOOGLE_EXCLUDE_SAFETY_SETTINGS)) { return undefined; } - const mapThreshold = getThresholdMapping(model); + const mapThreshold = getThresholdMapping(model ?? ''); return [ { @@ -74,24 +73,27 @@ function getSafetySettings(model) { * Replicates core logic from GoogleClient's constructor and setOptions, plus client determination. * Returns an object with the provider label and the final options that would be passed to createLLM. 
* - * @param {string | object} credentials - Either a JSON string or an object containing Google keys - * @param {object} [options={}] - The same shape as the "GoogleClient" constructor options + * @param credentials - Either a JSON string or an object containing Google keys + * @param options - The same shape as the "GoogleClient" constructor options */ -function getLLMConfig(credentials, options = {}) { - // 1. Parse credentials - let creds = {}; +export function getGoogleConfig( + credentials: string | t.GoogleCredentials | undefined, + options: t.GoogleConfigOptions = {}, +) { + let creds: t.GoogleCredentials = {}; if (typeof credentials === 'string') { try { creds = JSON.parse(credentials); - } catch (err) { - throw new Error(`Error parsing string credentials: ${err.message}`); + } catch (err: unknown) { + throw new Error( + `Error parsing string credentials: ${err instanceof Error ? err.message : 'Unknown error'}`, + ); } } else if (credentials && typeof credentials === 'object') { creds = credentials; } - // Extract from credentials const serviceKeyRaw = creds[AuthKeys.GOOGLE_SERVICE_KEY] ?? {}; const serviceKey = typeof serviceKeyRaw === 'string' ? JSON.parse(serviceKeyRaw) : (serviceKeyRaw ?? {}); @@ -102,9 +104,15 @@ function getLLMConfig(credentials, options = {}) { const reverseProxyUrl = options.reverseProxyUrl; const authHeader = options.authHeader; - /** @type {GoogleClientOptions | VertexAIClientOptions} */ - let llmConfig = { - ...(options.modelOptions || {}), + const { + thinking = googleSettings.thinking.default, + thinkingBudget = googleSettings.thinkingBudget.default, + ...modelOptions + } = options.modelOptions || {}; + + const llmConfig: GoogleClientOptions | VertexAIClientOptions = { + ...(modelOptions || {}), + model: modelOptions?.model ?? 
'', maxRetries: 2, }; @@ -121,16 +129,30 @@ function getLLMConfig(credentials, options = {}) { // If we have a GCP project => Vertex AI if (project_id && provider === Providers.VERTEXAI) { - /** @type {VertexAIClientOptions['authOptions']} */ - llmConfig.authOptions = { + (llmConfig as VertexAIClientOptions).authOptions = { credentials: { ...serviceKey }, projectId: project_id, }; - llmConfig.location = process.env.GOOGLE_LOC || 'us-central1'; + (llmConfig as VertexAIClientOptions).location = process.env.GOOGLE_LOC || 'us-central1'; } else if (apiKey && provider === Providers.GOOGLE) { llmConfig.apiKey = apiKey; } + const shouldEnableThinking = + thinking && thinkingBudget != null && (thinkingBudget > 0 || thinkingBudget === -1); + + if (shouldEnableThinking && provider === Providers.GOOGLE) { + (llmConfig as GoogleClientOptions).thinkingConfig = { + thinkingBudget: thinking ? thinkingBudget : googleSettings.thinkingBudget.default, + includeThoughts: Boolean(thinking), + }; + } else if (shouldEnableThinking && provider === Providers.VERTEXAI) { + (llmConfig as VertexAIClientOptions).thinkingBudget = thinking + ? 
thinkingBudget + : googleSettings.thinkingBudget.default; + (llmConfig as VertexAIClientOptions).includeThoughts = Boolean(thinking); + } + /* let legacyOptions = {}; // Filter out any "examples" that are empty @@ -152,11 +174,11 @@ function getLLMConfig(credentials, options = {}) { */ if (reverseProxyUrl) { - llmConfig.baseUrl = reverseProxyUrl; + (llmConfig as GoogleClientOptions).baseUrl = reverseProxyUrl; } if (authHeader) { - llmConfig.customHeaders = { + (llmConfig as GoogleClientOptions).customHeaders = { Authorization: `Bearer ${apiKey}`, }; } @@ -169,8 +191,3 @@ function getLLMConfig(credentials, options = {}) { llmConfig, }; } - -module.exports = { - getLLMConfig, - getSafetySettings, -}; diff --git a/packages/api/src/endpoints/index.ts b/packages/api/src/endpoints/index.ts index e919f9e429..e12780d876 100644 --- a/packages/api/src/endpoints/index.ts +++ b/packages/api/src/endpoints/index.ts @@ -1 +1,2 @@ +export * from './google'; export * from './openai'; diff --git a/packages/api/src/endpoints/openai/initialize.ts b/packages/api/src/endpoints/openai/initialize.ts index 91e92db85a..ad44ed4697 100644 --- a/packages/api/src/endpoints/openai/initialize.ts +++ b/packages/api/src/endpoints/openai/initialize.ts @@ -1,9 +1,9 @@ import { ErrorTypes, EModelEndpoint, mapModelToAzureConfig } from 'librechat-data-provider'; import type { - LLMConfigOptions, UserKeyValues, - InitializeOpenAIOptionsParams, OpenAIOptionsResult, + OpenAIConfigOptions, + InitializeOpenAIOptionsParams, } from '~/types'; import { createHandleLLMNewToken } from '~/utils/generators'; import { getAzureCredentials } from '~/utils/azure'; @@ -64,7 +64,7 @@ export const initializeOpenAI = async ({ ? userValues?.baseURL : baseURLOptions[endpoint as keyof typeof baseURLOptions]; - const clientOptions: LLMConfigOptions = { + const clientOptions: OpenAIConfigOptions = { proxy: PROXY ?? 
undefined, reverseProxyUrl: baseURL || undefined, streaming: true, @@ -135,7 +135,7 @@ export const initializeOpenAI = async ({ user: req.user.id, }; - const finalClientOptions: LLMConfigOptions = { + const finalClientOptions: OpenAIConfigOptions = { ...clientOptions, modelOptions, }; diff --git a/packages/api/src/endpoints/openai/llm.ts b/packages/api/src/endpoints/openai/llm.ts index ec7c4b863d..ddf61016e8 100644 --- a/packages/api/src/endpoints/openai/llm.ts +++ b/packages/api/src/endpoints/openai/llm.ts @@ -13,7 +13,7 @@ import { isEnabled } from '~/utils/common'; */ export function getOpenAIConfig( apiKey: string, - options: t.LLMConfigOptions = {}, + options: t.OpenAIConfigOptions = {}, endpoint?: string | null, ): t.LLMConfigResult { const { diff --git a/packages/api/src/types/google.ts b/packages/api/src/types/google.ts new file mode 100644 index 0000000000..1bc40f06e8 --- /dev/null +++ b/packages/api/src/types/google.ts @@ -0,0 +1,24 @@ +import { z } from 'zod'; +import { AuthKeys, googleBaseSchema } from 'librechat-data-provider'; + +export type GoogleParameters = z.infer; + +export type GoogleCredentials = { + [AuthKeys.GOOGLE_SERVICE_KEY]?: string; + [AuthKeys.GOOGLE_API_KEY]?: string; +}; + +/** + * Configuration options for the getLLMConfig function + */ +export interface GoogleConfigOptions { + modelOptions?: Partial; + reverseProxyUrl?: string; + defaultQuery?: Record; + headers?: Record; + proxy?: string; + streaming?: boolean; + authHeader?: boolean; + addParams?: Record; + dropParams?: string[]; +} diff --git a/packages/api/src/types/index.ts b/packages/api/src/types/index.ts index 41ea33eb45..6db727529a 100644 --- a/packages/api/src/types/index.ts +++ b/packages/api/src/types/index.ts @@ -1,5 +1,6 @@ export * from './azure'; export * from './events'; +export * from './google'; export * from './mistral'; export * from './openai'; export * from './run'; diff --git a/packages/api/src/types/openai.ts b/packages/api/src/types/openai.ts index 
cb11be984f..5f609a641a 100644 --- a/packages/api/src/types/openai.ts +++ b/packages/api/src/types/openai.ts @@ -9,7 +9,7 @@ export type OpenAIParameters = z.infer; /** * Configuration options for the getLLMConfig function */ -export interface LLMConfigOptions { +export interface OpenAIConfigOptions { modelOptions?: Partial; reverseProxyUrl?: string; defaultQuery?: Record; diff --git a/packages/api/src/types/run.ts b/packages/api/src/types/run.ts index 080b3adba1..72c61a587f 100644 --- a/packages/api/src/types/run.ts +++ b/packages/api/src/types/run.ts @@ -1,8 +1,9 @@ -import type { AgentModelParameters, EModelEndpoint } from 'librechat-data-provider'; +import type { Providers } from '@librechat/agents'; +import type { AgentModelParameters } from 'librechat-data-provider'; import type { OpenAIConfiguration } from './openai'; export type RunLLMConfig = { - provider: EModelEndpoint; + provider: Providers; streaming: boolean; streamUsage: boolean; usage?: boolean; diff --git a/packages/data-provider/src/parameterSettings.ts b/packages/data-provider/src/parameterSettings.ts index af79eb2c1f..8b1dd222a4 100644 --- a/packages/data-provider/src/parameterSettings.ts +++ b/packages/data-provider/src/parameterSettings.ts @@ -450,6 +450,37 @@ const google: Record = { optionType: 'model', columnSpan: 2, }, + thinking: { + key: 'thinking', + label: 'com_endpoint_thinking', + labelCode: true, + description: 'com_endpoint_google_thinking', + descriptionCode: true, + type: 'boolean', + default: googleSettings.thinking.default, + component: 'switch', + optionType: 'conversation', + showDefault: false, + columnSpan: 2, + }, + thinkingBudget: { + key: 'thinkingBudget', + label: 'com_endpoint_thinking_budget', + labelCode: true, + description: 'com_endpoint_google_thinking_budget', + descriptionCode: true, + placeholder: 'com_ui_auto', + placeholderCode: true, + type: 'number', + component: 'input', + range: { + min: googleSettings.thinkingBudget.min, + max: 
googleSettings.thinkingBudget.max, + step: googleSettings.thinkingBudget.step, + }, + optionType: 'conversation', + columnSpan: 2, + }, }; const googleConfig: SettingsConfiguration = [ @@ -461,6 +492,8 @@ const googleConfig: SettingsConfiguration = [ google.topP, google.topK, librechat.resendFiles, + google.thinking, + google.thinkingBudget, ]; const googleCol1: SettingsConfiguration = [ @@ -476,6 +509,8 @@ const googleCol2: SettingsConfiguration = [ google.topP, google.topK, librechat.resendFiles, + google.thinking, + google.thinkingBudget, ]; const openAI: SettingsConfiguration = [ diff --git a/packages/data-provider/src/schemas.ts b/packages/data-provider/src/schemas.ts index 6125c65e5b..463150d36f 100644 --- a/packages/data-provider/src/schemas.ts +++ b/packages/data-provider/src/schemas.ts @@ -255,6 +255,18 @@ export const googleSettings = { step: 1 as const, default: 40 as const, }, + thinking: { + default: true as const, + }, + thinkingBudget: { + min: -1 as const, + max: 32768 as const, + step: 1 as const, + /** `-1` = Dynamic Thinking, meaning the model will adjust + * the budget based on the complexity of the request. 
+ */ + default: -1 as const, + }, }; const ANTHROPIC_MAX_OUTPUT = 128000 as const; @@ -785,6 +797,8 @@ export const googleBaseSchema = tConversationSchema.pick({ artifacts: true, topP: true, topK: true, + thinking: true, + thinkingBudget: true, iconURL: true, greeting: true, spec: true, @@ -810,6 +824,12 @@ export const googleGenConfigSchema = z presencePenalty: coerceNumber.optional(), frequencyPenalty: coerceNumber.optional(), stopSequences: z.array(z.string()).optional(), + thinkingConfig: z + .object({ + includeThoughts: z.boolean().optional(), + thinkingBudget: coerceNumber.optional(), + }) + .optional(), }) .strip() .optional(); From c551ba21f5ec3ea1aeac5c6125502c670bfd02ec Mon Sep 17 00:00:00 2001 From: Marlon <153027575+marlonka@users.noreply.github.com> Date: Wed, 25 Jun 2025 21:31:24 +0200 Subject: [PATCH 11/65] =?UTF-8?q?=F0=9F=93=9C=20chore:=20Update=20`.env.ex?= =?UTF-8?q?ample`=20(#8043)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Update recent Gemini model names and remove deprecated Gemini models from env.example --- .env.example | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.env.example b/.env.example index 086938043b..2b811c79de 100644 --- a/.env.example +++ b/.env.example @@ -142,10 +142,10 @@ GOOGLE_KEY=user_provided # GOOGLE_AUTH_HEADER=true # Gemini API (AI Studio) -# GOOGLE_MODELS=gemini-2.5-pro-preview-05-06,gemini-2.5-flash-preview-04-17,gemini-2.0-flash-001,gemini-2.0-flash-exp,gemini-2.0-flash-lite-001,gemini-1.5-pro-002,gemini-1.5-flash-002 +# GOOGLE_MODELS=gemini-2.5-pro,gemini-2.5-flash,gemini-2.5-flash-lite-preview-06-17,gemini-2.0-flash,gemini-2.0-flash-lite # Vertex AI -# GOOGLE_MODELS=gemini-2.5-pro-preview-05-06,gemini-2.5-flash-preview-04-17,gemini-2.0-flash-001,gemini-2.0-flash-exp,gemini-2.0-flash-lite-001,gemini-1.5-pro-002,gemini-1.5-flash-002 +# 
GOOGLE_MODELS=gemini-2.5-pro,gemini-2.5-flash,gemini-2.5-flash-lite-preview-06-17,gemini-2.0-flash-001,gemini-2.0-flash-lite-001 # GOOGLE_TITLE_MODEL=gemini-2.0-flash-lite-001 @@ -657,4 +657,4 @@ OPENWEATHER_API_KEY= # Reranker (Required) # JINA_API_KEY=your_jina_api_key # or -# COHERE_API_KEY=your_cohere_api_key \ No newline at end of file +# COHERE_API_KEY=your_cohere_api_key From 3ab1bd65e54d17ec545f59139a5a323f98e4c216 Mon Sep 17 00:00:00 2001 From: Karol Potocki Date: Wed, 25 Jun 2025 21:38:24 +0200 Subject: [PATCH 12/65] =?UTF-8?q?=F0=9F=90=9B=20fix:=20Support=20Bedrock?= =?UTF-8?q?=20Provider=20for=20MCP=20Image=20Content=20Rendering=20(#8047)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- packages/api/src/mcp/parsers.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/api/src/mcp/parsers.ts b/packages/api/src/mcp/parsers.ts index e377f4b70c..7886c86a10 100644 --- a/packages/api/src/mcp/parsers.ts +++ b/packages/api/src/mcp/parsers.ts @@ -7,6 +7,7 @@ const RECOGNIZED_PROVIDERS = new Set([ 'xai', 'deepseek', 'ollama', + 'bedrock', ]); const CONTENT_ARRAY_PROVIDERS = new Set(['google', 'anthropic', 'openai']); From cbda3cb529bfc1e610fe6ba72e259e255af33d06 Mon Sep 17 00:00:00 2001 From: Danny Avila Date: Wed, 25 Jun 2025 17:16:26 -0400 Subject: [PATCH 13/65] =?UTF-8?q?=F0=9F=95=90=20feat:=20Configurable=20Ret?= =?UTF-8?q?ention=20Period=20for=20Temporary=20Chats=20(#8056)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: Add configurable retention period for temporary chats * Addressing eslint errors * Fix: failing test due to missing registration * Update: variable name and use hours instead of days for chat retention * Addressing comments * chore: fix import order in Conversation.js * chore: import order in Message.js * chore: fix import order in config.ts * chore: move common methods to packages/api to reduce potential for circular dependencies * 
refactor: update temp chat retention config type to Partial * refactor: remove unused config variable from AppService and update loadCustomConfig tests with logger mock * refactor: handle model undefined edge case by moving Session model initialization inside methods --------- Co-authored-by: Rakshit Tiwari --- api/cache/banViolation.js | 3 +- api/cache/getLogStores.js | 2 +- api/models/Conversation.js | 15 +- api/models/Message.js | 13 +- api/server/controllers/AuthController.js | 8 +- api/server/controllers/EditController.js | 7 +- api/server/controllers/agents/errors.js | 8 +- api/server/controllers/agents/request.js | 6 +- api/server/controllers/assistants/chatV1.js | 11 +- api/server/controllers/assistants/chatV2.js | 10 +- api/server/controllers/assistants/errors.js | 10 +- api/server/middleware/abortMiddleware.js | 10 +- api/server/middleware/abortRun.js | 8 +- api/server/middleware/denyRequest.js | 5 +- .../streamResponse.js => middleware/error.js} | 34 +---- api/server/services/AssistantService.js | 8 +- api/server/services/Config/EndpointService.js | 3 +- .../services/Config/loadCustomConfig.js | 22 +-- .../services/Config/loadCustomConfig.spec.js | 20 ++- api/server/services/Runs/StreamRunManager.js | 10 +- api/server/utils/handleText.js | 6 +- api/server/utils/index.js | 4 - api/utils/index.js | 2 - api/utils/loadYaml.js | 13 -- librechat.example.yaml | 2 + package-lock.json | 1 + packages/api/package.json | 1 + packages/api/src/utils/events.ts | 10 ++ packages/api/src/utils/index.ts | 3 + .../math.js => packages/api/src/utils/math.ts | 12 +- .../api/src/utils/tempChatRetention.spec.ts | 133 ++++++++++++++++++ packages/api/src/utils/tempChatRetention.ts | 77 ++++++++++ packages/api/src/utils/yaml.ts | 11 ++ packages/data-provider/src/config.ts | 1 + packages/data-schemas/src/methods/session.ts | 18 +-- 35 files changed, 372 insertions(+), 135 deletions(-) rename api/server/{utils/streamResponse.js => middleware/error.js} (76%) delete mode 100644 
api/utils/loadYaml.js rename api/server/utils/math.js => packages/api/src/utils/math.ts (68%) create mode 100644 packages/api/src/utils/tempChatRetention.spec.ts create mode 100644 packages/api/src/utils/tempChatRetention.ts create mode 100644 packages/api/src/utils/yaml.ts diff --git a/api/cache/banViolation.js b/api/cache/banViolation.js index 17b23f1c12..3a2d9791b4 100644 --- a/api/cache/banViolation.js +++ b/api/cache/banViolation.js @@ -1,7 +1,8 @@ const { logger } = require('@librechat/data-schemas'); +const { isEnabled, math } = require('@librechat/api'); const { ViolationTypes } = require('librechat-data-provider'); -const { isEnabled, math, removePorts } = require('~/server/utils'); const { deleteAllUserSessions } = require('~/models'); +const { removePorts } = require('~/server/utils'); const getLogStores = require('./getLogStores'); const { BAN_VIOLATIONS, BAN_INTERVAL } = process.env ?? {}; diff --git a/api/cache/getLogStores.js b/api/cache/getLogStores.js index 06cadf9f64..0eef7d3fb4 100644 --- a/api/cache/getLogStores.js +++ b/api/cache/getLogStores.js @@ -1,7 +1,7 @@ const { Keyv } = require('keyv'); +const { isEnabled, math } = require('@librechat/api'); const { CacheKeys, ViolationTypes, Time } = require('librechat-data-provider'); const { logFile, violationFile } = require('./keyvFiles'); -const { isEnabled, math } = require('~/server/utils'); const keyvRedis = require('./keyvRedis'); const keyvMongo = require('./keyvMongo'); diff --git a/api/models/Conversation.js b/api/models/Conversation.js index 38e2cbb448..698762d43d 100644 --- a/api/models/Conversation.js +++ b/api/models/Conversation.js @@ -1,4 +1,6 @@ const { logger } = require('@librechat/data-schemas'); +const { createTempChatExpirationDate } = require('@librechat/api'); +const getCustomConfig = require('~/server/services/Config/loadCustomConfig'); const { getMessages, deleteMessages } = require('./Message'); const { Conversation } = require('~/db/models'); @@ -98,10 +100,15 @@ 
module.exports = { update.conversationId = newConversationId; } - if (req.body.isTemporary) { - const expiredAt = new Date(); - expiredAt.setDate(expiredAt.getDate() + 30); - update.expiredAt = expiredAt; + if (req?.body?.isTemporary) { + try { + const customConfig = await getCustomConfig(); + update.expiredAt = createTempChatExpirationDate(customConfig); + } catch (err) { + logger.error('Error creating temporary chat expiration date:', err); + logger.info(`---\`saveConvo\` context: ${metadata?.context}`); + update.expiredAt = null; + } } else { update.expiredAt = null; } diff --git a/api/models/Message.js b/api/models/Message.js index abd538084e..c200c5f4d4 100644 --- a/api/models/Message.js +++ b/api/models/Message.js @@ -1,5 +1,7 @@ const { z } = require('zod'); const { logger } = require('@librechat/data-schemas'); +const { createTempChatExpirationDate } = require('@librechat/api'); +const getCustomConfig = require('~/server/services/Config/loadCustomConfig'); const { Message } = require('~/db/models'); const idSchema = z.string().uuid(); @@ -54,9 +56,14 @@ async function saveMessage(req, params, metadata) { }; if (req?.body?.isTemporary) { - const expiredAt = new Date(); - expiredAt.setDate(expiredAt.getDate() + 30); - update.expiredAt = expiredAt; + try { + const customConfig = await getCustomConfig(); + update.expiredAt = createTempChatExpirationDate(customConfig); + } catch (err) { + logger.error('Error creating temporary chat expiration date:', err); + logger.info(`---\`saveMessage\` context: ${metadata?.context}`); + update.expiredAt = null; + } } else { update.expiredAt = null; } diff --git a/api/server/controllers/AuthController.js b/api/server/controllers/AuthController.js index 0f8152de3e..3dbb1a2f31 100644 --- a/api/server/controllers/AuthController.js +++ b/api/server/controllers/AuthController.js @@ -1,17 +1,17 @@ const cookies = require('cookie'); const jwt = require('jsonwebtoken'); const openIdClient = require('openid-client'); +const { 
isEnabled } = require('@librechat/api'); const { logger } = require('@librechat/data-schemas'); const { - registerUser, - resetPassword, - setAuthTokens, requestPasswordReset, setOpenIDAuthTokens, + resetPassword, + setAuthTokens, + registerUser, } = require('~/server/services/AuthService'); const { findUser, getUserById, deleteAllUserSessions, findSession } = require('~/models'); const { getOpenIdConfig } = require('~/strategies'); -const { isEnabled } = require('~/server/utils'); const registrationController = async (req, res) => { try { diff --git a/api/server/controllers/EditController.js b/api/server/controllers/EditController.js index 574111abf9..d24e87ce3a 100644 --- a/api/server/controllers/EditController.js +++ b/api/server/controllers/EditController.js @@ -1,3 +1,5 @@ +const { sendEvent } = require('@librechat/api'); +const { logger } = require('@librechat/data-schemas'); const { getResponseSender } = require('librechat-data-provider'); const { handleAbortError, @@ -10,9 +12,8 @@ const { clientRegistry, requestDataMap, } = require('~/server/cleanup'); -const { sendMessage, createOnProgress } = require('~/server/utils'); +const { createOnProgress } = require('~/server/utils'); const { saveMessage } = require('~/models'); -const { logger } = require('~/config'); const EditController = async (req, res, next, initializeClient) => { let { @@ -198,7 +199,7 @@ const EditController = async (req, res, next, initializeClient) => { const finalUserMessage = reqDataContext.userMessage; const finalResponseMessage = { ...response }; - sendMessage(res, { + sendEvent(res, { final: true, conversation, title: conversation.title, diff --git a/api/server/controllers/agents/errors.js b/api/server/controllers/agents/errors.js index fb4de45085..b3bb1cea65 100644 --- a/api/server/controllers/agents/errors.js +++ b/api/server/controllers/agents/errors.js @@ -1,10 +1,10 @@ // errorHandler.js -const { logger } = require('~/config'); -const getLogStores = 
require('~/cache/getLogStores'); +const { logger } = require('@librechat/data-schemas'); const { CacheKeys, ViolationTypes } = require('librechat-data-provider'); +const { sendResponse } = require('~/server/middleware/error'); const { recordUsage } = require('~/server/services/Threads'); const { getConvo } = require('~/models/Conversation'); -const { sendResponse } = require('~/server/utils'); +const getLogStores = require('~/cache/getLogStores'); /** * @typedef {Object} ErrorHandlerContext @@ -75,7 +75,7 @@ const createErrorHandler = ({ req, res, getContext, originPath = '/assistants/ch } else if (/Files.*are invalid/.test(error.message)) { const errorMessage = `Files are invalid, or may not have uploaded yet.${ endpoint === 'azureAssistants' - ? ' If using Azure OpenAI, files are only available in the region of the assistant\'s model at the time of upload.' + ? " If using Azure OpenAI, files are only available in the region of the assistant's model at the time of upload." : '' }`; return sendResponse(req, res, messageData, errorMessage); diff --git a/api/server/controllers/agents/request.js b/api/server/controllers/agents/request.js index 24b7822c1f..5d55991e19 100644 --- a/api/server/controllers/agents/request.js +++ b/api/server/controllers/agents/request.js @@ -1,3 +1,5 @@ +const { sendEvent } = require('@librechat/api'); +const { logger } = require('@librechat/data-schemas'); const { Constants } = require('librechat-data-provider'); const { handleAbortError, @@ -5,9 +7,7 @@ const { cleanupAbortController, } = require('~/server/middleware'); const { disposeClient, clientRegistry, requestDataMap } = require('~/server/cleanup'); -const { sendMessage } = require('~/server/utils'); const { saveMessage } = require('~/models'); -const { logger } = require('~/config'); const AgentController = async (req, res, next, initializeClient, addTitle) => { let { @@ -206,7 +206,7 @@ const AgentController = async (req, res, next, initializeClient, addTitle) => { // Create a new 
response object with minimal copies const finalResponse = { ...response }; - sendMessage(res, { + sendEvent(res, { final: true, conversation, title: conversation.title, diff --git a/api/server/controllers/assistants/chatV1.js b/api/server/controllers/assistants/chatV1.js index 9129a6a1c1..b4fe0d9013 100644 --- a/api/server/controllers/assistants/chatV1.js +++ b/api/server/controllers/assistants/chatV1.js @@ -1,4 +1,7 @@ const { v4 } = require('uuid'); +const { sleep } = require('@librechat/agents'); +const { sendEvent } = require('@librechat/api'); +const { logger } = require('@librechat/data-schemas'); const { Time, Constants, @@ -19,20 +22,20 @@ const { addThreadMetadata, saveAssistantMessage, } = require('~/server/services/Threads'); -const { sendResponse, sendMessage, sleep, countTokens } = require('~/server/utils'); const { runAssistant, createOnTextProgress } = require('~/server/services/AssistantService'); const validateAuthor = require('~/server/middleware/assistants/validateAuthor'); const { formatMessage, createVisionPrompt } = require('~/app/clients/prompts'); const { createRun, StreamRunManager } = require('~/server/services/Runs'); const { addTitle } = require('~/server/services/Endpoints/assistants'); const { createRunBody } = require('~/server/services/createRunBody'); +const { sendResponse } = require('~/server/middleware/error'); const { getTransactions } = require('~/models/Transaction'); const { checkBalance } = require('~/models/balanceMethods'); const { getConvo } = require('~/models/Conversation'); const getLogStores = require('~/cache/getLogStores'); +const { countTokens } = require('~/server/utils'); const { getModelMaxTokens } = require('~/utils'); const { getOpenAIClient } = require('./helpers'); -const { logger } = require('~/config'); /** * @route POST / @@ -471,7 +474,7 @@ const chatV1 = async (req, res) => { await Promise.all(promises); const sendInitialResponse = () => { - sendMessage(res, { + sendEvent(res, { sync: true, 
conversationId, // messages: previousMessages, @@ -587,7 +590,7 @@ const chatV1 = async (req, res) => { iconURL: endpointOption.iconURL, }; - sendMessage(res, { + sendEvent(res, { final: true, conversation, requestMessage: { diff --git a/api/server/controllers/assistants/chatV2.js b/api/server/controllers/assistants/chatV2.js index 309e5a86c4..e1ba93bc21 100644 --- a/api/server/controllers/assistants/chatV2.js +++ b/api/server/controllers/assistants/chatV2.js @@ -1,4 +1,7 @@ const { v4 } = require('uuid'); +const { sleep } = require('@librechat/agents'); +const { sendEvent } = require('@librechat/api'); +const { logger } = require('@librechat/data-schemas'); const { Time, Constants, @@ -22,15 +25,14 @@ const { createErrorHandler } = require('~/server/controllers/assistants/errors') const validateAuthor = require('~/server/middleware/assistants/validateAuthor'); const { createRun, StreamRunManager } = require('~/server/services/Runs'); const { addTitle } = require('~/server/services/Endpoints/assistants'); -const { sendMessage, sleep, countTokens } = require('~/server/utils'); const { createRunBody } = require('~/server/services/createRunBody'); const { getTransactions } = require('~/models/Transaction'); const { checkBalance } = require('~/models/balanceMethods'); const { getConvo } = require('~/models/Conversation'); const getLogStores = require('~/cache/getLogStores'); +const { countTokens } = require('~/server/utils'); const { getModelMaxTokens } = require('~/utils'); const { getOpenAIClient } = require('./helpers'); -const { logger } = require('~/config'); /** * @route POST / @@ -309,7 +311,7 @@ const chatV2 = async (req, res) => { await Promise.all(promises); const sendInitialResponse = () => { - sendMessage(res, { + sendEvent(res, { sync: true, conversationId, // messages: previousMessages, @@ -432,7 +434,7 @@ const chatV2 = async (req, res) => { iconURL: endpointOption.iconURL, }; - sendMessage(res, { + sendEvent(res, { final: true, conversation, 
requestMessage: { diff --git a/api/server/controllers/assistants/errors.js b/api/server/controllers/assistants/errors.js index a4b880bf04..182b230fba 100644 --- a/api/server/controllers/assistants/errors.js +++ b/api/server/controllers/assistants/errors.js @@ -1,10 +1,10 @@ // errorHandler.js -const { sendResponse } = require('~/server/utils'); -const { logger } = require('~/config'); -const getLogStores = require('~/cache/getLogStores'); +const { logger } = require('@librechat/data-schemas'); const { CacheKeys, ViolationTypes, ContentTypes } = require('librechat-data-provider'); -const { getConvo } = require('~/models/Conversation'); const { recordUsage, checkMessageGaps } = require('~/server/services/Threads'); +const { sendResponse } = require('~/server/middleware/error'); +const { getConvo } = require('~/models/Conversation'); +const getLogStores = require('~/cache/getLogStores'); /** * @typedef {Object} ErrorHandlerContext @@ -78,7 +78,7 @@ const createErrorHandler = ({ req, res, getContext, originPath = '/assistants/ch } else if (/Files.*are invalid/.test(error.message)) { const errorMessage = `Files are invalid, or may not have uploaded yet.${ endpoint === 'azureAssistants' - ? ' If using Azure OpenAI, files are only available in the region of the assistant\'s model at the time of upload.' + ? " If using Azure OpenAI, files are only available in the region of the assistant's model at the time of upload." 
: '' }`; return sendResponse(req, res, messageData, errorMessage); diff --git a/api/server/middleware/abortMiddleware.js b/api/server/middleware/abortMiddleware.js index 94d69004bd..c5fc48780c 100644 --- a/api/server/middleware/abortMiddleware.js +++ b/api/server/middleware/abortMiddleware.js @@ -1,13 +1,13 @@ -// abortMiddleware.js +const { logger } = require('@librechat/data-schemas'); +const { countTokens, isEnabled, sendEvent } = require('@librechat/api'); const { isAssistantsEndpoint, ErrorTypes } = require('librechat-data-provider'); -const { sendMessage, sendError, countTokens, isEnabled } = require('~/server/utils'); const { truncateText, smartTruncateText } = require('~/app/clients/prompts'); const clearPendingReq = require('~/cache/clearPendingReq'); +const { sendError } = require('~/server/middleware/error'); const { spendTokens } = require('~/models/spendTokens'); const abortControllers = require('./abortControllers'); const { saveMessage, getConvo } = require('~/models'); const { abortRun } = require('./abortRun'); -const { logger } = require('~/config'); const abortDataMap = new WeakMap(); @@ -101,7 +101,7 @@ async function abortMessage(req, res) { cleanupAbortController(abortKey); if (res.headersSent && finalEvent) { - return sendMessage(res, finalEvent); + return sendEvent(res, finalEvent); } res.setHeader('Content-Type', 'application/json'); @@ -174,7 +174,7 @@ const createAbortController = (req, res, getAbortData, getReqData) => { * @param {string} responseMessageId */ const onStart = (userMessage, responseMessageId) => { - sendMessage(res, { message: userMessage, created: true }); + sendEvent(res, { message: userMessage, created: true }); const abortKey = userMessage?.conversationId ?? 
req.user.id; getReqData({ abortKey }); diff --git a/api/server/middleware/abortRun.js b/api/server/middleware/abortRun.js index 01b34aacc2..2846c6eefc 100644 --- a/api/server/middleware/abortRun.js +++ b/api/server/middleware/abortRun.js @@ -1,11 +1,11 @@ +const { sendEvent } = require('@librechat/api'); +const { logger } = require('@librechat/data-schemas'); const { CacheKeys, RunStatus, isUUID } = require('librechat-data-provider'); const { initializeClient } = require('~/server/services/Endpoints/assistants'); const { checkMessageGaps, recordUsage } = require('~/server/services/Threads'); const { deleteMessages } = require('~/models/Message'); const { getConvo } = require('~/models/Conversation'); const getLogStores = require('~/cache/getLogStores'); -const { sendMessage } = require('~/server/utils'); -const { logger } = require('~/config'); const three_minutes = 1000 * 60 * 3; @@ -34,7 +34,7 @@ async function abortRun(req, res) { const [thread_id, run_id] = runValues.split(':'); if (!run_id) { - logger.warn('[abortRun] Couldn\'t find run for cancel request', { thread_id }); + logger.warn("[abortRun] Couldn't find run for cancel request", { thread_id }); return res.status(204).send({ message: 'Run not found' }); } else if (run_id === 'cancelled') { logger.warn('[abortRun] Run already cancelled', { thread_id }); @@ -93,7 +93,7 @@ async function abortRun(req, res) { }; if (res.headersSent && finalEvent) { - return sendMessage(res, finalEvent); + return sendEvent(res, finalEvent); } res.json(finalEvent); diff --git a/api/server/middleware/denyRequest.js b/api/server/middleware/denyRequest.js index 62efb1aeaf..20360519cf 100644 --- a/api/server/middleware/denyRequest.js +++ b/api/server/middleware/denyRequest.js @@ -1,6 +1,7 @@ const crypto = require('crypto'); +const { sendEvent } = require('@librechat/api'); const { getResponseSender, Constants } = require('librechat-data-provider'); -const { sendMessage, sendError } = require('~/server/utils'); +const { sendError 
} = require('~/server/middleware/error'); const { saveMessage } = require('~/models'); /** @@ -36,7 +37,7 @@ const denyRequest = async (req, res, errorMessage) => { isCreatedByUser: true, text, }; - sendMessage(res, { message: userMessage, created: true }); + sendEvent(res, { message: userMessage, created: true }); const shouldSaveMessage = _convoId && parentMessageId && parentMessageId !== Constants.NO_PARENT; diff --git a/api/server/utils/streamResponse.js b/api/server/middleware/error.js similarity index 76% rename from api/server/utils/streamResponse.js rename to api/server/middleware/error.js index bb8d63b229..db445c1d43 100644 --- a/api/server/utils/streamResponse.js +++ b/api/server/middleware/error.js @@ -1,31 +1,9 @@ const crypto = require('crypto'); +const { logger } = require('@librechat/data-schemas'); const { parseConvo } = require('librechat-data-provider'); +const { sendEvent, handleError } = require('@librechat/api'); const { saveMessage, getMessages } = require('~/models/Message'); const { getConvo } = require('~/models/Conversation'); -const { logger } = require('~/config'); - -/** - * Sends error data in Server Sent Events format and ends the response. - * @param {object} res - The server response. - * @param {string} message - The error message. - */ -const handleError = (res, message) => { - res.write(`event: error\ndata: ${JSON.stringify(message)}\n\n`); - res.end(); -}; - -/** - * Sends message data in Server Sent Events format. - * @param {Express.Response} res - - The server response. - * @param {string | Object} message - The message to be sent. - * @param {'message' | 'error' | 'cancel'} event - [Optional] The type of event. Default is 'message'. 
- */ -const sendMessage = (res, message, event = 'message') => { - if (typeof message === 'string' && message.length === 0) { - return; - } - res.write(`event: ${event}\ndata: ${JSON.stringify(message)}\n\n`); -}; /** * Processes an error with provided options, saves the error message and sends a corresponding SSE response @@ -91,7 +69,7 @@ const sendError = async (req, res, options, callback) => { convo = parseConvo(errorMessage); } - return sendMessage(res, { + return sendEvent(res, { final: true, requestMessage: query?.[0] ? query[0] : requestMessage, responseMessage: errorMessage, @@ -120,12 +98,10 @@ const sendResponse = (req, res, data, errorMessage) => { if (errorMessage) { return sendError(req, res, { ...data, text: errorMessage }); } - return sendMessage(res, data); + return sendEvent(res, data); }; module.exports = { - sendResponse, - handleError, - sendMessage, sendError, + sendResponse, }; diff --git a/api/server/services/AssistantService.js b/api/server/services/AssistantService.js index 2db0a56b6b..5354b2e33a 100644 --- a/api/server/services/AssistantService.js +++ b/api/server/services/AssistantService.js @@ -1,4 +1,7 @@ const { klona } = require('klona'); +const { sleep } = require('@librechat/agents'); +const { sendEvent } = require('@librechat/api'); +const { logger } = require('@librechat/data-schemas'); const { StepTypes, RunStatus, @@ -11,11 +14,10 @@ const { } = require('librechat-data-provider'); const { retrieveAndProcessFile } = require('~/server/services/Files/process'); const { processRequiredActions } = require('~/server/services/ToolService'); -const { createOnProgress, sendMessage, sleep } = require('~/server/utils'); const { RunManager, waitForRun } = require('~/server/services/Runs'); const { processMessages } = require('~/server/services/Threads'); +const { createOnProgress } = require('~/server/utils'); const { TextStream } = require('~/app/clients'); -const { logger } = require('~/config'); /** * Sorts, processes, and flattens 
messages to a single string. @@ -64,7 +66,7 @@ async function createOnTextProgress({ }; logger.debug('Content data:', contentData); - sendMessage(openai.res, contentData); + sendEvent(openai.res, contentData); }; } diff --git a/api/server/services/Config/EndpointService.js b/api/server/services/Config/EndpointService.js index 1f38b70a62..d8277dd67f 100644 --- a/api/server/services/Config/EndpointService.js +++ b/api/server/services/Config/EndpointService.js @@ -1,5 +1,6 @@ +const { isUserProvided } = require('@librechat/api'); const { EModelEndpoint } = require('librechat-data-provider'); -const { isUserProvided, generateConfig } = require('~/server/utils'); +const { generateConfig } = require('~/server/utils/handleText'); const { OPENAI_API_KEY: openAIApiKey, diff --git a/api/server/services/Config/loadCustomConfig.js b/api/server/services/Config/loadCustomConfig.js index 18f3a44748..393281daf2 100644 --- a/api/server/services/Config/loadCustomConfig.js +++ b/api/server/services/Config/loadCustomConfig.js @@ -1,18 +1,18 @@ const path = require('path'); -const { - CacheKeys, - configSchema, - EImageOutputType, - validateSettingDefinitions, - agentParamSettings, - paramSettings, -} = require('librechat-data-provider'); -const getLogStores = require('~/cache/getLogStores'); -const loadYaml = require('~/utils/loadYaml'); -const { logger } = require('~/config'); const axios = require('axios'); const yaml = require('js-yaml'); const keyBy = require('lodash/keyBy'); +const { loadYaml } = require('@librechat/api'); +const { logger } = require('@librechat/data-schemas'); +const { + CacheKeys, + configSchema, + paramSettings, + EImageOutputType, + agentParamSettings, + validateSettingDefinitions, +} = require('librechat-data-provider'); +const getLogStores = require('~/cache/getLogStores'); const projectRoot = path.resolve(__dirname, '..', '..', '..', '..'); const defaultConfigPath = path.resolve(projectRoot, 'librechat.yaml'); diff --git 
a/api/server/services/Config/loadCustomConfig.spec.js b/api/server/services/Config/loadCustomConfig.spec.js index ed698e57f1..9b905181c5 100644 --- a/api/server/services/Config/loadCustomConfig.spec.js +++ b/api/server/services/Config/loadCustomConfig.spec.js @@ -1,6 +1,9 @@ jest.mock('axios'); jest.mock('~/cache/getLogStores'); -jest.mock('~/utils/loadYaml'); +jest.mock('@librechat/api', () => ({ + ...jest.requireActual('@librechat/api'), + loadYaml: jest.fn(), +})); jest.mock('librechat-data-provider', () => { const actual = jest.requireActual('librechat-data-provider'); return { @@ -30,11 +33,22 @@ jest.mock('librechat-data-provider', () => { }; }); +jest.mock('@librechat/data-schemas', () => { + return { + logger: { + info: jest.fn(), + warn: jest.fn(), + debug: jest.fn(), + error: jest.fn(), + }, + }; +}); + const axios = require('axios'); +const { loadYaml } = require('@librechat/api'); +const { logger } = require('@librechat/data-schemas'); const loadCustomConfig = require('./loadCustomConfig'); const getLogStores = require('~/cache/getLogStores'); -const loadYaml = require('~/utils/loadYaml'); -const { logger } = require('~/config'); describe('loadCustomConfig', () => { const mockSet = jest.fn(); diff --git a/api/server/services/Runs/StreamRunManager.js b/api/server/services/Runs/StreamRunManager.js index 4bab7326bb..4f6994e0cb 100644 --- a/api/server/services/Runs/StreamRunManager.js +++ b/api/server/services/Runs/StreamRunManager.js @@ -1,3 +1,6 @@ +const { sleep } = require('@librechat/agents'); +const { sendEvent } = require('@librechat/api'); +const { logger } = require('@librechat/data-schemas'); const { Constants, StepTypes, @@ -8,9 +11,8 @@ const { } = require('librechat-data-provider'); const { retrieveAndProcessFile } = require('~/server/services/Files/process'); const { processRequiredActions } = require('~/server/services/ToolService'); -const { createOnProgress, sendMessage, sleep } = require('~/server/utils'); const { processMessages } = 
require('~/server/services/Threads'); -const { logger } = require('~/config'); +const { createOnProgress } = require('~/server/utils'); /** * Implements the StreamRunManager functionality for managing the streaming @@ -126,7 +128,7 @@ class StreamRunManager { conversationId: this.finalMessage.conversationId, }; - sendMessage(this.res, contentData); + sendEvent(this.res, contentData); } /* <------------------ Misc. Helpers ------------------> */ @@ -302,7 +304,7 @@ class StreamRunManager { for (const d of delta[key]) { if (typeof d === 'object' && !Object.prototype.hasOwnProperty.call(d, 'index')) { - logger.warn('Expected an object with an \'index\' for array updates but got:', d); + logger.warn("Expected an object with an 'index' for array updates but got:", d); continue; } diff --git a/api/server/utils/handleText.js b/api/server/utils/handleText.js index 680da5da44..36671c44ff 100644 --- a/api/server/utils/handleText.js +++ b/api/server/utils/handleText.js @@ -7,9 +7,9 @@ const { defaultAssistantsVersion, defaultAgentCapabilities, } = require('librechat-data-provider'); +const { sendEvent } = require('@librechat/api'); const { Providers } = require('@librechat/agents'); const partialRight = require('lodash/partialRight'); -const { sendMessage } = require('./streamResponse'); /** Helper function to escape special characters in regex * @param {string} string - The string to escape. 
@@ -37,7 +37,7 @@ const createOnProgress = ( basePayload.text = basePayload.text + chunk; const payload = Object.assign({}, basePayload, rest); - sendMessage(res, payload); + sendEvent(res, payload); if (_onProgress) { _onProgress(payload); } @@ -50,7 +50,7 @@ const createOnProgress = ( const sendIntermediateMessage = (res, payload, extraTokens = '') => { basePayload.text = basePayload.text + extraTokens; const message = Object.assign({}, basePayload, payload); - sendMessage(res, message); + sendEvent(res, message); if (i === 0) { basePayload.initial = false; } diff --git a/api/server/utils/index.js b/api/server/utils/index.js index 2661ff75e1..2672f4f2ea 100644 --- a/api/server/utils/index.js +++ b/api/server/utils/index.js @@ -1,11 +1,9 @@ -const streamResponse = require('./streamResponse'); const removePorts = require('./removePorts'); const countTokens = require('./countTokens'); const handleText = require('./handleText'); const sendEmail = require('./sendEmail'); const queue = require('./queue'); const files = require('./files'); -const math = require('./math'); /** * Check if email configuration is set @@ -28,7 +26,6 @@ function checkEmailConfig() { } module.exports = { - ...streamResponse, checkEmailConfig, ...handleText, countTokens, @@ -36,5 +33,4 @@ module.exports = { sendEmail, ...files, ...queue, - math, }; diff --git a/api/utils/index.js b/api/utils/index.js index 50b8c46d99..b80c9b0c31 100644 --- a/api/utils/index.js +++ b/api/utils/index.js @@ -1,11 +1,9 @@ -const loadYaml = require('./loadYaml'); const tokenHelpers = require('./tokens'); const deriveBaseURL = require('./deriveBaseURL'); const extractBaseURL = require('./extractBaseURL'); const findMessageContent = require('./findMessageContent'); module.exports = { - loadYaml, deriveBaseURL, extractBaseURL, ...tokenHelpers, diff --git a/api/utils/loadYaml.js b/api/utils/loadYaml.js deleted file mode 100644 index 50e5d23ec3..0000000000 --- a/api/utils/loadYaml.js +++ /dev/null @@ -1,13 +0,0 @@ -const 
fs = require('fs'); -const yaml = require('js-yaml'); - -function loadYaml(filepath) { - try { - let fileContents = fs.readFileSync(filepath, 'utf8'); - return yaml.load(fileContents); - } catch (e) { - return e; - } -} - -module.exports = loadYaml; diff --git a/librechat.example.yaml b/librechat.example.yaml index de28dcc32e..0fb7975cbb 100644 --- a/librechat.example.yaml +++ b/librechat.example.yaml @@ -73,6 +73,8 @@ interface: bookmarks: true multiConvo: true agents: true + # Temporary chat retention period in hours (default: 720, min: 1, max: 8760) + # temporaryChatRetention: 1 # Example Cloudflare turnstile (optional) #turnstile: diff --git a/package-lock.json b/package-lock.json index 989883c0ec..e3a47a4fd4 100644 --- a/package-lock.json +++ b/package-lock.json @@ -46573,6 +46573,7 @@ "diff": "^7.0.0", "eventsource": "^3.0.2", "express": "^4.21.2", + "js-yaml": "^4.1.0", "keyv": "^5.3.2", "librechat-data-provider": "*", "node-fetch": "2.7.0", diff --git a/packages/api/package.json b/packages/api/package.json index 4aaf0f793e..ed2b70965d 100644 --- a/packages/api/package.json +++ b/packages/api/package.json @@ -76,6 +76,7 @@ "diff": "^7.0.0", "eventsource": "^3.0.2", "express": "^4.21.2", + "js-yaml": "^4.1.0", "keyv": "^5.3.2", "librechat-data-provider": "*", "node-fetch": "2.7.0", diff --git a/packages/api/src/utils/events.ts b/packages/api/src/utils/events.ts index 76d1361d9b..20c9583993 100644 --- a/packages/api/src/utils/events.ts +++ b/packages/api/src/utils/events.ts @@ -14,3 +14,13 @@ export function sendEvent(res: ServerResponse, event: ServerSentEvent): void { } res.write(`event: message\ndata: ${JSON.stringify(event)}\n\n`); } + +/** + * Sends error data in Server Sent Events format and ends the response. + * @param res - The server response. + * @param message - The error message. 
+ */ +export function handleError(res: ServerResponse, message: string): void { + res.write(`event: error\ndata: ${JSON.stringify(message)}\n\n`); + res.end(); +} diff --git a/packages/api/src/utils/index.ts b/packages/api/src/utils/index.ts index 807686ca44..dea9472b0e 100644 --- a/packages/api/src/utils/index.ts +++ b/packages/api/src/utils/index.ts @@ -6,5 +6,8 @@ export * from './events'; export * from './files'; export * from './generators'; export * from './llm'; +export * from './math'; export * from './openid'; +export * from './tempChatRetention'; export { default as Tokenizer } from './tokenizer'; +export * from './yaml'; diff --git a/api/server/utils/math.js b/packages/api/src/utils/math.ts similarity index 68% rename from api/server/utils/math.js rename to packages/api/src/utils/math.ts index 3cd0929890..08ae04f7eb 100644 --- a/api/server/utils/math.js +++ b/packages/api/src/utils/math.ts @@ -5,14 +5,14 @@ * If the input is not a string or contains invalid characters, an error is thrown. * If the evaluated result is not a number, an error is thrown. * - * @param {string|number} str - The mathematical expression to evaluate, or a number. - * @param {number} [fallbackValue] - The default value to return if the input is not a string or number, or if the evaluated result is not a number. + * @param str - The mathematical expression to evaluate, or a number. + * @param fallbackValue - The default value to return if the input is not a string or number, or if the evaluated result is not a number. * - * @returns {number} The result of the evaluated expression or the input number. + * @returns The result of the evaluated expression or the input number. * - * @throws {Error} Throws an error if the input is not a string or number, contains invalid characters, or does not evaluate to a number. + * @throws Throws an error if the input is not a string or number, contains invalid characters, or does not evaluate to a number. 
*/ -function math(str, fallbackValue) { +export function math(str: string | number, fallbackValue?: number): number { const fallback = typeof fallbackValue !== 'undefined' && typeof fallbackValue === 'number'; if (typeof str !== 'string' && typeof str === 'number') { return str; @@ -43,5 +43,3 @@ function math(str, fallbackValue) { return value; } - -module.exports = math; diff --git a/packages/api/src/utils/tempChatRetention.spec.ts b/packages/api/src/utils/tempChatRetention.spec.ts new file mode 100644 index 0000000000..b0166e9522 --- /dev/null +++ b/packages/api/src/utils/tempChatRetention.spec.ts @@ -0,0 +1,133 @@ +import { + MIN_RETENTION_HOURS, + MAX_RETENTION_HOURS, + DEFAULT_RETENTION_HOURS, + getTempChatRetentionHours, + createTempChatExpirationDate, +} from './tempChatRetention'; +import type { TCustomConfig } from 'librechat-data-provider'; + +describe('tempChatRetention', () => { + const originalEnv = process.env; + + beforeEach(() => { + jest.resetModules(); + process.env = { ...originalEnv }; + delete process.env.TEMP_CHAT_RETENTION_HOURS; + }); + + afterAll(() => { + process.env = originalEnv; + }); + + describe('getTempChatRetentionHours', () => { + it('should return default retention hours when no config or env var is set', () => { + const result = getTempChatRetentionHours(); + expect(result).toBe(DEFAULT_RETENTION_HOURS); + }); + + it('should use environment variable when set', () => { + process.env.TEMP_CHAT_RETENTION_HOURS = '48'; + const result = getTempChatRetentionHours(); + expect(result).toBe(48); + }); + + it('should use config value when set', () => { + const config: Partial = { + interface: { + temporaryChatRetention: 12, + }, + }; + const result = getTempChatRetentionHours(config); + expect(result).toBe(12); + }); + + it('should prioritize config over environment variable', () => { + process.env.TEMP_CHAT_RETENTION_HOURS = '48'; + const config: Partial = { + interface: { + temporaryChatRetention: 12, + }, + }; + const result = 
getTempChatRetentionHours(config); + expect(result).toBe(12); + }); + + it('should enforce minimum retention period', () => { + const config: Partial = { + interface: { + temporaryChatRetention: 0, + }, + }; + const result = getTempChatRetentionHours(config); + expect(result).toBe(MIN_RETENTION_HOURS); + }); + + it('should enforce maximum retention period', () => { + const config: Partial = { + interface: { + temporaryChatRetention: 10000, + }, + }; + const result = getTempChatRetentionHours(config); + expect(result).toBe(MAX_RETENTION_HOURS); + }); + + it('should handle invalid environment variable', () => { + process.env.TEMP_CHAT_RETENTION_HOURS = 'invalid'; + const result = getTempChatRetentionHours(); + expect(result).toBe(DEFAULT_RETENTION_HOURS); + }); + + it('should handle invalid config value', () => { + const config: Partial = { + interface: { + temporaryChatRetention: 'invalid' as unknown as number, + }, + }; + const result = getTempChatRetentionHours(config); + expect(result).toBe(DEFAULT_RETENTION_HOURS); + }); + }); + + describe('createTempChatExpirationDate', () => { + it('should create expiration date with default retention period', () => { + const result = createTempChatExpirationDate(); + + const expectedDate = new Date(); + expectedDate.setHours(expectedDate.getHours() + DEFAULT_RETENTION_HOURS); + + // Allow for small time differences in test execution + const timeDiff = Math.abs(result.getTime() - expectedDate.getTime()); + expect(timeDiff).toBeLessThan(1000); // Less than 1 second difference + }); + + it('should create expiration date with custom retention period', () => { + const config: Partial = { + interface: { + temporaryChatRetention: 12, + }, + }; + + const result = createTempChatExpirationDate(config); + + const expectedDate = new Date(); + expectedDate.setHours(expectedDate.getHours() + 12); + + // Allow for small time differences in test execution + const timeDiff = Math.abs(result.getTime() - expectedDate.getTime()); + 
expect(timeDiff).toBeLessThan(1000); // Less than 1 second difference + }); + + it('should return a Date object', () => { + const result = createTempChatExpirationDate(); + expect(result).toBeInstanceOf(Date); + }); + + it('should return a future date', () => { + const now = new Date(); + const result = createTempChatExpirationDate(); + expect(result.getTime()).toBeGreaterThan(now.getTime()); + }); + }); +}); diff --git a/packages/api/src/utils/tempChatRetention.ts b/packages/api/src/utils/tempChatRetention.ts new file mode 100644 index 0000000000..6683b4c6ac --- /dev/null +++ b/packages/api/src/utils/tempChatRetention.ts @@ -0,0 +1,77 @@ +import { logger } from '@librechat/data-schemas'; +import type { TCustomConfig } from 'librechat-data-provider'; + +/** + * Default retention period for temporary chats in hours + */ +export const DEFAULT_RETENTION_HOURS = 24 * 30; // 30 days + +/** + * Minimum allowed retention period in hours + */ +export const MIN_RETENTION_HOURS = 1; + +/** + * Maximum allowed retention period in hours (1 year = 8760 hours) + */ +export const MAX_RETENTION_HOURS = 8760; + +/** + * Gets the temporary chat retention period from environment variables or config + * @param config - The custom configuration object + * @returns The retention period in hours + */ +export function getTempChatRetentionHours(config?: Partial | null): number { + let retentionHours = DEFAULT_RETENTION_HOURS; + + // Check environment variable first + if (process.env.TEMP_CHAT_RETENTION_HOURS) { + const envValue = parseInt(process.env.TEMP_CHAT_RETENTION_HOURS, 10); + if (!isNaN(envValue)) { + retentionHours = envValue; + } else { + logger.warn( + `Invalid TEMP_CHAT_RETENTION_HOURS environment variable: ${process.env.TEMP_CHAT_RETENTION_HOURS}. 
Using default: ${DEFAULT_RETENTION_HOURS} hours.`, + ); + } + } + + // Check config file (takes precedence over environment variable) + if (config?.interface?.temporaryChatRetention !== undefined) { + const configValue = config.interface.temporaryChatRetention; + if (typeof configValue === 'number' && !isNaN(configValue)) { + retentionHours = configValue; + } else { + logger.warn( + `Invalid temporaryChatRetention in config: ${configValue}. Using ${retentionHours} hours.`, + ); + } + } + + // Validate the retention period + if (retentionHours < MIN_RETENTION_HOURS) { + logger.warn( + `Temporary chat retention period ${retentionHours} is below minimum ${MIN_RETENTION_HOURS} hours. Using minimum value.`, + ); + retentionHours = MIN_RETENTION_HOURS; + } else if (retentionHours > MAX_RETENTION_HOURS) { + logger.warn( + `Temporary chat retention period ${retentionHours} exceeds maximum ${MAX_RETENTION_HOURS} hours. Using maximum value.`, + ); + retentionHours = MAX_RETENTION_HOURS; + } + + return retentionHours; +} + +/** + * Creates an expiration date for temporary chats + * @param config - The custom configuration object + * @returns The expiration date + */ +export function createTempChatExpirationDate(config?: Partial): Date { + const retentionHours = getTempChatRetentionHours(config); + const expiredAt = new Date(); + expiredAt.setHours(expiredAt.getHours() + retentionHours); + return expiredAt; +} diff --git a/packages/api/src/utils/yaml.ts b/packages/api/src/utils/yaml.ts new file mode 100644 index 0000000000..50ea0bd4de --- /dev/null +++ b/packages/api/src/utils/yaml.ts @@ -0,0 +1,11 @@ +import fs from 'fs'; +import yaml from 'js-yaml'; + +export function loadYaml(filepath: string) { + try { + const fileContents = fs.readFileSync(filepath, 'utf8'); + return yaml.load(fileContents); + } catch (e) { + return e; + } +} diff --git a/packages/data-provider/src/config.ts b/packages/data-provider/src/config.ts index 4c896d0230..004ed572ca 100644 --- 
a/packages/data-provider/src/config.ts +++ b/packages/data-provider/src/config.ts @@ -510,6 +510,7 @@ export const intefaceSchema = z prompts: z.boolean().optional(), agents: z.boolean().optional(), temporaryChat: z.boolean().optional(), + temporaryChatRetention: z.number().min(1).max(8760).optional(), runCode: z.boolean().optional(), webSearch: z.boolean().optional(), }) diff --git a/packages/data-schemas/src/methods/session.ts b/packages/data-schemas/src/methods/session.ts index c5af51e932..30700bc267 100644 --- a/packages/data-schemas/src/methods/session.ts +++ b/packages/data-schemas/src/methods/session.ts @@ -13,14 +13,10 @@ export class SessionError extends Error { } const { REFRESH_TOKEN_EXPIRY } = process.env ?? {}; -const expires = REFRESH_TOKEN_EXPIRY - ? eval(REFRESH_TOKEN_EXPIRY) - : 1000 * 60 * 60 * 24 * 7; // 7 days default +const expires = REFRESH_TOKEN_EXPIRY ? eval(REFRESH_TOKEN_EXPIRY) : 1000 * 60 * 60 * 24 * 7; // 7 days default // Factory function that takes mongoose instance and returns the methods export function createSessionMethods(mongoose: typeof import('mongoose')) { - const Session = mongoose.models.Session; - /** * Creates a new session for a user */ @@ -33,13 +29,14 @@ export function createSessionMethods(mongoose: typeof import('mongoose')) { } try { - const session = new Session({ + const Session = mongoose.models.Session; + const currentSession = new Session({ user: userId, expiration: options.expiration || new Date(Date.now() + expires), }); - const refreshToken = await generateRefreshToken(session); + const refreshToken = await generateRefreshToken(currentSession); - return { session, refreshToken }; + return { session: currentSession, refreshToken }; } catch (error) { logger.error('[createSession] Error creating session:', error); throw new SessionError('Failed to create session', 'CREATE_SESSION_FAILED'); @@ -54,6 +51,7 @@ export function createSessionMethods(mongoose: typeof import('mongoose')) { options: t.SessionQueryOptions 
= { lean: true }, ): Promise { try { + const Session = mongoose.models.Session; const query: Record = {}; if (!params.refreshToken && !params.userId && !params.sessionId) { @@ -109,6 +107,7 @@ export function createSessionMethods(mongoose: typeof import('mongoose')) { newExpiration?: Date, ): Promise { try { + const Session = mongoose.models.Session; const sessionDoc = typeof session === 'string' ? await Session.findById(session) : session; if (!sessionDoc) { @@ -128,6 +127,7 @@ export function createSessionMethods(mongoose: typeof import('mongoose')) { */ async function deleteSession(params: t.DeleteSessionParams): Promise<{ deletedCount?: number }> { try { + const Session = mongoose.models.Session; if (!params.refreshToken && !params.sessionId) { throw new SessionError( 'Either refreshToken or sessionId is required', @@ -166,6 +166,7 @@ export function createSessionMethods(mongoose: typeof import('mongoose')) { options: t.DeleteAllSessionsOptions = {}, ): Promise<{ deletedCount?: number }> { try { + const Session = mongoose.models.Session; if (!userId) { throw new SessionError('User ID is required', 'INVALID_USER_ID'); } @@ -237,6 +238,7 @@ export function createSessionMethods(mongoose: typeof import('mongoose')) { */ async function countActiveSessions(userId: string): Promise { try { + const Session = mongoose.models.Session; if (!userId) { throw new SessionError('User ID is required', 'INVALID_USER_ID'); } From 799f0e5810208ee29f9b8b4972f7234121f4d7da Mon Sep 17 00:00:00 2001 From: Danny Avila Date: Wed, 25 Jun 2025 20:58:34 -0400 Subject: [PATCH 14/65] =?UTF-8?q?=F0=9F=90=9B=20fix:=20Move=20MemoryEntry?= =?UTF-8?q?=20and=20PluginAuth=20model=20retrieval=20inside=20methods=20fo?= =?UTF-8?q?r=20Runtime=20Usage?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- packages/data-schemas/src/methods/memory.ts | 6 ++++-- packages/data-schemas/src/methods/pluginAuth.ts | 9 ++++++--- 2 files changed, 10 insertions(+), 5 
deletions(-) diff --git a/packages/data-schemas/src/methods/memory.ts b/packages/data-schemas/src/methods/memory.ts index 021354eab2..becb063f3d 100644 --- a/packages/data-schemas/src/methods/memory.ts +++ b/packages/data-schemas/src/methods/memory.ts @@ -11,8 +11,6 @@ const formatDate = (date: Date): string => { // Factory function that takes mongoose instance and returns the methods export function createMemoryMethods(mongoose: typeof import('mongoose')) { - const MemoryEntry = mongoose.models.MemoryEntry; - /** * Creates a new memory entry for a user * Throws an error if a memory with the same key already exists @@ -28,6 +26,7 @@ export function createMemoryMethods(mongoose: typeof import('mongoose')) { return { ok: false }; } + const MemoryEntry = mongoose.models.MemoryEntry; const existingMemory = await MemoryEntry.findOne({ userId, key }); if (existingMemory) { throw new Error('Memory with this key already exists'); @@ -63,6 +62,7 @@ export function createMemoryMethods(mongoose: typeof import('mongoose')) { return { ok: false }; } + const MemoryEntry = mongoose.models.MemoryEntry; await MemoryEntry.findOneAndUpdate( { userId, key }, { @@ -89,6 +89,7 @@ export function createMemoryMethods(mongoose: typeof import('mongoose')) { */ async function deleteMemory({ userId, key }: t.DeleteMemoryParams): Promise { try { + const MemoryEntry = mongoose.models.MemoryEntry; const result = await MemoryEntry.findOneAndDelete({ userId, key }); return { ok: !!result }; } catch (error) { @@ -105,6 +106,7 @@ export function createMemoryMethods(mongoose: typeof import('mongoose')) { userId: string | Types.ObjectId, ): Promise { try { + const MemoryEntry = mongoose.models.MemoryEntry; return (await MemoryEntry.find({ userId }).lean()) as t.IMemoryEntryLean[]; } catch (error) { throw new Error( diff --git a/packages/data-schemas/src/methods/pluginAuth.ts b/packages/data-schemas/src/methods/pluginAuth.ts index f0256f859f..5355fec50c 100644 --- 
a/packages/data-schemas/src/methods/pluginAuth.ts +++ b/packages/data-schemas/src/methods/pluginAuth.ts @@ -1,16 +1,14 @@ import type { DeleteResult, Model } from 'mongoose'; -import type { IPluginAuth } from '~/schema/pluginAuth'; import type { FindPluginAuthsByKeysParams, UpdatePluginAuthParams, DeletePluginAuthParams, FindPluginAuthParams, + IPluginAuth, } from '~/types'; // Factory function that takes mongoose instance and returns the methods export function createPluginAuthMethods(mongoose: typeof import('mongoose')) { - const PluginAuth: Model = mongoose.models.PluginAuth; - /** * Finds a single plugin auth entry by userId and authField */ @@ -19,6 +17,7 @@ export function createPluginAuthMethods(mongoose: typeof import('mongoose')) { authField, }: FindPluginAuthParams): Promise { try { + const PluginAuth: Model = mongoose.models.PluginAuth; return await PluginAuth.findOne({ userId, authField }).lean(); } catch (error) { throw new Error( @@ -39,6 +38,7 @@ export function createPluginAuthMethods(mongoose: typeof import('mongoose')) { return []; } + const PluginAuth: Model = mongoose.models.PluginAuth; return await PluginAuth.find({ userId, pluginKey: { $in: pluginKeys }, @@ -60,6 +60,7 @@ export function createPluginAuthMethods(mongoose: typeof import('mongoose')) { value, }: UpdatePluginAuthParams): Promise { try { + const PluginAuth: Model = mongoose.models.PluginAuth; const existingAuth = await PluginAuth.findOne({ userId, pluginKey, authField }).lean(); if (existingAuth) { @@ -95,6 +96,7 @@ export function createPluginAuthMethods(mongoose: typeof import('mongoose')) { all = false, }: DeletePluginAuthParams): Promise { try { + const PluginAuth: Model = mongoose.models.PluginAuth; if (all) { const filter: DeletePluginAuthParams = { userId }; if (pluginKey) { @@ -120,6 +122,7 @@ export function createPluginAuthMethods(mongoose: typeof import('mongoose')) { */ async function deleteAllUserPluginAuths(userId: string): Promise { try { + const PluginAuth: Model = 
mongoose.models.PluginAuth; return await PluginAuth.deleteMany({ userId }); } catch (error) { throw new Error( From 9cdc62b655870b41e29f6bf643c7e46601113a0c Mon Sep 17 00:00:00 2001 From: Sebastien Bruel <93573440+sbruel@users.noreply.github.com> Date: Fri, 27 Jun 2025 07:51:35 +0900 Subject: [PATCH 15/65] =?UTF-8?q?=F0=9F=93=82=20fix:=20Prevent=20Null=20Re?= =?UTF-8?q?ference=20Errors=20in=20File=20Process=20(#8084)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- api/server/services/Files/process.js | 8 +- .../services/Files/processFiles.test.js | 208 ++++++++++++++++++ 2 files changed, 214 insertions(+), 2 deletions(-) create mode 100644 api/server/services/Files/processFiles.test.js diff --git a/api/server/services/Files/process.js b/api/server/services/Files/process.js index 8910163047..38ccdafdd7 100644 --- a/api/server/services/Files/process.js +++ b/api/server/services/Files/process.js @@ -55,7 +55,9 @@ const processFiles = async (files, fileIds) => { } if (!fileIds) { - return await Promise.all(promises); + const results = await Promise.all(promises); + // Filter out null results from failed updateFileUsage calls + return results.filter((result) => result != null); } for (let file_id of fileIds) { @@ -67,7 +69,9 @@ const processFiles = async (files, fileIds) => { } // TODO: calculate token cost when image is first uploaded - return await Promise.all(promises); + const results = await Promise.all(promises); + // Filter out null results from failed updateFileUsage calls + return results.filter((result) => result != null); }; /** diff --git a/api/server/services/Files/processFiles.test.js b/api/server/services/Files/processFiles.test.js new file mode 100644 index 0000000000..8665d33665 --- /dev/null +++ b/api/server/services/Files/processFiles.test.js @@ -0,0 +1,208 @@ +// Mock the updateFileUsage function before importing the actual processFiles +jest.mock('~/models/File', () => ({ + updateFileUsage: jest.fn(), 
+})); + +// Mock winston and logger configuration to avoid dependency issues +jest.mock('~/config', () => ({ + logger: { + info: jest.fn(), + warn: jest.fn(), + debug: jest.fn(), + error: jest.fn(), + }, +})); + +// Mock all other dependencies that might cause issues +jest.mock('librechat-data-provider', () => ({ + isUUID: { parse: jest.fn() }, + megabyte: 1024 * 1024, + FileContext: { message_attachment: 'message_attachment' }, + FileSources: { local: 'local' }, + EModelEndpoint: { assistants: 'assistants' }, + EToolResources: { file_search: 'file_search' }, + mergeFileConfig: jest.fn(), + removeNullishValues: jest.fn((obj) => obj), + isAssistantsEndpoint: jest.fn(), +})); + +jest.mock('~/server/services/Files/images', () => ({ + convertImage: jest.fn(), + resizeAndConvert: jest.fn(), + resizeImageBuffer: jest.fn(), +})); + +jest.mock('~/server/controllers/assistants/v2', () => ({ + addResourceFileId: jest.fn(), + deleteResourceFileId: jest.fn(), +})); + +jest.mock('~/models/Agent', () => ({ + addAgentResourceFile: jest.fn(), + removeAgentResourceFiles: jest.fn(), +})); + +jest.mock('~/server/controllers/assistants/helpers', () => ({ + getOpenAIClient: jest.fn(), +})); + +jest.mock('~/server/services/Tools/credentials', () => ({ + loadAuthValues: jest.fn(), +})); + +jest.mock('~/server/services/Config', () => ({ + checkCapability: jest.fn(), +})); + +jest.mock('~/server/utils/queue', () => ({ + LB_QueueAsyncCall: jest.fn(), +})); + +jest.mock('./strategies', () => ({ + getStrategyFunctions: jest.fn(), +})); + +jest.mock('~/server/utils', () => ({ + determineFileType: jest.fn(), +})); + +// Import the actual processFiles function after all mocks are set up +const { processFiles } = require('./process'); +const { updateFileUsage } = require('~/models/File'); + +describe('processFiles', () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + + describe('null filtering functionality', () => { + it('should filter out null results from updateFileUsage when files do 
not exist', async () => { + const mockFiles = [ + { file_id: 'existing-file-1' }, + { file_id: 'non-existent-file' }, + { file_id: 'existing-file-2' }, + ]; + + // Mock updateFileUsage to return null for non-existent files + updateFileUsage.mockImplementation(({ file_id }) => { + if (file_id === 'non-existent-file') { + return Promise.resolve(null); // Simulate file not found in the database + } + return Promise.resolve({ file_id, usage: 1 }); + }); + + const result = await processFiles(mockFiles); + + expect(updateFileUsage).toHaveBeenCalledTimes(3); + expect(result).toEqual([ + { file_id: 'existing-file-1', usage: 1 }, + { file_id: 'existing-file-2', usage: 1 }, + ]); + + // Critical test - ensure no null values in result + expect(result).not.toContain(null); + expect(result).not.toContain(undefined); + expect(result.length).toBe(2); // Only valid files should be returned + }); + + it('should return empty array when all updateFileUsage calls return null', async () => { + const mockFiles = [{ file_id: 'non-existent-1' }, { file_id: 'non-existent-2' }]; + + // All updateFileUsage calls return null + updateFileUsage.mockResolvedValue(null); + + const result = await processFiles(mockFiles); + + expect(updateFileUsage).toHaveBeenCalledTimes(2); + expect(result).toEqual([]); + expect(result).not.toContain(null); + expect(result.length).toBe(0); + }); + + it('should work correctly when all files exist', async () => { + const mockFiles = [{ file_id: 'file-1' }, { file_id: 'file-2' }]; + + updateFileUsage.mockImplementation(({ file_id }) => { + return Promise.resolve({ file_id, usage: 1 }); + }); + + const result = await processFiles(mockFiles); + + expect(result).toEqual([ + { file_id: 'file-1', usage: 1 }, + { file_id: 'file-2', usage: 1 }, + ]); + expect(result).not.toContain(null); + expect(result.length).toBe(2); + }); + + it('should handle fileIds parameter and filter nulls correctly', async () => { + const mockFiles = [{ file_id: 'file-1' }]; + const mockFileIds = 
['file-2', 'non-existent-file']; + + updateFileUsage.mockImplementation(({ file_id }) => { + if (file_id === 'non-existent-file') { + return Promise.resolve(null); + } + return Promise.resolve({ file_id, usage: 1 }); + }); + + const result = await processFiles(mockFiles, mockFileIds); + + expect(result).toEqual([ + { file_id: 'file-1', usage: 1 }, + { file_id: 'file-2', usage: 1 }, + ]); + expect(result).not.toContain(null); + expect(result).not.toContain(undefined); + expect(result.length).toBe(2); + }); + + it('should handle duplicate file_ids correctly', async () => { + const mockFiles = [ + { file_id: 'duplicate-file' }, + { file_id: 'duplicate-file' }, // Duplicate should be ignored + { file_id: 'unique-file' }, + ]; + + updateFileUsage.mockImplementation(({ file_id }) => { + return Promise.resolve({ file_id, usage: 1 }); + }); + + const result = await processFiles(mockFiles); + + // Should only call updateFileUsage twice (duplicate ignored) + expect(updateFileUsage).toHaveBeenCalledTimes(2); + expect(result).toEqual([ + { file_id: 'duplicate-file', usage: 1 }, + { file_id: 'unique-file', usage: 1 }, + ]); + expect(result.length).toBe(2); + }); + }); + + describe('edge cases', () => { + it('should handle empty files array', async () => { + const result = await processFiles([]); + expect(result).toEqual([]); + expect(updateFileUsage).not.toHaveBeenCalled(); + }); + + it('should handle mixed null and undefined returns from updateFileUsage', async () => { + const mockFiles = [{ file_id: 'file-1' }, { file_id: 'file-2' }, { file_id: 'file-3' }]; + + updateFileUsage.mockImplementation(({ file_id }) => { + if (file_id === 'file-1') return Promise.resolve(null); + if (file_id === 'file-2') return Promise.resolve(undefined); + return Promise.resolve({ file_id, usage: 1 }); + }); + + const result = await processFiles(mockFiles); + + expect(result).toEqual([{ file_id: 'file-3', usage: 1 }]); + expect(result).not.toContain(null); + 
expect(result).not.toContain(undefined); + expect(result.length).toBe(1); + }); + }); +}); From 33b4a97b427865c2aa347507793783b281db7fd0 Mon Sep 17 00:00:00 2001 From: Danny Avila Date: Thu, 26 Jun 2025 18:50:15 -0400 Subject: [PATCH 16/65] =?UTF-8?q?=F0=9F=94=92=20fix:=20Agents=20Config/Per?= =?UTF-8?q?mission=20Checks=20after=20Streamline=20Change=20(#8089)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * refactor: access control logic to TypeScript * chore: Change EndpointURLs to a constant object for improved type safety * 🐛 fix: Enhance agent access control by adding skipAgentCheck functionality * 🐛 fix: Add endpointFileConfig prop to AttachFileMenu and update file handling logic * 🐛 fix: Update tool handling logic to support optional groupedTools and improve null checks, add dedicated tool dialog for Assistants * chore: Export Accordion component from UI index for improved modularity * feat: Add ActivePanelContext for managing active panel state across components * chore: Replace string IDs with EModelEndpoint constants for assistants and agents in useSideNavLinks * fix: Integrate access checks for agent creation and deletion routes in actions.js --- api/server/controllers/agents/client.js | 10 +- api/server/controllers/tools.js | 13 +- api/server/middleware/roles/access.js | 78 ------ api/server/middleware/roles/index.js | 3 - api/server/routes/agents/actions.js | 22 +- api/server/routes/agents/chat.js | 10 +- api/server/routes/agents/v1.js | 31 ++- api/server/routes/memories.js | 52 ++-- api/server/routes/prompts.js | 34 ++- api/server/routes/tags.js | 16 +- client/src/Providers/ActivePanelContext.tsx | 37 +++ client/src/Providers/AgentPanelContext.tsx | 63 +++-- client/src/Providers/index.ts | 1 + client/src/common/types.ts | 2 +- .../Chat/Input/Files/AttachFileChat.tsx | 21 +- .../Chat/Input/Files/AttachFileMenu.tsx | 5 +- .../SidePanel/Agents/AgentConfig.tsx | 3 +- .../components/SidePanel/Agents/AgentTool.tsx 
| 6 +- .../SidePanel/Builder/AssistantPanel.tsx | 7 +- client/src/components/SidePanel/Nav.tsx | 22 +- .../components/Tools/AssistantToolsDialog.tsx | 254 ++++++++++++++++++ client/src/components/Tools/ToolItem.tsx | 14 +- .../src/components/Tools/ToolSelectDialog.tsx | 15 +- client/src/components/ui/index.ts | 1 + client/src/hooks/Files/useFileHandling.ts | 38 +-- client/src/hooks/Nav/useSideNavLinks.ts | 4 +- packages/api/src/index.ts | 2 + packages/api/src/middleware/access.ts | 141 ++++++++++ packages/api/src/middleware/index.ts | 1 + packages/data-provider/src/config.ts | 4 +- packages/data-provider/src/types.ts | 4 +- 31 files changed, 672 insertions(+), 242 deletions(-) delete mode 100644 api/server/middleware/roles/access.js create mode 100644 client/src/Providers/ActivePanelContext.tsx create mode 100644 client/src/components/Tools/AssistantToolsDialog.tsx create mode 100644 packages/api/src/middleware/access.ts create mode 100644 packages/api/src/middleware/index.ts diff --git a/api/server/controllers/agents/client.js b/api/server/controllers/agents/client.js index f4395b4b32..87fb053479 100644 --- a/api/server/controllers/agents/client.js +++ b/api/server/controllers/agents/client.js @@ -4,6 +4,7 @@ const { sendEvent, createRun, Tokenizer, + checkAccess, memoryInstructions, createMemoryProcessor, } = require('@librechat/api'); @@ -39,8 +40,8 @@ const { spendTokens, spendStructuredTokens } = require('~/models/spendTokens'); const { getFormattedMemories, deleteMemory, setMemory } = require('~/models'); const { encodeAndFormat } = require('~/server/services/Files/images/encode'); const { getProviderConfig } = require('~/server/services/Endpoints'); -const { checkAccess } = require('~/server/middleware/roles/access'); const BaseClient = require('~/app/clients/BaseClient'); +const { getRoleByName } = require('~/models/Role'); const { loadAgent } = require('~/models/Agent'); const { getMCPManager } = require('~/config'); @@ -401,7 +402,12 @@ class AgentClient 
extends BaseClient { if (user.personalization?.memories === false) { return; } - const hasAccess = await checkAccess(user, PermissionTypes.MEMORIES, [Permissions.USE]); + const hasAccess = await checkAccess({ + user, + permissionType: PermissionTypes.MEMORIES, + permissions: [Permissions.USE], + getRoleByName, + }); if (!hasAccess) { logger.debug( diff --git a/api/server/controllers/tools.js b/api/server/controllers/tools.js index 254ecb4f94..8d5d2e9ce6 100644 --- a/api/server/controllers/tools.js +++ b/api/server/controllers/tools.js @@ -1,5 +1,7 @@ const { nanoid } = require('nanoid'); const { EnvVar } = require('@librechat/agents'); +const { checkAccess } = require('@librechat/api'); +const { logger } = require('@librechat/data-schemas'); const { Tools, AuthType, @@ -13,9 +15,8 @@ const { processCodeOutput } = require('~/server/services/Files/Code/process'); const { createToolCall, getToolCallsByConvo } = require('~/models/ToolCall'); const { loadAuthValues } = require('~/server/services/Tools/credentials'); const { loadTools } = require('~/app/clients/tools/util'); -const { checkAccess } = require('~/server/middleware'); +const { getRoleByName } = require('~/models/Role'); const { getMessage } = require('~/models/Message'); -const { logger } = require('~/config'); const fieldsMap = { [Tools.execute_code]: [EnvVar.CODE_API_KEY], @@ -79,6 +80,7 @@ const verifyToolAuth = async (req, res) => { throwError: false, }); } catch (error) { + logger.error('Error loading auth values', error); res.status(200).json({ authenticated: false, message: AuthType.USER_PROVIDED }); return; } @@ -132,7 +134,12 @@ const callTool = async (req, res) => { logger.debug(`[${toolId}/call] User: ${req.user.id}`); let hasAccess = true; if (toolAccessPermType[toolId]) { - hasAccess = await checkAccess(req.user, toolAccessPermType[toolId], [Permissions.USE]); + hasAccess = await checkAccess({ + user: req.user, + permissionType: toolAccessPermType[toolId], + permissions: [Permissions.USE], + 
getRoleByName, + }); } if (!hasAccess) { logger.warn( diff --git a/api/server/middleware/roles/access.js b/api/server/middleware/roles/access.js deleted file mode 100644 index cabbd405b0..0000000000 --- a/api/server/middleware/roles/access.js +++ /dev/null @@ -1,78 +0,0 @@ -const { getRoleByName } = require('~/models/Role'); -const { logger } = require('~/config'); - -/** - * Core function to check if a user has one or more required permissions - * - * @param {object} user - The user object - * @param {PermissionTypes} permissionType - The type of permission to check - * @param {Permissions[]} permissions - The list of specific permissions to check - * @param {Record} [bodyProps] - An optional object where keys are permissions and values are arrays of properties to check - * @param {object} [checkObject] - The object to check properties against - * @returns {Promise} Whether the user has the required permissions - */ -const checkAccess = async (user, permissionType, permissions, bodyProps = {}, checkObject = {}) => { - if (!user) { - return false; - } - - const role = await getRoleByName(user.role); - if (role && role.permissions && role.permissions[permissionType]) { - const hasAnyPermission = permissions.some((permission) => { - if (role.permissions[permissionType][permission]) { - return true; - } - - if (bodyProps[permission] && checkObject) { - return bodyProps[permission].some((prop) => - Object.prototype.hasOwnProperty.call(checkObject, prop), - ); - } - - return false; - }); - - return hasAnyPermission; - } - - return false; -}; - -/** - * Middleware to check if a user has one or more required permissions, optionally based on `req.body` properties. - * - * @param {PermissionTypes} permissionType - The type of permission to check. - * @param {Permissions[]} permissions - The list of specific permissions to check. - * @param {Record} [bodyProps] - An optional object where keys are permissions and values are arrays of `req.body` properties to check. 
- * @returns {(req: ServerRequest, res: ServerResponse, next: NextFunction) => Promise} Express middleware function. - */ -const generateCheckAccess = (permissionType, permissions, bodyProps = {}) => { - return async (req, res, next) => { - try { - const hasAccess = await checkAccess( - req.user, - permissionType, - permissions, - bodyProps, - req.body, - ); - - if (hasAccess) { - return next(); - } - - logger.warn( - `[${permissionType}] Forbidden: Insufficient permissions for User ${req.user.id}: ${permissions.join(', ')}`, - ); - return res.status(403).json({ message: 'Forbidden: Insufficient permissions' }); - } catch (error) { - logger.error(error); - return res.status(500).json({ message: `Server error: ${error.message}` }); - } - }; -}; - -module.exports = { - checkAccess, - generateCheckAccess, -}; diff --git a/api/server/middleware/roles/index.js b/api/server/middleware/roles/index.js index ebc0043f2f..f01b884e5a 100644 --- a/api/server/middleware/roles/index.js +++ b/api/server/middleware/roles/index.js @@ -1,8 +1,5 @@ const checkAdmin = require('./admin'); -const { checkAccess, generateCheckAccess } = require('./access'); module.exports = { checkAdmin, - checkAccess, - generateCheckAccess, }; diff --git a/api/server/routes/agents/actions.js b/api/server/routes/agents/actions.js index 89d6a9dc42..2f11486a0e 100644 --- a/api/server/routes/agents/actions.js +++ b/api/server/routes/agents/actions.js @@ -1,14 +1,28 @@ const express = require('express'); const { nanoid } = require('nanoid'); -const { actionDelimiter, SystemRoles, removeNullishValues } = require('librechat-data-provider'); +const { logger } = require('@librechat/data-schemas'); +const { generateCheckAccess } = require('@librechat/api'); +const { + SystemRoles, + Permissions, + PermissionTypes, + actionDelimiter, + removeNullishValues, +} = require('librechat-data-provider'); const { encryptMetadata, domainParser } = require('~/server/services/ActionService'); const { updateAction, getActions, 
deleteAction } = require('~/models/Action'); const { isActionDomainAllowed } = require('~/server/services/domains'); const { getAgent, updateAgent } = require('~/models/Agent'); -const { logger } = require('~/config'); +const { getRoleByName } = require('~/models/Role'); const router = express.Router(); +const checkAgentCreate = generateCheckAccess({ + permissionType: PermissionTypes.AGENTS, + permissions: [Permissions.USE, Permissions.CREATE], + getRoleByName, +}); + // If the user has ADMIN role // then action edition is possible even if not owner of the assistant const isAdmin = (req) => { @@ -41,7 +55,7 @@ router.get('/', async (req, res) => { * @param {ActionMetadata} req.body.metadata - Metadata for the action. * @returns {Object} 200 - success response - application/json */ -router.post('/:agent_id', async (req, res) => { +router.post('/:agent_id', checkAgentCreate, async (req, res) => { try { const { agent_id } = req.params; @@ -149,7 +163,7 @@ router.post('/:agent_id', async (req, res) => { * @param {string} req.params.action_id - The ID of the action to delete. 
* @returns {Object} 200 - success response - application/json */ -router.delete('/:agent_id/:action_id', async (req, res) => { +router.delete('/:agent_id/:action_id', checkAgentCreate, async (req, res) => { try { const { agent_id, action_id } = req.params; const admin = isAdmin(req); diff --git a/api/server/routes/agents/chat.js b/api/server/routes/agents/chat.js index ef66ef7896..0e07c83bd1 100644 --- a/api/server/routes/agents/chat.js +++ b/api/server/routes/agents/chat.js @@ -1,22 +1,28 @@ const express = require('express'); +const { generateCheckAccess, skipAgentCheck } = require('@librechat/api'); const { PermissionTypes, Permissions } = require('librechat-data-provider'); const { setHeaders, moderateText, // validateModel, - generateCheckAccess, validateConvoAccess, buildEndpointOption, } = require('~/server/middleware'); const { initializeClient } = require('~/server/services/Endpoints/agents'); const AgentController = require('~/server/controllers/agents/request'); const addTitle = require('~/server/services/Endpoints/agents/title'); +const { getRoleByName } = require('~/models/Role'); const router = express.Router(); router.use(moderateText); -const checkAgentAccess = generateCheckAccess(PermissionTypes.AGENTS, [Permissions.USE]); +const checkAgentAccess = generateCheckAccess({ + permissionType: PermissionTypes.AGENTS, + permissions: [Permissions.USE], + skipCheck: skipAgentCheck, + getRoleByName, +}); router.use(checkAgentAccess); router.use(validateConvoAccess); diff --git a/api/server/routes/agents/v1.js b/api/server/routes/agents/v1.js index 657aa79414..0455b23948 100644 --- a/api/server/routes/agents/v1.js +++ b/api/server/routes/agents/v1.js @@ -1,29 +1,36 @@ const express = require('express'); +const { generateCheckAccess } = require('@librechat/api'); const { PermissionTypes, Permissions } = require('librechat-data-provider'); -const { requireJwtAuth, generateCheckAccess } = require('~/server/middleware'); +const { requireJwtAuth } = 
require('~/server/middleware'); const v1 = require('~/server/controllers/agents/v1'); +const { getRoleByName } = require('~/models/Role'); const actions = require('./actions'); const tools = require('./tools'); const router = express.Router(); const avatar = express.Router(); -const checkAgentAccess = generateCheckAccess(PermissionTypes.AGENTS, [Permissions.USE]); -const checkAgentCreate = generateCheckAccess(PermissionTypes.AGENTS, [ - Permissions.USE, - Permissions.CREATE, -]); +const checkAgentAccess = generateCheckAccess({ + permissionType: PermissionTypes.AGENTS, + permissions: [Permissions.USE], + getRoleByName, +}); +const checkAgentCreate = generateCheckAccess({ + permissionType: PermissionTypes.AGENTS, + permissions: [Permissions.USE, Permissions.CREATE], + getRoleByName, +}); -const checkGlobalAgentShare = generateCheckAccess( - PermissionTypes.AGENTS, - [Permissions.USE, Permissions.CREATE], - { +const checkGlobalAgentShare = generateCheckAccess({ + permissionType: PermissionTypes.AGENTS, + permissions: [Permissions.USE, Permissions.CREATE], + bodyProps: { [Permissions.SHARED_GLOBAL]: ['projectIds', 'removeProjectIds'], }, -); + getRoleByName, +}); router.use(requireJwtAuth); -router.use(checkAgentAccess); /** * Agent actions route. 
diff --git a/api/server/routes/memories.js b/api/server/routes/memories.js index 86065fecaa..fe520de000 100644 --- a/api/server/routes/memories.js +++ b/api/server/routes/memories.js @@ -1,37 +1,43 @@ const express = require('express'); -const { Tokenizer } = require('@librechat/api'); +const { Tokenizer, generateCheckAccess } = require('@librechat/api'); const { PermissionTypes, Permissions } = require('librechat-data-provider'); const { getAllUserMemories, toggleUserMemories, createMemory, - setMemory, deleteMemory, + setMemory, } = require('~/models'); -const { requireJwtAuth, generateCheckAccess } = require('~/server/middleware'); +const { requireJwtAuth } = require('~/server/middleware'); +const { getRoleByName } = require('~/models/Role'); const router = express.Router(); -const checkMemoryRead = generateCheckAccess(PermissionTypes.MEMORIES, [ - Permissions.USE, - Permissions.READ, -]); -const checkMemoryCreate = generateCheckAccess(PermissionTypes.MEMORIES, [ - Permissions.USE, - Permissions.CREATE, -]); -const checkMemoryUpdate = generateCheckAccess(PermissionTypes.MEMORIES, [ - Permissions.USE, - Permissions.UPDATE, -]); -const checkMemoryDelete = generateCheckAccess(PermissionTypes.MEMORIES, [ - Permissions.USE, - Permissions.UPDATE, -]); -const checkMemoryOptOut = generateCheckAccess(PermissionTypes.MEMORIES, [ - Permissions.USE, - Permissions.OPT_OUT, -]); +const checkMemoryRead = generateCheckAccess({ + permissionType: PermissionTypes.MEMORIES, + permissions: [Permissions.USE, Permissions.READ], + getRoleByName, +}); +const checkMemoryCreate = generateCheckAccess({ + permissionType: PermissionTypes.MEMORIES, + permissions: [Permissions.USE, Permissions.CREATE], + getRoleByName, +}); +const checkMemoryUpdate = generateCheckAccess({ + permissionType: PermissionTypes.MEMORIES, + permissions: [Permissions.USE, Permissions.UPDATE], + getRoleByName, +}); +const checkMemoryDelete = generateCheckAccess({ + permissionType: PermissionTypes.MEMORIES, + 
permissions: [Permissions.USE, Permissions.UPDATE], + getRoleByName, +}); +const checkMemoryOptOut = generateCheckAccess({ + permissionType: PermissionTypes.MEMORIES, + permissions: [Permissions.USE, Permissions.OPT_OUT], + getRoleByName, +}); router.use(requireJwtAuth); diff --git a/api/server/routes/prompts.js b/api/server/routes/prompts.js index e3ab5bf5d3..c18418cba5 100644 --- a/api/server/routes/prompts.js +++ b/api/server/routes/prompts.js @@ -1,5 +1,7 @@ const express = require('express'); -const { PermissionTypes, Permissions, SystemRoles } = require('librechat-data-provider'); +const { logger } = require('@librechat/data-schemas'); +const { generateCheckAccess } = require('@librechat/api'); +const { Permissions, SystemRoles, PermissionTypes } = require('librechat-data-provider'); const { getPrompt, getPrompts, @@ -14,24 +16,30 @@ const { // updatePromptLabels, makePromptProduction, } = require('~/models/Prompt'); -const { requireJwtAuth, generateCheckAccess } = require('~/server/middleware'); -const { logger } = require('~/config'); +const { requireJwtAuth } = require('~/server/middleware'); +const { getRoleByName } = require('~/models/Role'); const router = express.Router(); -const checkPromptAccess = generateCheckAccess(PermissionTypes.PROMPTS, [Permissions.USE]); -const checkPromptCreate = generateCheckAccess(PermissionTypes.PROMPTS, [ - Permissions.USE, - Permissions.CREATE, -]); +const checkPromptAccess = generateCheckAccess({ + permissionType: PermissionTypes.PROMPTS, + permissions: [Permissions.USE], + getRoleByName, +}); +const checkPromptCreate = generateCheckAccess({ + permissionType: PermissionTypes.PROMPTS, + permissions: [Permissions.USE, Permissions.CREATE], + getRoleByName, +}); -const checkGlobalPromptShare = generateCheckAccess( - PermissionTypes.PROMPTS, - [Permissions.USE, Permissions.CREATE], - { +const checkGlobalPromptShare = generateCheckAccess({ + permissionType: PermissionTypes.PROMPTS, + permissions: [Permissions.USE, 
Permissions.CREATE], + bodyProps: { [Permissions.SHARED_GLOBAL]: ['projectIds', 'removeProjectIds'], }, -); + getRoleByName, +}); router.use(requireJwtAuth); router.use(checkPromptAccess); diff --git a/api/server/routes/tags.js b/api/server/routes/tags.js index d3e27d3711..0a4ee5084c 100644 --- a/api/server/routes/tags.js +++ b/api/server/routes/tags.js @@ -1,18 +1,24 @@ const express = require('express'); +const { logger } = require('@librechat/data-schemas'); +const { generateCheckAccess } = require('@librechat/api'); const { PermissionTypes, Permissions } = require('librechat-data-provider'); const { - getConversationTags, + updateTagsForConversation, updateConversationTag, createConversationTag, deleteConversationTag, - updateTagsForConversation, + getConversationTags, } = require('~/models/ConversationTag'); -const { requireJwtAuth, generateCheckAccess } = require('~/server/middleware'); -const { logger } = require('~/config'); +const { requireJwtAuth } = require('~/server/middleware'); +const { getRoleByName } = require('~/models/Role'); const router = express.Router(); -const checkBookmarkAccess = generateCheckAccess(PermissionTypes.BOOKMARKS, [Permissions.USE]); +const checkBookmarkAccess = generateCheckAccess({ + permissionType: PermissionTypes.BOOKMARKS, + permissions: [Permissions.USE], + getRoleByName, +}); router.use(requireJwtAuth); router.use(checkBookmarkAccess); diff --git a/client/src/Providers/ActivePanelContext.tsx b/client/src/Providers/ActivePanelContext.tsx new file mode 100644 index 0000000000..4a8d6ccfc4 --- /dev/null +++ b/client/src/Providers/ActivePanelContext.tsx @@ -0,0 +1,37 @@ +import { createContext, useContext, useState, ReactNode } from 'react'; + +interface ActivePanelContextType { + active: string | undefined; + setActive: (id: string) => void; +} + +const ActivePanelContext = createContext(undefined); + +export function ActivePanelProvider({ + children, + defaultActive, +}: { + children: ReactNode; + defaultActive?: string; +}) 
{ + const [active, _setActive] = useState(defaultActive); + + const setActive = (id: string) => { + localStorage.setItem('side:active-panel', id); + _setActive(id); + }; + + return ( + + {children} + + ); +} + +export function useActivePanel() { + const context = useContext(ActivePanelContext); + if (context === undefined) { + throw new Error('useActivePanel must be used within an ActivePanelProvider'); + } + return context; +} diff --git a/client/src/Providers/AgentPanelContext.tsx b/client/src/Providers/AgentPanelContext.tsx index 2cc64ba3ed..b15d334078 100644 --- a/client/src/Providers/AgentPanelContext.tsx +++ b/client/src/Providers/AgentPanelContext.tsx @@ -40,41 +40,40 @@ export function AgentPanelProvider({ children }: { children: React.ReactNode }) agent_id: agent_id || '', })) || []; - const groupedTools = - tools?.reduce( - (acc, tool) => { - if (tool.tool_id.includes(Constants.mcp_delimiter)) { - const [_toolName, serverName] = tool.tool_id.split(Constants.mcp_delimiter); - const groupKey = `${serverName.toLowerCase()}`; - if (!acc[groupKey]) { - acc[groupKey] = { - tool_id: groupKey, - metadata: { - name: `${serverName}`, - pluginKey: groupKey, - description: `${localize('com_ui_tool_collection_prefix')} ${serverName}`, - icon: tool.metadata.icon || '', - } as TPlugin, - agent_id: agent_id || '', - tools: [], - }; - } - acc[groupKey].tools?.push({ - tool_id: tool.tool_id, - metadata: tool.metadata, - agent_id: agent_id || '', - }); - } else { - acc[tool.tool_id] = { - tool_id: tool.tool_id, - metadata: tool.metadata, + const groupedTools = tools?.reduce( + (acc, tool) => { + if (tool.tool_id.includes(Constants.mcp_delimiter)) { + const [_toolName, serverName] = tool.tool_id.split(Constants.mcp_delimiter); + const groupKey = `${serverName.toLowerCase()}`; + if (!acc[groupKey]) { + acc[groupKey] = { + tool_id: groupKey, + metadata: { + name: `${serverName}`, + pluginKey: groupKey, + description: `${localize('com_ui_tool_collection_prefix')} 
${serverName}`, + icon: tool.metadata.icon || '', + } as TPlugin, agent_id: agent_id || '', + tools: [], }; } - return acc; - }, - {} as Record, - ) || {}; + acc[groupKey].tools?.push({ + tool_id: tool.tool_id, + metadata: tool.metadata, + agent_id: agent_id || '', + }); + } else { + acc[tool.tool_id] = { + tool_id: tool.tool_id, + metadata: tool.metadata, + agent_id: agent_id || '', + }; + } + return acc; + }, + {} as Record, + ); const value = { action, diff --git a/client/src/Providers/index.ts b/client/src/Providers/index.ts index 8809532b49..b455cb3f1e 100644 --- a/client/src/Providers/index.ts +++ b/client/src/Providers/index.ts @@ -1,6 +1,7 @@ export { default as AssistantsProvider } from './AssistantsContext'; export { default as AgentsProvider } from './AgentsContext'; export { default as ToastProvider } from './ToastContext'; +export * from './ActivePanelContext'; export * from './AgentPanelContext'; export * from './ChatContext'; export * from './ShareContext'; diff --git a/client/src/common/types.ts b/client/src/common/types.ts index 214dc349b5..c7f2d6788a 100644 --- a/client/src/common/types.ts +++ b/client/src/common/types.ts @@ -219,11 +219,11 @@ export type AgentPanelContextType = { mcps?: t.MCP[]; setMcp: React.Dispatch>; setMcps: React.Dispatch>; - groupedTools: Record; tools: t.AgentToolType[]; activePanel?: string; setActivePanel: React.Dispatch>; setCurrentAgentId: React.Dispatch>; + groupedTools?: Record; agent_id?: string; }; diff --git a/client/src/components/Chat/Input/Files/AttachFileChat.tsx b/client/src/components/Chat/Input/Files/AttachFileChat.tsx index 746c3d9c17..d49230ff89 100644 --- a/client/src/components/Chat/Input/Files/AttachFileChat.tsx +++ b/client/src/components/Chat/Input/Files/AttachFileChat.tsx @@ -4,9 +4,9 @@ import { supportsFiles, mergeFileConfig, isAgentsEndpoint, - EndpointFileConfig, fileConfig as defaultFileConfig, } from 'librechat-data-provider'; +import type { EndpointFileConfig } from 'librechat-data-provider'; 
import { useGetFileConfig } from '~/data-provider'; import AttachFileMenu from './AttachFileMenu'; import { useChatContext } from '~/Providers'; @@ -14,22 +14,25 @@ import { useChatContext } from '~/Providers'; function AttachFileChat({ disableInputs }: { disableInputs: boolean }) { const { conversation } = useChatContext(); const conversationId = conversation?.conversationId ?? Constants.NEW_CONVO; - const { endpoint: _endpoint, endpointType } = conversation ?? { endpoint: null }; - const isAgents = useMemo(() => isAgentsEndpoint(_endpoint), [_endpoint]); + const { endpoint, endpointType } = conversation ?? { endpoint: null }; + const isAgents = useMemo(() => isAgentsEndpoint(endpoint), [endpoint]); const { data: fileConfig = defaultFileConfig } = useGetFileConfig({ select: (data) => mergeFileConfig(data), }); - const endpointFileConfig = fileConfig.endpoints[_endpoint ?? ''] as - | EndpointFileConfig - | undefined; - - const endpointSupportsFiles: boolean = supportsFiles[endpointType ?? _endpoint ?? ''] ?? false; + const endpointFileConfig = fileConfig.endpoints[endpoint ?? ''] as EndpointFileConfig | undefined; + const endpointSupportsFiles: boolean = supportsFiles[endpointType ?? endpoint ?? ''] ?? false; const isUploadDisabled = (disableInputs || endpointFileConfig?.disabled) ?? 
false; if (isAgents || (endpointSupportsFiles && !isUploadDisabled)) { - return ; + return ( + + ); } return null; diff --git a/client/src/components/Chat/Input/Files/AttachFileMenu.tsx b/client/src/components/Chat/Input/Files/AttachFileMenu.tsx index 2bffa4f50c..c038f30114 100644 --- a/client/src/components/Chat/Input/Files/AttachFileMenu.tsx +++ b/client/src/components/Chat/Input/Files/AttachFileMenu.tsx @@ -2,6 +2,7 @@ import { useSetRecoilState } from 'recoil'; import * as Ariakit from '@ariakit/react'; import React, { useRef, useState, useMemo } from 'react'; import { FileSearch, ImageUpIcon, TerminalSquareIcon, FileType2Icon } from 'lucide-react'; +import type { EndpointFileConfig } from 'librechat-data-provider'; import { FileUpload, TooltipAnchor, DropdownPopup, AttachmentIcon } from '~/components'; import { EToolResources, EModelEndpoint } from 'librechat-data-provider'; import { useGetEndpointsQuery } from '~/data-provider'; @@ -12,9 +13,10 @@ import { cn } from '~/utils'; interface AttachFileMenuProps { conversationId: string; disabled?: boolean | null; + endpointFileConfig?: EndpointFileConfig; } -const AttachFileMenu = ({ disabled, conversationId }: AttachFileMenuProps) => { +const AttachFileMenu = ({ disabled, conversationId, endpointFileConfig }: AttachFileMenuProps) => { const localize = useLocalize(); const isUploadDisabled = disabled ?? 
false; const inputRef = useRef(null); @@ -24,6 +26,7 @@ const AttachFileMenu = ({ disabled, conversationId }: AttachFileMenuProps) => { const { data: endpointsConfig } = useGetEndpointsQuery(); const { handleFileChange } = useFileHandling({ overrideEndpoint: EModelEndpoint.agents, + overrideEndpointFileConfig: endpointFileConfig, }); /** TODO: Ephemeral Agent Capabilities diff --git a/client/src/components/SidePanel/Agents/AgentConfig.tsx b/client/src/components/SidePanel/Agents/AgentConfig.tsx index 2afa56601c..b622868acd 100644 --- a/client/src/components/SidePanel/Agents/AgentConfig.tsx +++ b/client/src/components/SidePanel/Agents/AgentConfig.tsx @@ -168,7 +168,7 @@ export default function AgentConfig({ const visibleToolIds = new Set(selectedToolIds); // Check what group parent tools should be shown if any subtool is present - Object.entries(allTools).forEach(([toolId, toolObj]) => { + Object.entries(allTools ?? {}).forEach(([toolId, toolObj]) => { if (toolObj.tools?.length) { // if any subtool of this group is selected, ensure group parent tool rendered if (toolObj.tools.some((st) => selectedToolIds.includes(st.tool_id))) { @@ -299,6 +299,7 @@ export default function AgentConfig({
{/* // Render all visible IDs (including groups with subtools selected) */} {[...visibleToolIds].map((toolId, i) => { + if (!allTools) return null; const tool = allTools[toolId]; if (!tool) return null; return ( diff --git a/client/src/components/SidePanel/Agents/AgentTool.tsx b/client/src/components/SidePanel/Agents/AgentTool.tsx index 4876f447fb..6ea613dc78 100644 --- a/client/src/components/SidePanel/Agents/AgentTool.tsx +++ b/client/src/components/SidePanel/Agents/AgentTool.tsx @@ -19,7 +19,7 @@ export default function AgentTool({ allTools, }: { tool: string; - allTools: Record; + allTools?: Record; agent_id?: string; }) { const [isHovering, setIsHovering] = useState(false); @@ -30,8 +30,10 @@ export default function AgentTool({ const { showToast } = useToastContext(); const updateUserPlugins = useUpdateUserPluginsMutation(); const { getValues, setValue } = useFormContext(); + if (!allTools) { + return null; + } const currentTool = allTools[tool]; - const getSelectedTools = () => { if (!currentTool?.tools) return []; const formTools = getValues('tools') || []; diff --git a/client/src/components/SidePanel/Builder/AssistantPanel.tsx b/client/src/components/SidePanel/Builder/AssistantPanel.tsx index c78d456ff1..4c3a794823 100644 --- a/client/src/components/SidePanel/Builder/AssistantPanel.tsx +++ b/client/src/components/SidePanel/Builder/AssistantPanel.tsx @@ -17,9 +17,9 @@ import { } from '~/data-provider'; import { cn, cardStyle, defaultTextProps, removeFocusOutlines } from '~/utils'; import AssistantConversationStarters from './AssistantConversationStarters'; +import AssistantToolsDialog from '~/components/Tools/AssistantToolsDialog'; import { useAssistantsMapContext, useToastContext } from '~/Providers'; import { useSelectAssistant, useLocalize } from '~/hooks'; -import { ToolSelectDialog } from '~/components/Tools'; import AppendDateCheckbox from './AppendDateCheckbox'; import CapabilitiesForm from './CapabilitiesForm'; import { SelectDropDown } from 
'~/components/ui'; @@ -468,11 +468,10 @@ export default function AssistantPanel({
- diff --git a/client/src/components/SidePanel/Nav.tsx b/client/src/components/SidePanel/Nav.tsx index d901d6b47a..fa6d8751b1 100644 --- a/client/src/components/SidePanel/Nav.tsx +++ b/client/src/components/SidePanel/Nav.tsx @@ -1,21 +1,15 @@ -import { useState } from 'react'; import * as AccordionPrimitive from '@radix-ui/react-accordion'; import type { NavLink, NavProps } from '~/common'; -import { Accordion, AccordionItem, AccordionContent } from '~/components/ui/Accordion'; -import { TooltipAnchor, Button } from '~/components'; +import { AccordionContent, AccordionItem, TooltipAnchor, Accordion, Button } from '~/components/ui'; +import { ActivePanelProvider, useActivePanel } from '~/Providers'; import { useLocalize } from '~/hooks'; import { cn } from '~/utils'; -export default function Nav({ links, isCollapsed, resize, defaultActive }: NavProps) { +function NavContent({ links, isCollapsed, resize }: Omit) { const localize = useLocalize(); - const [active, _setActive] = useState(defaultActive); + const { active, setActive } = useActivePanel(); const getVariant = (link: NavLink) => (link.id === active ? 'default' : 'ghost'); - const setActive = (id: string) => { - localStorage.setItem('side:active-panel', id + ''); - _setActive(id); - }; - return (
); } + +export default function Nav({ links, isCollapsed, resize, defaultActive }: NavProps) { + return ( + + + + ); +} diff --git a/client/src/components/Tools/AssistantToolsDialog.tsx b/client/src/components/Tools/AssistantToolsDialog.tsx new file mode 100644 index 0000000000..ce013af135 --- /dev/null +++ b/client/src/components/Tools/AssistantToolsDialog.tsx @@ -0,0 +1,254 @@ +import { useEffect } from 'react'; +import { Search, X } from 'lucide-react'; +import { Dialog, DialogPanel, DialogTitle, Description } from '@headlessui/react'; +import { useFormContext } from 'react-hook-form'; +import { isAgentsEndpoint } from 'librechat-data-provider'; +import { useUpdateUserPluginsMutation } from 'librechat-data-provider/react-query'; +import type { + AssistantsEndpoint, + EModelEndpoint, + TPluginAction, + TError, +} from 'librechat-data-provider'; +import type { TPluginStoreDialogProps } from '~/common/types'; +import { PluginPagination, PluginAuthForm } from '~/components/Plugins/Store'; +import { useLocalize, usePluginDialogHelpers } from '~/hooks'; +import { useAvailableToolsQuery } from '~/data-provider'; +import ToolItem from './ToolItem'; + +function AssistantToolsDialog({ + isOpen, + endpoint, + setIsOpen, +}: TPluginStoreDialogProps & { + endpoint: AssistantsEndpoint | EModelEndpoint.agents; +}) { + const localize = useLocalize(); + const { getValues, setValue } = useFormContext(); + const { data: tools } = useAvailableToolsQuery(endpoint); + const isAgentTools = isAgentsEndpoint(endpoint); + + const { + maxPage, + setMaxPage, + currentPage, + setCurrentPage, + itemsPerPage, + searchChanged, + setSearchChanged, + searchValue, + setSearchValue, + gridRef, + handleSearch, + handleChangePage, + error, + setError, + errorMessage, + setErrorMessage, + showPluginAuthForm, + setShowPluginAuthForm, + selectedPlugin, + setSelectedPlugin, + } = usePluginDialogHelpers(); + + const updateUserPlugins = useUpdateUserPluginsMutation(); + const handleInstallError = (error: 
TError) => { + setError(true); + const errorMessage = error.response?.data?.message ?? ''; + if (errorMessage) { + setErrorMessage(errorMessage); + } + setTimeout(() => { + setError(false); + setErrorMessage(''); + }, 5000); + }; + + const handleInstall = (pluginAction: TPluginAction) => { + const addFunction = () => { + const fns = getValues('functions').slice(); + fns.push(pluginAction.pluginKey); + setValue('functions', fns); + }; + + if (!pluginAction.auth) { + return addFunction(); + } + + updateUserPlugins.mutate(pluginAction, { + onError: (error: unknown) => { + handleInstallError(error as TError); + }, + onSuccess: addFunction, + }); + + setShowPluginAuthForm(false); + }; + + const onRemoveTool = (tool: string) => { + setShowPluginAuthForm(false); + updateUserPlugins.mutate( + { pluginKey: tool, action: 'uninstall', auth: null, isEntityTool: true }, + { + onError: (error: unknown) => { + handleInstallError(error as TError); + }, + onSuccess: () => { + const fns = getValues('functions').filter((fn: string) => fn !== tool); + setValue('functions', fns); + }, + }, + ); + }; + + const onAddTool = (pluginKey: string) => { + setShowPluginAuthForm(false); + const getAvailablePluginFromKey = tools?.find((p) => p.pluginKey === pluginKey); + setSelectedPlugin(getAvailablePluginFromKey); + + const { authConfig, authenticated = false } = getAvailablePluginFromKey ?? 
{}; + + if (authConfig && authConfig.length > 0 && !authenticated) { + setShowPluginAuthForm(true); + } else { + handleInstall({ pluginKey, action: 'install', auth: null }); + } + }; + + const filteredTools = tools?.filter((tool) => + tool.name.toLowerCase().includes(searchValue.toLowerCase()), + ); + + useEffect(() => { + if (filteredTools) { + setMaxPage(Math.ceil(filteredTools.length / itemsPerPage)); + if (searchChanged) { + setCurrentPage(1); + setSearchChanged(false); + } + } + }, [ + tools, + itemsPerPage, + searchValue, + filteredTools, + searchChanged, + setMaxPage, + setCurrentPage, + setSearchChanged, + ]); + + return ( + { + setIsOpen(false); + setCurrentPage(1); + setSearchValue(''); + }} + className="relative z-[102]" + > + {/* The backdrop, rendered as a fixed sibling to the panel container */} +
+ {/* Full-screen container to center the panel */} +
+ +
+
+
+ + {isAgentTools + ? localize('com_nav_tool_dialog_agents') + : localize('com_nav_tool_dialog')} + + + {localize('com_nav_tool_dialog_description')} + +
+
+
+
+ +
+
+
+ {error && ( +
+ {localize('com_nav_plugin_auth_error')} {errorMessage} +
+ )} + {showPluginAuthForm && ( +
+ handleInstall(installActionData)} + isEntityTool={true} + /> +
+ )} +
+
+
+ + +
+
+ {filteredTools && + filteredTools + .slice((currentPage - 1) * itemsPerPage, currentPage * itemsPerPage) + .map((tool, index) => ( + onAddTool(tool.pluginKey)} + onRemoveTool={() => onRemoveTool(tool.pluginKey)} + /> + ))} +
+
+
+ {maxPage > 0 ? ( + + ) : ( +
+ )} +
+
+
+
+
+ ); +} + +export default AssistantToolsDialog; diff --git a/client/src/components/Tools/ToolItem.tsx b/client/src/components/Tools/ToolItem.tsx index 0b16b0ba42..501c08848a 100644 --- a/client/src/components/Tools/ToolItem.tsx +++ b/client/src/components/Tools/ToolItem.tsx @@ -1,9 +1,9 @@ import { XCircle, PlusCircleIcon, Wrench } from 'lucide-react'; -import { AgentToolType } from 'librechat-data-provider'; +import type { TPlugin, AgentToolType } from 'librechat-data-provider'; import { useLocalize } from '~/hooks'; type ToolItemProps = { - tool: AgentToolType; + tool: TPlugin | AgentToolType; onAddTool: () => void; onRemoveTool: () => void; isInstalled?: boolean; @@ -19,9 +19,13 @@ function ToolItem({ tool, onAddTool, onRemoveTool, isInstalled = false }: ToolIt } }; - const name = tool.metadata?.name || tool.tool_id; - const description = tool.metadata?.description || ''; - const icon = tool.metadata?.icon; + const name = + (tool as AgentToolType).metadata?.name || + (tool as AgentToolType).tool_id || + (tool as TPlugin).name; + const description = + (tool as AgentToolType).metadata?.description || (tool as TPlugin).description || ''; + const icon = (tool as AgentToolType).metadata?.icon || (tool as TPlugin).icon; return (
diff --git a/client/src/components/Tools/ToolSelectDialog.tsx b/client/src/components/Tools/ToolSelectDialog.tsx index cf8c958921..0d380fefbb 100644 --- a/client/src/components/Tools/ToolSelectDialog.tsx +++ b/client/src/components/Tools/ToolSelectDialog.tsx @@ -67,15 +67,14 @@ function ToolSelectDialog({ }, 5000); }; - const toolsFormKey = 'tools'; const handleInstall = (pluginAction: TPluginAction) => { const addFunction = () => { - const installedToolIds: string[] = getValues(toolsFormKey) || []; + const installedToolIds: string[] = getValues('tools') || []; // Add the parent installedToolIds.push(pluginAction.pluginKey); // If this tool is a group, add subtools too - const groupObj = groupedTools[pluginAction.pluginKey]; + const groupObj = groupedTools?.[pluginAction.pluginKey]; if (groupObj?.tools && groupObj.tools.length > 0) { for (const sub of groupObj.tools) { if (!installedToolIds.includes(sub.tool_id)) { @@ -83,7 +82,7 @@ function ToolSelectDialog({ } } } - setValue(toolsFormKey, Array.from(new Set(installedToolIds))); // no duplicates just in case + setValue('tools', Array.from(new Set(installedToolIds))); // no duplicates just in case }; if (!pluginAction.auth) { @@ -101,7 +100,7 @@ function ToolSelectDialog({ }; const onRemoveTool = (toolId: string) => { - const groupObj = groupedTools[toolId]; + const groupObj = groupedTools?.[toolId]; const toolIdsToRemove = [toolId]; if (groupObj?.tools && groupObj.tools.length > 0) { toolIdsToRemove.push(...groupObj.tools.map((sub) => sub.tool_id)); @@ -113,8 +112,8 @@ function ToolSelectDialog({ onError: (error: unknown) => handleInstallError(error as TError), onSuccess: () => { const remainingToolIds = - getValues(toolsFormKey)?.filter((toolId) => !toolIdsToRemove.includes(toolId)) || []; - setValue(toolsFormKey, remainingToolIds); + getValues('tools')?.filter((toolId) => !toolIdsToRemove.includes(toolId)) || []; + setValue('tools', remainingToolIds); }, }, ); @@ -268,7 +267,7 @@ function ToolSelectDialog({ 
onAddTool(tool.tool_id)} onRemoveTool={() => onRemoveTool(tool.tool_id)} /> diff --git a/client/src/components/ui/index.ts b/client/src/components/ui/index.ts index 31443c900f..4f989484de 100644 --- a/client/src/components/ui/index.ts +++ b/client/src/components/ui/index.ts @@ -1,3 +1,4 @@ +export * from './Accordion'; export * from './AnimatedTabs'; export * from './AlertDialog'; export * from './Breadcrumb'; diff --git a/client/src/hooks/Files/useFileHandling.ts b/client/src/hooks/Files/useFileHandling.ts index 7f74a02733..cd1c5834c8 100644 --- a/client/src/hooks/Files/useFileHandling.ts +++ b/client/src/hooks/Files/useFileHandling.ts @@ -1,33 +1,34 @@ -import { useQueryClient } from '@tanstack/react-query'; -import type { TEndpointsConfig, TError } from 'librechat-data-provider'; -import { - defaultAssistantsVersion, - fileConfig as defaultFileConfig, - EModelEndpoint, - isAgentsEndpoint, - isAssistantsEndpoint, - mergeFileConfig, - QueryKeys, -} from 'librechat-data-provider'; -import debounce from 'lodash/debounce'; import React, { useCallback, useEffect, useMemo, useRef, useState } from 'react'; import { v4 } from 'uuid'; +import { useQueryClient } from '@tanstack/react-query'; +import { + QueryKeys, + EModelEndpoint, + mergeFileConfig, + isAgentsEndpoint, + isAssistantsEndpoint, + defaultAssistantsVersion, + fileConfig as defaultFileConfig, +} from 'librechat-data-provider'; +import debounce from 'lodash/debounce'; +import type { EndpointFileConfig, TEndpointsConfig, TError } from 'librechat-data-provider'; import type { ExtendedFile, FileSetter } from '~/common'; import { useGetFileConfig, useUploadFileMutation } from '~/data-provider'; import useLocalize, { TranslationKeys } from '~/hooks/useLocalize'; -import { useChatContext } from '~/Providers/ChatContext'; +import { useDelayedUploadToast } from './useDelayedUploadToast'; +import { processFileForUpload } from '~/utils/heicConverter'; import { useToastContext } from '~/Providers/ToastContext'; +import { 
useChatContext } from '~/Providers/ChatContext'; import { logger, validateFiles } from '~/utils'; import useClientResize from './useClientResize'; -import { processFileForUpload } from '~/utils/heicConverter'; -import { useDelayedUploadToast } from './useDelayedUploadToast'; import useUpdateFiles from './useUpdateFiles'; type UseFileHandling = { - overrideEndpoint?: EModelEndpoint; fileSetter?: FileSetter; fileFilter?: (file: File) => boolean; additionalMetadata?: Record; + overrideEndpoint?: EModelEndpoint; + overrideEndpointFileConfig?: EndpointFileConfig; }; const useFileHandling = (params?: UseFileHandling) => { @@ -246,8 +247,9 @@ const useFileHandling = (params?: UseFileHandling) => { fileList, setError, endpointFileConfig: - fileConfig?.endpoints[endpoint] ?? - fileConfig?.endpoints.default ?? + params?.overrideEndpointFileConfig ?? + fileConfig?.endpoints?.[endpoint] ?? + fileConfig?.endpoints?.default ?? defaultFileConfig.endpoints[endpoint] ?? defaultFileConfig.endpoints.default, }); diff --git a/client/src/hooks/Nav/useSideNavLinks.ts b/client/src/hooks/Nav/useSideNavLinks.ts index 13657c058e..728b856735 100644 --- a/client/src/hooks/Nav/useSideNavLinks.ts +++ b/client/src/hooks/Nav/useSideNavLinks.ts @@ -79,7 +79,7 @@ export default function useSideNavLinks({ title: 'com_sidepanel_assistant_builder', label: '', icon: Blocks, - id: 'assistants', + id: EModelEndpoint.assistants, Component: PanelSwitch, }); } @@ -94,7 +94,7 @@ export default function useSideNavLinks({ title: 'com_sidepanel_agent_builder', label: '', icon: Blocks, - id: 'agents', + id: EModelEndpoint.agents, Component: AgentPanelSwitch, }); } diff --git a/packages/api/src/index.ts b/packages/api/src/index.ts index 0341de44b0..0b00293240 100644 --- a/packages/api/src/index.ts +++ b/packages/api/src/index.ts @@ -11,6 +11,8 @@ export * from './oauth'; export * from './crypto'; /* Flow */ export * from './flow/manager'; +/* Middleware */ +export * from './middleware'; /* Agents */ export * from 
'./agents'; /* Endpoints */ diff --git a/packages/api/src/middleware/access.ts b/packages/api/src/middleware/access.ts new file mode 100644 index 0000000000..d88ade1e56 --- /dev/null +++ b/packages/api/src/middleware/access.ts @@ -0,0 +1,141 @@ +import { logger } from '@librechat/data-schemas'; +import { + Permissions, + EndpointURLs, + EModelEndpoint, + PermissionTypes, + isAgentsEndpoint, +} from 'librechat-data-provider'; +import type { NextFunction, Request as ServerRequest, Response as ServerResponse } from 'express'; +import type { IRole, IUser } from '@librechat/data-schemas'; + +export function skipAgentCheck(req?: ServerRequest): boolean { + if (!req || !req?.body?.endpoint) { + return false; + } + + if (req.method !== 'POST') { + return false; + } + + if (!req.originalUrl?.includes(EndpointURLs[EModelEndpoint.agents])) { + return false; + } + return !isAgentsEndpoint(req.body.endpoint); +} + +/** + * Core function to check if a user has one or more required permissions + * @param user - The user object + * @param permissionType - The type of permission to check + * @param permissions - The list of specific permissions to check + * @param bodyProps - An optional object where keys are permissions and values are arrays of properties to check + * @param checkObject - The object to check properties against + * @param skipCheck - An optional function that takes the checkObject and returns true to skip permission checking + * @returns Whether the user has the required permissions + */ +export const checkAccess = async ({ + req, + user, + permissionType, + permissions, + getRoleByName, + bodyProps = {} as Record, + checkObject = {}, + skipCheck, +}: { + user: IUser; + req?: ServerRequest; + permissionType: PermissionTypes; + permissions: Permissions[]; + bodyProps?: Record; + checkObject?: object; + /** If skipCheck function is provided and returns true, skip permission checking */ + skipCheck?: (req?: ServerRequest) => boolean; + getRoleByName: (roleName: 
string, fieldsToSelect?: string | string[]) => Promise; +}): Promise => { + if (skipCheck && skipCheck(req)) { + return true; + } + + if (!user || !user.role) { + return false; + } + + const role = await getRoleByName(user.role); + if (role && role.permissions && role.permissions[permissionType]) { + const hasAnyPermission = permissions.some((permission) => { + if ( + role.permissions?.[permissionType as keyof typeof role.permissions]?.[ + permission as keyof (typeof role.permissions)[typeof permissionType] + ] + ) { + return true; + } + + if (bodyProps[permission] && checkObject) { + return bodyProps[permission].some((prop) => + Object.prototype.hasOwnProperty.call(checkObject, prop), + ); + } + + return false; + }); + + return hasAnyPermission; + } + + return false; +}; + +/** + * Middleware to check if a user has one or more required permissions, optionally based on `req.body` properties. + * @param permissionType - The type of permission to check. + * @param permissions - The list of specific permissions to check. + * @param bodyProps - An optional object where keys are permissions and values are arrays of `req.body` properties to check. + * @param skipCheck - An optional function that takes req.body and returns true to skip permission checking. + * @param getRoleByName - A function to get the role by name. + * @returns Express middleware function. 
+ */ +export const generateCheckAccess = ({ + permissionType, + permissions, + bodyProps = {} as Record, + skipCheck, + getRoleByName, +}: { + permissionType: PermissionTypes; + permissions: Permissions[]; + bodyProps?: Record; + skipCheck?: (req?: ServerRequest) => boolean; + getRoleByName: (roleName: string, fieldsToSelect?: string | string[]) => Promise; +}): ((req: ServerRequest, res: ServerResponse, next: NextFunction) => Promise) => { + return async (req, res, next) => { + try { + const hasAccess = await checkAccess({ + req, + user: req.user as IUser, + permissionType, + permissions, + bodyProps, + checkObject: req.body, + skipCheck, + getRoleByName, + }); + + if (hasAccess) { + return next(); + } + + logger.warn( + `[${permissionType}] Forbidden: "${req.originalUrl}" - Insufficient permissions for User ${req.user?.id}: ${permissions.join(', ')}`, + ); + return res.status(403).json({ message: 'Forbidden: Insufficient permissions' }); + } catch (error) { + logger.error(error); + return res.status(500).json({ + message: `Server error: ${error instanceof Error ? 
error.message : 'Unknown error'}`, + }); + } + }; +}; diff --git a/packages/api/src/middleware/index.ts b/packages/api/src/middleware/index.ts new file mode 100644 index 0000000000..176e8bc9ac --- /dev/null +++ b/packages/api/src/middleware/index.ts @@ -0,0 +1 @@ +export * from './access'; diff --git a/packages/data-provider/src/config.ts b/packages/data-provider/src/config.ts index 004ed572ca..4d154f4958 100644 --- a/packages/data-provider/src/config.ts +++ b/packages/data-provider/src/config.ts @@ -949,11 +949,11 @@ export const initialModelsConfig: TModelsConfig = { [EModelEndpoint.bedrock]: defaultModels[EModelEndpoint.bedrock], }; -export const EndpointURLs: Record = { +export const EndpointURLs = { [EModelEndpoint.assistants]: '/api/assistants/v2/chat', [EModelEndpoint.azureAssistants]: '/api/assistants/v1/chat', [EModelEndpoint.agents]: `/api/${EModelEndpoint.agents}/chat`, -}; +} as const; export const modularEndpoints = new Set([ EModelEndpoint.gptPlugins, diff --git a/packages/data-provider/src/types.ts b/packages/data-provider/src/types.ts index 469c378aba..877d6f31ae 100644 --- a/packages/data-provider/src/types.ts +++ b/packages/data-provider/src/types.ts @@ -134,7 +134,7 @@ export type EventSubmission = Omit & { initialRe export type TPluginAction = { pluginKey: string; action: 'install' | 'uninstall'; - auth?: Partial>; + auth?: Partial> | null; isEntityTool?: boolean; }; @@ -144,7 +144,7 @@ export type TUpdateUserPlugins = { isEntityTool?: boolean; pluginKey: string; action: string; - auth?: Partial>; + auth?: Partial> | null; }; // TODO `label` needs to be changed to the proper `TranslationKeys` From 452151e4087f6c1eef0b6ee4c08ca7d16c406fcc Mon Sep 17 00:00:00 2001 From: Danny Avila Date: Thu, 26 Jun 2025 19:10:21 -0400 Subject: [PATCH 17/65] =?UTF-8?q?=F0=9F=90=9B=20fix:=20RAG=20API=20failing?= =?UTF-8?q?=20with=20`OPENID=5FREUSE=5FTOKENS`=20Enabled=20(#8090)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 
8bit * feat: Implement Short-Lived JWT Token Generation for RAG API * fix: Update import paths * fix: Correct environment variable names for OpenID on behalf flow * fix: Remove unnecessary spaces in OpenID on behalf flow userinfo scope --------- Co-authored-by: Atef Bellaaj --- .env.example | 4 ++-- api/app/clients/prompts/createContextHandlers.js | 7 ++++--- api/app/clients/tools/util/fileSearch.js | 5 +++-- api/server/services/AuthService.js | 16 +++++++++++++++- api/server/services/Files/Local/crud.js | 5 +++-- api/server/services/Files/VectorDB/crud.js | 6 ++++-- api/strategies/openidStrategy.js | 4 ++-- 7 files changed, 33 insertions(+), 14 deletions(-) diff --git a/.env.example b/.env.example index 2b811c79de..a58a37efb6 100644 --- a/.env.example +++ b/.env.example @@ -453,8 +453,8 @@ OPENID_REUSE_TOKENS= OPENID_JWKS_URL_CACHE_ENABLED= OPENID_JWKS_URL_CACHE_TIME= # 600000 ms eq to 10 minutes leave empty to disable caching #Set to true to trigger token exchange flow to acquire access token for the userinfo endpoint. 
-OPENID_ON_BEHALF_FLOW_FOR_USERINFRO_REQUIRED= -OPENID_ON_BEHALF_FLOW_USERINFRO_SCOPE = "user.read" # example for Scope Needed for Microsoft Graph API +OPENID_ON_BEHALF_FLOW_FOR_USERINFO_REQUIRED= +OPENID_ON_BEHALF_FLOW_USERINFO_SCOPE="user.read" # example for Scope Needed for Microsoft Graph API # Set to true to use the OpenID Connect end session endpoint for logout OPENID_USE_END_SESSION_ENDPOINT= diff --git a/api/app/clients/prompts/createContextHandlers.js b/api/app/clients/prompts/createContextHandlers.js index 57847bea3e..b3ea9164e7 100644 --- a/api/app/clients/prompts/createContextHandlers.js +++ b/api/app/clients/prompts/createContextHandlers.js @@ -1,6 +1,7 @@ const axios = require('axios'); -const { isEnabled } = require('~/server/utils'); -const { logger } = require('~/config'); +const { isEnabled } = require('@librechat/api'); +const { logger } = require('@librechat/data-schemas'); +const { generateShortLivedToken } = require('~/server/services/AuthService'); const footer = `Use the context as your learned knowledge to better answer the user. 
@@ -18,7 +19,7 @@ function createContextHandlers(req, userMessageContent) { const queryPromises = []; const processedFiles = []; const processedIds = new Set(); - const jwtToken = req.headers.authorization.split(' ')[1]; + const jwtToken = generateShortLivedToken(req.user.id); const useFullContext = isEnabled(process.env.RAG_USE_FULL_CONTEXT); const query = async (file) => { diff --git a/api/app/clients/tools/util/fileSearch.js b/api/app/clients/tools/util/fileSearch.js index 19d3a79edb..050a0fd896 100644 --- a/api/app/clients/tools/util/fileSearch.js +++ b/api/app/clients/tools/util/fileSearch.js @@ -1,9 +1,10 @@ const { z } = require('zod'); const axios = require('axios'); const { tool } = require('@langchain/core/tools'); +const { logger } = require('@librechat/data-schemas'); const { Tools, EToolResources } = require('librechat-data-provider'); +const { generateShortLivedToken } = require('~/server/services/AuthService'); const { getFiles } = require('~/models/File'); -const { logger } = require('~/config'); /** * @@ -59,7 +60,7 @@ const createFileSearchTool = async ({ req, files, entity_id }) => { if (files.length === 0) { return 'No files to search. 
Instruct the user to add files for the search.'; } - const jwtToken = req.headers.authorization.split(' ')[1]; + const jwtToken = generateShortLivedToken(req.user.id); if (!jwtToken) { return 'There was an error authenticating the file search request.'; } diff --git a/api/server/services/AuthService.js b/api/server/services/AuthService.js index 6061277437..8c7cbf7d92 100644 --- a/api/server/services/AuthService.js +++ b/api/server/services/AuthService.js @@ -1,4 +1,5 @@ const bcrypt = require('bcryptjs'); +const jwt = require('jsonwebtoken'); const { webcrypto } = require('node:crypto'); const { isEnabled } = require('@librechat/api'); const { logger } = require('@librechat/data-schemas'); @@ -499,6 +500,18 @@ const resendVerificationEmail = async (req) => { }; } }; +/** + * Generate a short-lived JWT token + * @param {String} userId - The ID of the user + * @param {String} [expireIn='5m'] - The expiration time for the token (default is 5 minutes) + * @returns {String} - The generated JWT token + */ +const generateShortLivedToken = (userId, expireIn = '5m') => { + return jwt.sign({ id: userId }, process.env.JWT_SECRET, { + expiresIn: expireIn, + algorithm: 'HS256', + }); +}; module.exports = { logoutUser, @@ -506,7 +519,8 @@ module.exports = { registerUser, setAuthTokens, resetPassword, + setOpenIDAuthTokens, requestPasswordReset, resendVerificationEmail, - setOpenIDAuthTokens, + generateShortLivedToken, }; diff --git a/api/server/services/Files/Local/crud.js b/api/server/services/Files/Local/crud.js index 7df528c5e1..455d4e0c4f 100644 --- a/api/server/services/Files/Local/crud.js +++ b/api/server/services/Files/Local/crud.js @@ -1,10 +1,11 @@ const fs = require('fs'); const path = require('path'); const axios = require('axios'); +const { logger } = require('@librechat/data-schemas'); const { EModelEndpoint } = require('librechat-data-provider'); +const { generateShortLivedToken } = require('~/server/services/AuthService'); const { getBufferMetadata } = 
require('~/server/utils'); const paths = require('~/config/paths'); -const { logger } = require('~/config'); /** * Saves a file to a specified output path with a new filename. @@ -206,7 +207,7 @@ const deleteLocalFile = async (req, file) => { const cleanFilepath = file.filepath.split('?')[0]; if (file.embedded && process.env.RAG_API_URL) { - const jwtToken = req.headers.authorization.split(' ')[1]; + const jwtToken = generateShortLivedToken(req.user.id); axios.delete(`${process.env.RAG_API_URL}/documents`, { headers: { Authorization: `Bearer ${jwtToken}`, diff --git a/api/server/services/Files/VectorDB/crud.js b/api/server/services/Files/VectorDB/crud.js index 1aeabc6c46..d7018f7669 100644 --- a/api/server/services/Files/VectorDB/crud.js +++ b/api/server/services/Files/VectorDB/crud.js @@ -4,6 +4,7 @@ const FormData = require('form-data'); const { logAxiosError } = require('@librechat/api'); const { logger } = require('@librechat/data-schemas'); const { FileSources } = require('librechat-data-provider'); +const { generateShortLivedToken } = require('~/server/services/AuthService'); /** * Deletes a file from the vector database. 
This function takes a file object, constructs the full path, and @@ -23,7 +24,8 @@ const deleteVectors = async (req, file) => { return; } try { - const jwtToken = req.headers.authorization.split(' ')[1]; + const jwtToken = generateShortLivedToken(req.user.id); + return await axios.delete(`${process.env.RAG_API_URL}/documents`, { headers: { Authorization: `Bearer ${jwtToken}`, @@ -70,7 +72,7 @@ async function uploadVectors({ req, file, file_id, entity_id }) { } try { - const jwtToken = req.headers.authorization.split(' ')[1]; + const jwtToken = generateShortLivedToken(req.user.id); const formData = new FormData(); formData.append('file_id', file_id); formData.append('file', fs.createReadStream(file.path)); diff --git a/api/strategies/openidStrategy.js b/api/strategies/openidStrategy.js index 2449872a9d..63a1aafd5a 100644 --- a/api/strategies/openidStrategy.js +++ b/api/strategies/openidStrategy.js @@ -118,7 +118,7 @@ class CustomOpenIDStrategy extends OpenIDStrategy { */ const exchangeAccessTokenIfNeeded = async (config, accessToken, sub, fromCache = false) => { const tokensCache = getLogStores(CacheKeys.OPENID_EXCHANGED_TOKENS); - const onBehalfFlowRequired = isEnabled(process.env.OPENID_ON_BEHALF_FLOW_FOR_USERINFRO_REQUIRED); + const onBehalfFlowRequired = isEnabled(process.env.OPENID_ON_BEHALF_FLOW_FOR_USERINFO_REQUIRED); if (onBehalfFlowRequired) { if (fromCache) { const cachedToken = await tokensCache.get(sub); @@ -130,7 +130,7 @@ const exchangeAccessTokenIfNeeded = async (config, accessToken, sub, fromCache = config, 'urn:ietf:params:oauth:grant-type:jwt-bearer', { - scope: process.env.OPENID_ON_BEHALF_FLOW_USERINFRO_SCOPE || 'user.read', + scope: process.env.OPENID_ON_BEHALF_FLOW_USERINFO_SCOPE || 'user.read', assertion: accessToken, requested_token_use: 'on_behalf_of', }, From d60ad61325b79ca4a7f1b8981ed2fef4cef13129 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Thu, 26 Jun 2025 19:12:46 
-0400 Subject: [PATCH 18/65] =?UTF-8?q?=F0=9F=8C=8D=20i18n:=20Update=20translati?= =?UTF-8?q?on.json=20with=20latest=20translations=20(#8058)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- client/src/locales/en/translation.json | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/client/src/locales/en/translation.json b/client/src/locales/en/translation.json index 4c6ab1df47..aeb7695b8b 100644 --- a/client/src/locales/en/translation.json +++ b/client/src/locales/en/translation.json @@ -205,10 +205,10 @@ "com_endpoint_google_custom_name_placeholder": "Set a custom name for Google", "com_endpoint_google_maxoutputtokens": "Maximum number of tokens that can be generated in the response. Specify a lower value for shorter responses and a higher value for longer responses. Note: models may stop before reaching this maximum.", "com_endpoint_google_temp": "Higher values = more random, while lower values = more focused and deterministic. We recommend altering this or Top P but not both.", - "com_endpoint_google_topk": "Top-k changes how the model selects tokens for output. A top-k of 1 means the selected token is the most probable among all tokens in the model's vocabulary (also called greedy decoding), while a top-k of 3 means that the next token is selected from among the 3 most probable tokens (using temperature).", - "com_endpoint_google_topp": "Top-p changes how the model selects tokens for output. Tokens are selected from most K (see topK parameter) probable to least until the sum of their probabilities equals the top-p value.", "com_endpoint_google_thinking": "Enables or disables reasoning. This setting is only supported by certain models (2.5 series). For older models, this setting may have no effect.", "com_endpoint_google_thinking_budget": "Guides the number of thinking tokens the model uses. 
The actual amount may exceed or fall below this value depending on the prompt.\n\nThis setting is only supported by certain models (2.5 series). Gemini 2.5 Pro supports 128-32,768 tokens. Gemini 2.5 Flash supports 0-24,576 tokens. Gemini 2.5 Flash Lite supports 512-24,576 tokens.\n\nLeave blank or set to \"-1\" to let the model automatically decide when and how much to think. By default, Gemini 2.5 Flash Lite does not think.", + "com_endpoint_google_topk": "Top-k changes how the model selects tokens for output. A top-k of 1 means the selected token is the most probable among all tokens in the model's vocabulary (also called greedy decoding), while a top-k of 3 means that the next token is selected from among the 3 most probable tokens (using temperature).", + "com_endpoint_google_topp": "Top-p changes how the model selects tokens for output. Tokens are selected from most K (see topK parameter) probable to least until the sum of their probabilities equals the top-p value.", "com_endpoint_instructions_assistants": "Override Instructions", "com_endpoint_instructions_assistants_placeholder": "Overrides the instructions of the assistant. This is useful for modifying the behavior on a per-run basis.", "com_endpoint_max_output_tokens": "Max Output Tokens", @@ -916,6 +916,7 @@ "com_ui_rename_prompt": "Rename Prompt", "com_ui_requires_auth": "Requires Authentication", "com_ui_reset_var": "Reset {{0}}", + "com_ui_reset_zoom": "Reset Zoom", "com_ui_result": "Result", "com_ui_revoke": "Revoke", "com_ui_revoke_info": "Revoke all user provided credentials", @@ -1057,7 +1058,6 @@ "com_ui_x_selected": "{{0}} selected", "com_ui_yes": "Yes", "com_ui_zoom": "Zoom", - "com_ui_reset_zoom": "Reset Zoom", "com_user_message": "You", "com_warning_resubmit_unsupported": "Resubmitting the AI message is not supported for this endpoint." 
-} +} \ No newline at end of file From dd67e463e4bf01eb2d433d1d4dde389b24171576 Mon Sep 17 00:00:00 2001 From: Danny Avila Date: Thu, 26 Jun 2025 19:19:04 -0400 Subject: [PATCH 19/65] =?UTF-8?q?=F0=9F=93=A6=20chore:=20bump=20`pbkdf2`?= =?UTF-8?q?=20to=20v3.1.3=20(#8091)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- package-lock.json | 80 ++++++++++++++++++++++++++++++++++++++++------- 1 file changed, 69 insertions(+), 11 deletions(-) diff --git a/package-lock.json b/package-lock.json index e3a47a4fd4..224bb65c51 100644 --- a/package-lock.json +++ b/package-lock.json @@ -38719,21 +38719,57 @@ "integrity": "sha512-KG8UEiEVkR3wGEb4m5yZkVCzigAD+cVEJck2CzYZO37ZGJfctvVptVO192MwrtPhzONn6go8ylnOdMhKqi4nfg==" }, "node_modules/pbkdf2": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/pbkdf2/-/pbkdf2-3.1.2.tgz", - "integrity": "sha512-iuh7L6jA7JEGu2WxDwtQP1ddOpaJNC4KlDEFfdQajSGgGPNi4OyDc2R7QnbY2bR9QjBVGwgvTdNJZoE7RaxUMA==", + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/pbkdf2/-/pbkdf2-3.1.3.tgz", + "integrity": "sha512-wfRLBZ0feWRhCIkoMB6ete7czJcnNnqRpcoWQBLqatqXXmelSRqfdDK4F3u9T2s2cXas/hQJcryI/4lAL+XTlA==", "dev": true, + "license": "MIT", "dependencies": { - "create-hash": "^1.1.2", - "create-hmac": "^1.1.4", - "ripemd160": "^2.0.1", - "safe-buffer": "^5.0.1", - "sha.js": "^2.4.8" + "create-hash": "~1.1.3", + "create-hmac": "^1.1.7", + "ripemd160": "=2.0.1", + "safe-buffer": "^5.2.1", + "sha.js": "^2.4.11", + "to-buffer": "^1.2.0" }, "engines": { "node": ">=0.12" } }, + "node_modules/pbkdf2/node_modules/create-hash": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/create-hash/-/create-hash-1.1.3.tgz", + "integrity": "sha512-snRpch/kwQhcdlnZKYanNF1m0RDlrCdSKQaH87w1FCFPVPNCQ/Il9QJKAX2jVBZddRdaHBMC+zXa9Gw9tmkNUA==", + "dev": true, + "license": "MIT", + "dependencies": { + "cipher-base": "^1.0.1", + "inherits": "^2.0.1", + "ripemd160": "^2.0.0", + "sha.js": "^2.4.0" + } + }, + 
"node_modules/pbkdf2/node_modules/hash-base": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hash-base/-/hash-base-2.0.2.tgz", + "integrity": "sha512-0TROgQ1/SxE6KmxWSvXHvRj90/Xo1JvZShofnYF+f6ZsGtR4eES7WfrQzPalmyagfKZCXpVnitiRebZulWsbiw==", + "dev": true, + "license": "MIT", + "dependencies": { + "inherits": "^2.0.1" + } + }, + "node_modules/pbkdf2/node_modules/ripemd160": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/ripemd160/-/ripemd160-2.0.1.tgz", + "integrity": "sha512-J7f4wutN8mdbV08MJnXibYpCOPHR+yzy+iQ/AsjMv2j8cLavQ8VGagDFUwwTAdF8FmRKVeNpbTTEwNHCW1g94w==", + "dev": true, + "license": "MIT", + "dependencies": { + "hash-base": "^2.0.0", + "inherits": "^2.0.1" + } + }, "node_modules/peek-readable": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/peek-readable/-/peek-readable-5.0.0.tgz", @@ -39919,9 +39955,9 @@ } }, "node_modules/prettier-eslint/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", "dev": true, "license": "MIT", "dependencies": { @@ -43961,6 +43997,28 @@ "integrity": "sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==", "dev": true }, + "node_modules/to-buffer": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/to-buffer/-/to-buffer-1.2.1.tgz", + "integrity": "sha512-tB82LpAIWjhLYbqjx3X4zEeHN6M8CiuOEy2JY8SEQVdYRe3CCHOFaqrBW1doLDrfpWhplcW7BL+bO3/6S3pcDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "isarray": "^2.0.5", + "safe-buffer": "^5.2.1", + "typed-array-buffer": "^1.0.3" + }, + "engines": { + 
"node": ">= 0.4" + } + }, + "node_modules/to-buffer/node_modules/isarray": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", + "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==", + "dev": true, + "license": "MIT" + }, "node_modules/to-regex-range": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", From 1060ae80401016ff8004e9991a010a316d545ad7 Mon Sep 17 00:00:00 2001 From: Danny Avila Date: Sat, 28 Jun 2025 12:33:43 -0400 Subject: [PATCH 20/65] =?UTF-8?q?=F0=9F=90=9B=20fix:=20Assistants=20Endpoi?= =?UTF-8?q?nt=20Handling=20in=20`createPayload`=20Function=20(#8123)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * 📦 chore: bump librechat-data-provider version to 0.7.89 * 🐛 fix: Assistants endpoint handling in createPayload function --- package-lock.json | 2 +- packages/data-provider/package.json | 2 +- packages/data-provider/src/createPayload.ts | 9 +++++---- 3 files changed, 7 insertions(+), 6 deletions(-) diff --git a/package-lock.json b/package-lock.json index 224bb65c51..10210b9c25 100644 --- a/package-lock.json +++ b/package-lock.json @@ -46717,7 +46717,7 @@ }, "packages/data-provider": { "name": "librechat-data-provider", - "version": "0.7.88", + "version": "0.7.89", "license": "ISC", "dependencies": { "axios": "^1.8.2", diff --git a/packages/data-provider/package.json b/packages/data-provider/package.json index d46bfcf716..cb4e94eb77 100644 --- a/packages/data-provider/package.json +++ b/packages/data-provider/package.json @@ -1,6 +1,6 @@ { "name": "librechat-data-provider", - "version": "0.7.88", + "version": "0.7.89", "description": "data services for librechat apps", "main": "dist/index.js", "module": "dist/index.es.js", diff --git a/packages/data-provider/src/createPayload.ts b/packages/data-provider/src/createPayload.ts index d00ac45ffc..756eb9f49d 
100644 --- a/packages/data-provider/src/createPayload.ts +++ b/packages/data-provider/src/createPayload.ts @@ -13,16 +13,17 @@ export default function createPayload(submission: t.TSubmission) { ephemeralAgent, } = submission; const { conversationId } = s.tConvoUpdateSchema.parse(conversation); - const { endpoint: _e } = endpointOption as { + const { endpoint: _e, endpointType } = endpointOption as { endpoint: s.EModelEndpoint; endpointType?: s.EModelEndpoint; }; const endpoint = _e as s.EModelEndpoint; let server = `${EndpointURLs[s.EModelEndpoint.agents]}/${endpoint}`; - - if (isEdited && s.isAssistantsEndpoint(endpoint)) { - server += '/modify'; + if (s.isAssistantsEndpoint(endpoint)) { + server = + EndpointURLs[(endpointType ?? endpoint) as 'assistants' | 'azureAssistants'] + + (isEdited ? '/modify' : ''); } const payload: t.TPayload = { From 3e1591d4042f2031cc2f08dc011e70751d857dc1 Mon Sep 17 00:00:00 2001 From: matt burnett Date: Sat, 28 Jun 2025 12:35:41 -0400 Subject: [PATCH 21/65] =?UTF-8?q?=F0=9F=A4=96=20fix:=20Remove=20`versions`?= =?UTF-8?q?=20and=20`=5F=5Fv`=20when=20Duplicating=20an=20Agent=20(#8115)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Revert "Add tests for agent duplication controller" This reverts commit 3e7beb1cc336bcfe1c57411e9c151f5e6aa927e4. 
--- .../controllers/agents/__tests__/v1.spec.js | 195 ++++++++++++++++++ api/server/controllers/agents/v1.js | 2 + 2 files changed, 197 insertions(+) create mode 100644 api/server/controllers/agents/__tests__/v1.spec.js diff --git a/api/server/controllers/agents/__tests__/v1.spec.js b/api/server/controllers/agents/__tests__/v1.spec.js new file mode 100644 index 0000000000..b097cd98ce --- /dev/null +++ b/api/server/controllers/agents/__tests__/v1.spec.js @@ -0,0 +1,195 @@ +const { duplicateAgent } = require('../v1'); +const { getAgent, createAgent } = require('~/models/Agent'); +const { getActions } = require('~/models/Action'); +const { nanoid } = require('nanoid'); + +jest.mock('~/models/Agent'); +jest.mock('~/models/Action'); +jest.mock('nanoid'); + +describe('duplicateAgent', () => { + let req, res; + + beforeEach(() => { + req = { + params: { id: 'agent_123' }, + user: { id: 'user_456' }, + }; + res = { + status: jest.fn().mockReturnThis(), + json: jest.fn(), + }; + jest.clearAllMocks(); + }); + + it('should duplicate an agent successfully', async () => { + const mockAgent = { + id: 'agent_123', + name: 'Test Agent', + description: 'Test Description', + instructions: 'Test Instructions', + provider: 'openai', + model: 'gpt-4', + tools: ['file_search'], + actions: [], + author: 'user_789', + versions: [{ name: 'Test Agent', version: 1 }], + __v: 0, + }; + + const mockNewAgent = { + id: 'agent_new_123', + name: 'Test Agent (1/2/23, 12:34)', + description: 'Test Description', + instructions: 'Test Instructions', + provider: 'openai', + model: 'gpt-4', + tools: ['file_search'], + actions: [], + author: 'user_456', + versions: [ + { + name: 'Test Agent (1/2/23, 12:34)', + description: 'Test Description', + instructions: 'Test Instructions', + provider: 'openai', + model: 'gpt-4', + tools: ['file_search'], + actions: [], + createdAt: new Date(), + updatedAt: new Date(), + }, + ], + }; + + getAgent.mockResolvedValue(mockAgent); + getActions.mockResolvedValue([]); + 
nanoid.mockReturnValue('new_123'); + createAgent.mockResolvedValue(mockNewAgent); + + await duplicateAgent(req, res); + + expect(getAgent).toHaveBeenCalledWith({ id: 'agent_123' }); + expect(getActions).toHaveBeenCalledWith({ agent_id: 'agent_123' }, true); + expect(createAgent).toHaveBeenCalledWith( + expect.objectContaining({ + id: 'agent_new_123', + author: 'user_456', + name: expect.stringContaining('Test Agent ('), + description: 'Test Description', + instructions: 'Test Instructions', + provider: 'openai', + model: 'gpt-4', + tools: ['file_search'], + actions: [], + }), + ); + + expect(createAgent).toHaveBeenCalledWith( + expect.not.objectContaining({ + versions: expect.anything(), + __v: expect.anything(), + }), + ); + + expect(res.status).toHaveBeenCalledWith(201); + expect(res.json).toHaveBeenCalledWith({ + agent: mockNewAgent, + actions: [], + }); + }); + + it('should ensure duplicated agent has clean versions array without nested fields', async () => { + const mockAgent = { + id: 'agent_123', + name: 'Test Agent', + description: 'Test Description', + versions: [ + { + name: 'Test Agent', + versions: [{ name: 'Nested' }], + __v: 1, + }, + ], + __v: 2, + }; + + const mockNewAgent = { + id: 'agent_new_123', + name: 'Test Agent (1/2/23, 12:34)', + description: 'Test Description', + versions: [ + { + name: 'Test Agent (1/2/23, 12:34)', + description: 'Test Description', + createdAt: new Date(), + updatedAt: new Date(), + }, + ], + }; + + getAgent.mockResolvedValue(mockAgent); + getActions.mockResolvedValue([]); + nanoid.mockReturnValue('new_123'); + createAgent.mockResolvedValue(mockNewAgent); + + await duplicateAgent(req, res); + + expect(mockNewAgent.versions).toHaveLength(1); + + const firstVersion = mockNewAgent.versions[0]; + expect(firstVersion).not.toHaveProperty('versions'); + expect(firstVersion).not.toHaveProperty('__v'); + + expect(mockNewAgent).not.toHaveProperty('__v'); + + expect(res.status).toHaveBeenCalledWith(201); + }); + + it('should return 
404 if agent not found', async () => { + getAgent.mockResolvedValue(null); + + await duplicateAgent(req, res); + + expect(res.status).toHaveBeenCalledWith(404); + expect(res.json).toHaveBeenCalledWith({ + error: 'Agent not found', + status: 'error', + }); + }); + + it('should handle tool_resources.ocr correctly', async () => { + const mockAgent = { + id: 'agent_123', + name: 'Test Agent', + tool_resources: { + ocr: { enabled: true, config: 'test' }, + other: { should: 'not be copied' }, + }, + }; + + getAgent.mockResolvedValue(mockAgent); + getActions.mockResolvedValue([]); + nanoid.mockReturnValue('new_123'); + createAgent.mockResolvedValue({ id: 'agent_new_123' }); + + await duplicateAgent(req, res); + + expect(createAgent).toHaveBeenCalledWith( + expect.objectContaining({ + tool_resources: { + ocr: { enabled: true, config: 'test' }, + }, + }), + ); + }); + + it('should handle errors gracefully', async () => { + getAgent.mockRejectedValue(new Error('Database error')); + + await duplicateAgent(req, res); + + expect(res.status).toHaveBeenCalledWith(500); + expect(res.json).toHaveBeenCalledWith({ error: 'Database error' }); + }); +}); diff --git a/api/server/controllers/agents/v1.js b/api/server/controllers/agents/v1.js index 18bd7190f0..764a2e05d4 100644 --- a/api/server/controllers/agents/v1.js +++ b/api/server/controllers/agents/v1.js @@ -242,6 +242,8 @@ const duplicateAgentHandler = async (req, res) => { createdAt: _createdAt, updatedAt: _updatedAt, tool_resources: _tool_resources = {}, + versions: _versions, + __v: _v, ...cloneData } = agent; cloneData.name = `${agent.name} (${new Date().toLocaleString('en-US', { From 3f3cfefc52cba7bb3fda7f85a7103e540eed40c8 Mon Sep 17 00:00:00 2001 From: Danny Avila Date: Sat, 28 Jun 2025 13:26:03 -0400 Subject: [PATCH 22/65] =?UTF-8?q?=F0=9F=97=92=EF=B8=8F=20feat:=20Add=20Goo?= =?UTF-8?q?gle=20Vertex=20AI=20Mistral=20OCR=20Strategy=20(#8125)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit * Implemented new uploadGoogleVertexMistralOCR function for processing OCR using Google Vertex AI. * Added vertexMistralOCRStrategy to handle file uploads. * Updated FileSources and OCRStrategy enums to include vertexai_mistral_ocr. * Introduced helper functions for JWT creation and Google service account configuration loading. --- api/server/services/Files/strategies.js | 28 ++- packages/api/src/files/mistral/crud.ts | 218 ++++++++++++++++++++++ packages/data-provider/src/config.ts | 1 + packages/data-provider/src/types/files.ts | 1 + 4 files changed, 247 insertions(+), 1 deletion(-) diff --git a/api/server/services/Files/strategies.js b/api/server/services/Files/strategies.js index 41dcd5518a..4f8067142b 100644 --- a/api/server/services/Files/strategies.js +++ b/api/server/services/Files/strategies.js @@ -1,5 +1,9 @@ const { FileSources } = require('librechat-data-provider'); -const { uploadMistralOCR, uploadAzureMistralOCR } = require('@librechat/api'); +const { + uploadMistralOCR, + uploadAzureMistralOCR, + uploadGoogleVertexMistralOCR, +} = require('@librechat/api'); const { getFirebaseURL, prepareImageURL, @@ -222,6 +226,26 @@ const azureMistralOCRStrategy = () => ({ handleFileUpload: uploadAzureMistralOCR, }); +const vertexMistralOCRStrategy = () => ({ + /** @type {typeof saveFileFromURL | null} */ + saveURL: null, + /** @type {typeof getLocalFileURL | null} */ + getFileURL: null, + /** @type {typeof saveLocalBuffer | null} */ + saveBuffer: null, + /** @type {typeof processLocalAvatar | null} */ + processAvatar: null, + /** @type {typeof uploadLocalImage | null} */ + handleImageUpload: null, + /** @type {typeof prepareImagesLocal | null} */ + prepareImagePayload: null, + /** @type {typeof deleteLocalFile | null} */ + deleteFile: null, + /** @type {typeof getLocalFileStream | null} */ + getDownloadStream: null, + handleFileUpload: uploadGoogleVertexMistralOCR, +}); + // Strategy Selector const getStrategyFunctions = 
(fileSource) => { if (fileSource === FileSources.firebase) { @@ -244,6 +268,8 @@ const getStrategyFunctions = (fileSource) => { return mistralOCRStrategy(); } else if (fileSource === FileSources.azure_mistral_ocr) { return azureMistralOCRStrategy(); + } else if (fileSource === FileSources.vertexai_mistral_ocr) { + return vertexMistralOCRStrategy(); } else { throw new Error('Invalid file source'); } diff --git a/packages/api/src/files/mistral/crud.ts b/packages/api/src/files/mistral/crud.ts index d89be8f14d..f3ad74b731 100644 --- a/packages/api/src/files/mistral/crud.ts +++ b/packages/api/src/files/mistral/crud.ts @@ -32,6 +32,13 @@ interface AuthConfig { baseURL: string; } +/** Helper type for Google service account */ +interface GoogleServiceAccount { + client_email?: string; + private_key?: string; + project_id?: string; +} + /** Helper type for OCR request context */ interface OCRContext { req: Pick & { @@ -424,3 +431,214 @@ export const uploadAzureMistralOCR = async ( throw createOCRError(error, 'Error uploading document to Azure Mistral OCR API:'); } }; + +/** + * Loads Google service account configuration + */ +async function loadGoogleAuthConfig(): Promise<{ + serviceAccount: GoogleServiceAccount; + accessToken: string; +}> { + /** Path from current file to project root auth.json */ + const authJsonPath = path.join(__dirname, '..', '..', '..', 'api', 'data', 'auth.json'); + + let serviceKey: GoogleServiceAccount; + try { + const authJsonContent = fs.readFileSync(authJsonPath, 'utf8'); + serviceKey = JSON.parse(authJsonContent) as GoogleServiceAccount; + } catch { + throw new Error(`Google service account not found at ${authJsonPath}`); + } + + if (!serviceKey.client_email || !serviceKey.private_key || !serviceKey.project_id) { + throw new Error('Invalid Google service account configuration'); + } + + const jwt = await createJWT(serviceKey); + const accessToken = await exchangeJWTForAccessToken(jwt); + + return { + serviceAccount: serviceKey, + accessToken, + 
}; +} + +/** + * Creates a JWT token manually + */ +async function createJWT(serviceKey: GoogleServiceAccount): Promise { + const crypto = await import('crypto'); + + const header = { + alg: 'RS256', + typ: 'JWT', + }; + + const now = Math.floor(Date.now() / 1000); + const payload = { + iss: serviceKey.client_email, + scope: 'https://www.googleapis.com/auth/cloud-platform', + aud: 'https://oauth2.googleapis.com/token', + exp: now + 3600, + iat: now, + }; + + const encodedHeader = Buffer.from(JSON.stringify(header)).toString('base64url'); + const encodedPayload = Buffer.from(JSON.stringify(payload)).toString('base64url'); + + const signatureInput = `${encodedHeader}.${encodedPayload}`; + + const sign = crypto.createSign('RSA-SHA256'); + sign.update(signatureInput); + sign.end(); + + const signature = sign.sign(serviceKey.private_key!, 'base64url'); + + return `${signatureInput}.${signature}`; +} + +/** + * Exchanges JWT for access token + */ +async function exchangeJWTForAccessToken(jwt: string): Promise { + const response = await axios.post( + 'https://oauth2.googleapis.com/token', + new URLSearchParams({ + grant_type: 'urn:ietf:params:oauth:grant-type:jwt-bearer', + assertion: jwt, + }), + { + headers: { + 'Content-Type': 'application/x-www-form-urlencoded', + }, + }, + ); + + if (!response.data?.access_token) { + throw new Error('No access token in response'); + } + + return response.data.access_token; +} + +/** + * Performs OCR using Google Vertex AI + */ +async function performGoogleVertexOCR({ + url, + accessToken, + projectId, + model, + documentType = 'document_url', +}: { + url: string; + accessToken: string; + projectId: string; + model: string; + documentType?: 'document_url' | 'image_url'; +}): Promise { + const location = process.env.GOOGLE_LOC || 'us-central1'; + const modelId = model || 'mistral-ocr-2505'; + + let baseURL: string; + if (location === 'global') { + baseURL = 
`https://aiplatform.googleapis.com/v1/projects/${projectId}/locations/global/publishers/mistralai/models/${modelId}:rawPredict`; + } else { + baseURL = `https://${location}-aiplatform.googleapis.com/v1/projects/${projectId}/locations/${location}/publishers/mistralai/models/${modelId}:rawPredict`; + } + + const documentKey = documentType === 'image_url' ? 'image_url' : 'document_url'; + + const requestBody = { + model: modelId, + document: { + type: documentType, + [documentKey]: url, + }, + include_image_base64: true, + }; + + logger.debug('Sending request to Google Vertex AI:', { + url: baseURL, + body: { + ...requestBody, + document: { ...requestBody.document, [documentKey]: 'base64_data_hidden' }, + }, + }); + + return axios + .post(baseURL, requestBody, { + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${accessToken}`, + Accept: 'application/json', + }, + }) + .then((res) => { + logger.debug('Google Vertex AI response received'); + return res.data; + }) + .catch((error) => { + if (error.response?.data) { + logger.error('Vertex AI error response: ' + JSON.stringify(error.response.data, null, 2)); + } + throw new Error( + logAxiosError({ + error: error as AxiosError, + message: 'Error calling Google Vertex AI Mistral OCR', + }), + ); + }); +} + +/** + * Use Google Vertex AI Mistral OCR API to process the OCR result. + * + * @param params - The params object. + * @param params.req - The request object from Express. It should have a `user` property with an `id` + * representing the user + * @param params.file - The file object, which is part of the request. The file object should + * have a `mimetype` property that tells us the file type + * @param params.loadAuthValues - Function to load authentication values + * @returns - The result object containing the processed `text` and `images` (not currently used), + * along with the `filename` and `bytes` properties. 
+ */ +export const uploadGoogleVertexMistralOCR = async ( + context: OCRContext, +): Promise => { + try { + const { serviceAccount, accessToken } = await loadGoogleAuthConfig(); + const model = getModelConfig(context.req.app.locals?.ocr); + + const buffer = fs.readFileSync(context.file.path); + const base64 = buffer.toString('base64'); + const base64Prefix = `data:${context.file.mimetype || 'application/pdf'};base64,`; + + const documentType = getDocumentType(context.file); + const ocrResult = await performGoogleVertexOCR({ + url: `${base64Prefix}${base64}`, + accessToken, + projectId: serviceAccount.project_id!, + model, + documentType, + }); + + if (!ocrResult || !ocrResult.pages || ocrResult.pages.length === 0) { + throw new Error( + 'No OCR result returned from service, may be down or the file is not supported.', + ); + } + + const { text, images } = processOCRResult(ocrResult); + + return { + filename: context.file.originalname, + bytes: text.length * 4, + filepath: FileSources.vertexai_mistral_ocr as string, + text, + images, + }; + } catch (error) { + throw createOCRError(error, 'Error uploading document to Google Vertex AI Mistral OCR:'); + } +}; diff --git a/packages/data-provider/src/config.ts b/packages/data-provider/src/config.ts index 4d154f4958..cf69603bf1 100644 --- a/packages/data-provider/src/config.ts +++ b/packages/data-provider/src/config.ts @@ -615,6 +615,7 @@ export enum OCRStrategy { MISTRAL_OCR = 'mistral_ocr', CUSTOM_OCR = 'custom_ocr', AZURE_MISTRAL_OCR = 'azure_mistral_ocr', + VERTEXAI_MISTRAL_OCR = 'vertexai_mistral_ocr', } export enum SearchCategories { diff --git a/packages/data-provider/src/types/files.ts b/packages/data-provider/src/types/files.ts index 95b74a4216..fd60278053 100644 --- a/packages/data-provider/src/types/files.ts +++ b/packages/data-provider/src/types/files.ts @@ -11,6 +11,7 @@ export enum FileSources { execute_code = 'execute_code', mistral_ocr = 'mistral_ocr', azure_mistral_ocr = 'azure_mistral_ocr', + 
vertexai_mistral_ocr = 'vertexai_mistral_ocr', text = 'text', } From 20100e120bfd4d2571a8a759104036110a014692 Mon Sep 17 00:00:00 2001 From: Danny Avila Date: Sun, 29 Jun 2025 17:09:37 -0400 Subject: [PATCH 23/65] =?UTF-8?q?=F0=9F=94=91=20feat:=20Set=20Google=20Ser?= =?UTF-8?q?vice=20Key=20File=20Path=20(#8130)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../services/Config/loadAsyncEndpoints.js | 32 +++++++++++++------ .../services/Endpoints/google/initialize.js | 16 +++++++++- packages/api/src/files/mistral/crud.ts | 13 +++++--- 3 files changed, 47 insertions(+), 14 deletions(-) diff --git a/api/server/services/Config/loadAsyncEndpoints.js b/api/server/services/Config/loadAsyncEndpoints.js index 0282146cd1..edded906d8 100644 --- a/api/server/services/Config/loadAsyncEndpoints.js +++ b/api/server/services/Config/loadAsyncEndpoints.js @@ -1,3 +1,5 @@ +const fs = require('fs'); +const path = require('path'); const { EModelEndpoint } = require('librechat-data-provider'); const { isUserProvided } = require('~/server/utils'); const { config } = require('./EndpointService'); @@ -11,9 +13,21 @@ const { openAIApiKey, azureOpenAIApiKey, useAzurePlugins, userProvidedOpenAI, go async function loadAsyncEndpoints(req) { let i = 0; let serviceKey, googleUserProvides; + const serviceKeyPath = + process.env.GOOGLE_SERVICE_KEY_FILE_PATH || + path.join(__dirname, '../../..', 'data', 'auth.json'); + try { - serviceKey = require('~/data/auth.json'); - } catch (e) { + if (process.env.GOOGLE_SERVICE_KEY_FILE_PATH) { + const absolutePath = path.isAbsolute(serviceKeyPath) + ? 
serviceKeyPath + : path.resolve(serviceKeyPath); + const fileContent = fs.readFileSync(absolutePath, 'utf8'); + serviceKey = JSON.parse(fileContent); + } else { + serviceKey = require('~/data/auth.json'); + } + } catch { if (i === 0) { i++; } @@ -32,14 +46,14 @@ async function loadAsyncEndpoints(req) { const gptPlugins = useAzure || openAIApiKey || azureOpenAIApiKey ? { - availableAgents: ['classic', 'functions'], - userProvide: useAzure ? false : userProvidedOpenAI, - userProvideURL: useAzure - ? false - : config[EModelEndpoint.openAI]?.userProvideURL || + availableAgents: ['classic', 'functions'], + userProvide: useAzure ? false : userProvidedOpenAI, + userProvideURL: useAzure + ? false + : config[EModelEndpoint.openAI]?.userProvideURL || config[EModelEndpoint.azureOpenAI]?.userProvideURL, - azure: useAzurePlugins || useAzure, - } + azure: useAzurePlugins || useAzure, + } : false; return { google, gptPlugins }; diff --git a/api/server/services/Endpoints/google/initialize.js b/api/server/services/Endpoints/google/initialize.js index 60e874d5b8..169d625e3f 100644 --- a/api/server/services/Endpoints/google/initialize.js +++ b/api/server/services/Endpoints/google/initialize.js @@ -1,3 +1,5 @@ +const fs = require('fs'); +const path = require('path'); const { getGoogleConfig, isEnabled } = require('@librechat/api'); const { EModelEndpoint, AuthKeys } = require('librechat-data-provider'); const { getUserKey, checkUserKeyExpiry } = require('~/server/services/UserService'); @@ -15,8 +17,20 @@ const initializeClient = async ({ req, res, endpointOption, overrideModel, optio } let serviceKey = {}; + try { - serviceKey = require('~/data/auth.json'); + if (process.env.GOOGLE_SERVICE_KEY_FILE_PATH) { + const serviceKeyPath = + process.env.GOOGLE_SERVICE_KEY_FILE_PATH || + path.join(__dirname, '../../../../..', 'data', 'auth.json'); + const absolutePath = path.isAbsolute(serviceKeyPath) + ? 
serviceKeyPath + : path.resolve(serviceKeyPath); + const fileContent = fs.readFileSync(absolutePath, 'utf8'); + serviceKey = JSON.parse(fileContent); + } else { + serviceKey = require('~/data/auth.json'); + } } catch (_e) { // Do nothing } diff --git a/packages/api/src/files/mistral/crud.ts b/packages/api/src/files/mistral/crud.ts index f3ad74b731..5b0d10659a 100644 --- a/packages/api/src/files/mistral/crud.ts +++ b/packages/api/src/files/mistral/crud.ts @@ -439,15 +439,20 @@ async function loadGoogleAuthConfig(): Promise<{ serviceAccount: GoogleServiceAccount; accessToken: string; }> { - /** Path from current file to project root auth.json */ - const authJsonPath = path.join(__dirname, '..', '..', '..', 'api', 'data', 'auth.json'); + /** Path from environment variable or default location */ + const serviceKeyPath = + process.env.GOOGLE_SERVICE_KEY_FILE_PATH || + path.join(__dirname, '..', '..', '..', 'api', 'data', 'auth.json'); + const absolutePath = path.isAbsolute(serviceKeyPath) + ? 
serviceKeyPath + : path.resolve(serviceKeyPath); let serviceKey: GoogleServiceAccount; try { - const authJsonContent = fs.readFileSync(authJsonPath, 'utf8'); + const authJsonContent = fs.readFileSync(absolutePath, 'utf8'); serviceKey = JSON.parse(authJsonContent) as GoogleServiceAccount; } catch { - throw new Error(`Google service account not found at ${authJsonPath}`); + throw new Error(`Google service account not found at ${absolutePath}`); } if (!serviceKey.client_email || !serviceKey.private_key || !serviceKey.project_id) { From f869d772f7c65002a3b1e7aa5cac9f06d9a8f3ca Mon Sep 17 00:00:00 2001 From: Danny Avila Date: Mon, 30 Jun 2025 18:34:47 -0400 Subject: [PATCH 24/65] =?UTF-8?q?=F0=9F=AA=90=20feat:=20Initial=20OpenAI?= =?UTF-8?q?=20Responses=20API=20Support=20(#8149)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: update @librechat/agents to v2.4.47 * WIP: temporary auto-toggle responses api for o1/o3-pro * feat: Enable Responses API for OpenAI models - Updated the OpenAI client initialization to check for the useResponsesApi parameter in model options. - Added translations for enabling the Responses API in the UI. - Introduced useResponsesApi parameter in data provider settings and schemas. - Updated relevant schemas to include useResponsesApi for conversation and preset configurations. * refactor: Remove useResponsesApi check from OpenAI client initialization and update translation for Responses API - Removed the check for useResponsesApi in the OpenAI client initialization. - Updated the translation for enabling the Responses API to clarify its functionality. * chore: update @librechat/agents dependency to version 2.4.48 * chore: update @librechat/agents dependency to version 2.4.49 * chore: linting * chore: linting * feat: Enhance DynamicSlider and validation for enumMappings - Added support for enumMappings in DynamicSlider to display values correctly based on enum settings. 
- Implemented validation for enumMappings in the generate function to ensure all options have corresponding mappings. - Added tests for handling empty string options and incomplete enumMappings in the generate.spec.ts file. * feat: Enhance DynamicSlider localization support - Added localization handling for mapped values in DynamicSlider when using enumMappings. - Updated the logic to check if the mapped value is a localization key and return the localized string if applicable. - Adjusted dependencies in useCallback hooks to include localize for proper functionality. * feat: Add reasoning summary and effort options to OpenAI configuration and UI * feat: Add enumMappings for ImageDetail options in parameter settings * style: Improve styling for DynamicSlider component labels and inputs * chore: Update reasoning effort description and parameter order for OpenAI params --------- Co-authored-by: Dustin Healy --- api/package.json | 2 +- .../SidePanel/Parameters/DynamicSlider.tsx | 85 +++++++++++--- client/src/locales/en/translation.json | 13 ++- package-lock.json | 10 +- packages/api/package.json | 2 +- packages/api/src/agents/run.ts | 6 + packages/api/src/endpoints/openai/llm.ts | 108 +++++++++++------- packages/api/src/types/run.ts | 5 +- packages/data-provider/specs/generate.spec.ts | 77 ++++++++++++- packages/data-provider/src/generate.ts | 28 ++++- .../data-provider/src/parameterSettings.ts | 64 ++++++++++- packages/data-provider/src/schemas.ts | 26 ++++- packages/data-schemas/src/schema/defaults.ts | 8 +- packages/data-schemas/src/schema/preset.ts | 2 + packages/data-schemas/src/types/convo.ts | 2 + 15 files changed, 355 insertions(+), 83 deletions(-) diff --git a/api/package.json b/api/package.json index 571db53aa6..fd002d58ce 100644 --- a/api/package.json +++ b/api/package.json @@ -48,7 +48,7 @@ "@langchain/google-genai": "^0.2.13", "@langchain/google-vertexai": "^0.2.13", "@langchain/textsplitters": "^0.1.0", - "@librechat/agents": "^2.4.46", + 
"@librechat/agents": "^2.4.49", "@librechat/api": "*", "@librechat/data-schemas": "*", "@node-saml/passport-saml": "^5.0.0", diff --git a/client/src/components/SidePanel/Parameters/DynamicSlider.tsx b/client/src/components/SidePanel/Parameters/DynamicSlider.tsx index a9142468e9..d1b9bd9678 100644 --- a/client/src/components/SidePanel/Parameters/DynamicSlider.tsx +++ b/client/src/components/SidePanel/Parameters/DynamicSlider.tsx @@ -18,6 +18,7 @@ function DynamicSlider({ setOption, optionType, options, + enumMappings, readonly = false, showDefault = false, includeInput = true, @@ -60,24 +61,68 @@ function DynamicSlider({ const enumToNumeric = useMemo(() => { if (isEnum && options) { - return options.reduce((acc, mapping, index) => { - acc[mapping] = index; - return acc; - }, {} as Record); + return options.reduce( + (acc, mapping, index) => { + acc[mapping] = index; + return acc; + }, + {} as Record, + ); } return {}; }, [isEnum, options]); const valueToEnumOption = useMemo(() => { if (isEnum && options) { - return options.reduce((acc, option, index) => { - acc[index] = option; - return acc; - }, {} as Record); + return options.reduce( + (acc, option, index) => { + acc[index] = option; + return acc; + }, + {} as Record, + ); } return {}; }, [isEnum, options]); + const getDisplayValue = useCallback( + (value: string | number | undefined | null): string => { + if (isEnum && enumMappings && value != null) { + const stringValue = String(value); + // Check if the value exists in enumMappings + if (stringValue in enumMappings) { + const mappedValue = String(enumMappings[stringValue]); + // Check if the mapped value is a localization key + if (mappedValue.startsWith('com_')) { + return localize(mappedValue as TranslationKeys) ?? mappedValue; + } + return mappedValue; + } + } + // Always return a string for Input component compatibility + if (value != null) { + return String(value); + } + return String(defaultValue ?? 
''); + }, + [isEnum, enumMappings, defaultValue, localize], + ); + + const getDefaultDisplayValue = useCallback((): string => { + if (defaultValue != null && enumMappings) { + const stringDefault = String(defaultValue); + if (stringDefault in enumMappings) { + const mappedValue = String(enumMappings[stringDefault]); + // Check if the mapped value is a localization key + if (mappedValue.startsWith('com_')) { + return localize(mappedValue as TranslationKeys) ?? mappedValue; + } + return mappedValue; + } + } + return String(defaultValue ?? ''); + }, [defaultValue, enumMappings, localize]); + const handleValueChange = useCallback( (value: number) => { if (isEnum) { @@ -115,12 +160,12 @@ function DynamicSlider({
@@ -132,13 +177,13 @@ function DynamicSlider({ onChange={(value) => setInputValue(Number(value))} max={range ? range.max : (options?.length ?? 0) - 1} min={range ? range.min : 0} - step={range ? range.step ?? 1 : 1} + step={range ? (range.step ?? 1) : 1} controls={false} className={cn( defaultTextProps, cn( optionText, - 'reset-rc-number-input reset-rc-number-input-text-right h-auto w-12 border-0 group-hover/temp:border-gray-200', + 'reset-rc-number-input reset-rc-number-input-text-right h-auto w-12 border-0 py-1 text-xs group-hover/temp:border-gray-200', ), )} /> @@ -146,13 +191,13 @@ function DynamicSlider({ ({})} className={cn( defaultTextProps, cn( optionText, - 'reset-rc-number-input reset-rc-number-input-text-right h-auto w-12 border-0 group-hover/temp:border-gray-200', + 'reset-rc-number-input reset-rc-number-input-text-right h-auto w-12 border-0 py-1 text-xs group-hover/temp:border-gray-200', ), )} /> @@ -164,19 +209,23 @@ function DynamicSlider({ value={[ isEnum ? enumToNumeric[(selectedValue as number) ?? ''] - : (inputValue as number) ?? (defaultValue as number), + : ((inputValue as number) ?? (defaultValue as number)), ]} onValueChange={(value) => handleValueChange(value[0])} onDoubleClick={() => setInputValue(defaultValue as string | number)} max={max} min={range ? range.min : 0} - step={range ? range.step ?? 1 : 1} + step={range ? (range.step ?? 1) : 1} className="flex h-4 w-full" /> {description && ( )} diff --git a/client/src/locales/en/translation.json b/client/src/locales/en/translation.json index aeb7695b8b..ad133e7a35 100644 --- a/client/src/locales/en/translation.json +++ b/client/src/locales/en/translation.json @@ -225,12 +225,14 @@ "com_endpoint_openai_max_tokens": "Optional 'max_tokens' field, representing the maximum number of tokens that can be generated in the chat completion. The total length of input tokens and generated tokens is limited by the models context length. 
You may experience errors if this number exceeds the max context tokens.", "com_endpoint_openai_pres": "Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics.", "com_endpoint_openai_prompt_prefix_placeholder": "Set custom instructions to include in System Message. Default: none", - "com_endpoint_openai_reasoning_effort": "o1 and o3 models only: constrains effort on reasoning for reasoning models. Reducing reasoning effort can result in faster responses and fewer tokens used on reasoning in a response.", + "com_endpoint_openai_reasoning_effort": "Reasoning models only: constrains effort on reasoning for reasoning models. Reducing reasoning effort can result in faster responses and fewer tokens used on reasoning in a response.", + "com_endpoint_openai_reasoning_summary": "Responses API only: A summary of the reasoning performed by the model. This can be useful for debugging and understanding the model's reasoning process. Set to none,auto, concise, or detailed.", "com_endpoint_openai_resend": "Resend all previously attached images. Note: this can significantly increase token cost and you may experience errors with many image attachments.", "com_endpoint_openai_resend_files": "Resend all previously attached files. Note: this will increase token cost and you may experience errors with many attachments.", "com_endpoint_openai_stop": "Up to 4 sequences where the API will stop generating further tokens.", "com_endpoint_openai_temp": "Higher values = more random, while lower values = more focused and deterministic. We recommend altering this or Top P but not both.", "com_endpoint_openai_topp": "An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. 
We recommend altering this or temperature but not both.", + "com_endpoint_openai_use_responses_api": "Use the Responses API instead of Chat Completions, which includes extended features from OpenAI. Required for o1-pro, o3-pro, and to enable reasoning summaries.", "com_endpoint_output": "Output", "com_endpoint_plug_image_detail": "Image Detail", "com_endpoint_plug_resend_files": "Resend Files", @@ -261,6 +263,7 @@ "com_endpoint_prompt_prefix_assistants_placeholder": "Set additional instructions or context on top of the Assistant's main instructions. Ignored if empty.", "com_endpoint_prompt_prefix_placeholder": "Set custom instructions or context. Ignored if empty.", "com_endpoint_reasoning_effort": "Reasoning Effort", + "com_endpoint_reasoning_summary": "Reasoning Summary", "com_endpoint_save_as_preset": "Save As Preset", "com_endpoint_search": "Search endpoint by name", "com_endpoint_search_endpoint_models": "Search {{0}} models...", @@ -276,6 +279,7 @@ "com_endpoint_top_k": "Top K", "com_endpoint_top_p": "Top P", "com_endpoint_use_active_assistant": "Use Active Assistant", + "com_endpoint_use_responses_api": "Use Responses API", "com_error_expired_user_key": "Provided key for {{0}} expired at {{1}}. Please provide a new key and try again.", "com_error_files_dupe": "Duplicate file detected.", "com_error_files_empty": "Empty files are not allowed.", @@ -820,6 +824,11 @@ "com_ui_loading": "Loading...", "com_ui_locked": "Locked", "com_ui_logo": "{{0}} Logo", + "com_ui_low": "Low", + "com_ui_concise": "Concise", + "com_ui_detailed": "Detailed", + "com_ui_high": "High", + "com_ui_medium": "Medium", "com_ui_manage": "Manage", "com_ui_max_tags": "Maximum number allowed is {{0}}, using latest values.", "com_ui_mcp_dialog_desc": "Please enter the necessary information below.", @@ -1060,4 +1069,4 @@ "com_ui_zoom": "Zoom", "com_user_message": "You", "com_warning_resubmit_unsupported": "Resubmitting the AI message is not supported for this endpoint." 
-} \ No newline at end of file +} diff --git a/package-lock.json b/package-lock.json index 10210b9c25..8d04047ef4 100644 --- a/package-lock.json +++ b/package-lock.json @@ -64,7 +64,7 @@ "@langchain/google-genai": "^0.2.13", "@langchain/google-vertexai": "^0.2.13", "@langchain/textsplitters": "^0.1.0", - "@librechat/agents": "^2.4.46", + "@librechat/agents": "^2.4.49", "@librechat/api": "*", "@librechat/data-schemas": "*", "@node-saml/passport-saml": "^5.0.0", @@ -19436,9 +19436,9 @@ } }, "node_modules/@librechat/agents": { - "version": "2.4.46", - "resolved": "https://registry.npmjs.org/@librechat/agents/-/agents-2.4.46.tgz", - "integrity": "sha512-zR27U19/WGF3HN64oBbiaFgjjWHaF7BjYzRFWzQKEkk+iEzCe59IpuEZUizQ54YcY02nhhh6S3MNUjhAJwMYVA==", + "version": "2.4.49", + "resolved": "https://registry.npmjs.org/@librechat/agents/-/agents-2.4.49.tgz", + "integrity": "sha512-Bnp/PZsg1VgnmGS80tW4ssKpcqUZ7xysKesV/8gGaUBF1VDBiYBh0gC6ugfJhltNOv93rEVSucjPlTAuHimNCg==", "license": "MIT", "dependencies": { "@langchain/anthropic": "^0.3.23", @@ -46624,7 +46624,7 @@ "typescript": "^5.0.4" }, "peerDependencies": { - "@librechat/agents": "^2.4.46", + "@librechat/agents": "^2.4.49", "@librechat/data-schemas": "*", "@modelcontextprotocol/sdk": "^1.12.3", "axios": "^1.8.2", diff --git a/packages/api/package.json b/packages/api/package.json index ed2b70965d..a4c41ec537 100644 --- a/packages/api/package.json +++ b/packages/api/package.json @@ -69,7 +69,7 @@ "registry": "https://registry.npmjs.org/" }, "peerDependencies": { - "@librechat/agents": "^2.4.46", + "@librechat/agents": "^2.4.49", "@librechat/data-schemas": "*", "@modelcontextprotocol/sdk": "^1.12.3", "axios": "^1.8.2", diff --git a/packages/api/src/agents/run.ts b/packages/api/src/agents/run.ts index e12d2cf2b6..9f07a1fb9c 100644 --- a/packages/api/src/agents/run.ts +++ b/packages/api/src/agents/run.ts @@ -1,6 +1,7 @@ import { Run, Providers } from '@librechat/agents'; import { providerEndpointMap, KnownEndpoints } from 
'librechat-data-provider'; import type { + OpenAIClientOptions, StandardGraphConfig, EventHandler, GenericTool, @@ -76,6 +77,11 @@ export async function createRun({ (agent.endpoint && agent.endpoint.toLowerCase().includes(KnownEndpoints.openrouter)) ) { reasoningKey = 'reasoning'; + } else if ( + (llmConfig as OpenAIClientOptions).useResponsesApi === true && + (provider === Providers.OPENAI || provider === Providers.AZURE) + ) { + reasoningKey = 'reasoning'; } const graphConfig: StandardGraphConfig = { diff --git a/packages/api/src/endpoints/openai/llm.ts b/packages/api/src/endpoints/openai/llm.ts index ddf61016e8..0c3135c554 100644 --- a/packages/api/src/endpoints/openai/llm.ts +++ b/packages/api/src/endpoints/openai/llm.ts @@ -1,9 +1,23 @@ import { ProxyAgent } from 'undici'; -import { KnownEndpoints } from 'librechat-data-provider'; +import { KnownEndpoints, removeNullishValues } from 'librechat-data-provider'; +import type { OpenAI } from 'openai'; import type * as t from '~/types'; import { sanitizeModelName, constructAzureURL } from '~/utils/azure'; import { isEnabled } from '~/utils/common'; +function hasReasoningParams({ + reasoning_effort, + reasoning_summary, +}: { + reasoning_effort?: string | null; + reasoning_summary?: string | null; +}): boolean { + return ( + (reasoning_effort != null && reasoning_effort !== '') || + (reasoning_summary != null && reasoning_summary !== '') + ); +} + /** * Generates configuration options for creating a language model (LLM) instance. * @param apiKey - The API key for authentication. 
@@ -17,7 +31,7 @@ export function getOpenAIConfig( endpoint?: string | null, ): t.LLMConfigResult { const { - modelOptions = {}, + modelOptions: _modelOptions = {}, reverseProxyUrl, defaultQuery, headers, @@ -27,7 +41,7 @@ export function getOpenAIConfig( addParams, dropParams, } = options; - + const { reasoning_effort, reasoning_summary, ...modelOptions } = _modelOptions; const llmConfig: Partial & Partial = Object.assign( { streaming, @@ -40,39 +54,6 @@ export function getOpenAIConfig( Object.assign(llmConfig, addParams); } - // Note: OpenAI Web Search models do not support any known parameters besides `max_tokens` - if (modelOptions.model && /gpt-4o.*search/.test(modelOptions.model)) { - const searchExcludeParams = [ - 'frequency_penalty', - 'presence_penalty', - 'temperature', - 'top_p', - 'top_k', - 'stop', - 'logit_bias', - 'seed', - 'response_format', - 'n', - 'logprobs', - 'user', - ]; - - const updatedDropParams = dropParams || []; - const combinedDropParams = [...new Set([...updatedDropParams, ...searchExcludeParams])]; - - combinedDropParams.forEach((param) => { - if (param in llmConfig) { - delete llmConfig[param as keyof t.ClientOptions]; - } - }); - } else if (dropParams && Array.isArray(dropParams)) { - dropParams.forEach((param) => { - if (param in llmConfig) { - delete llmConfig[param as keyof t.ClientOptions]; - } - }); - } - let useOpenRouter = false; const configOptions: t.OpenAIConfiguration = {}; @@ -139,11 +120,19 @@ export function getOpenAIConfig( configOptions.organization = process.env.OPENAI_ORGANIZATION; } - if (useOpenRouter && llmConfig.reasoning_effort != null) { - llmConfig.reasoning = { - effort: llmConfig.reasoning_effort, - }; - delete llmConfig.reasoning_effort; + if ( + hasReasoningParams({ reasoning_effort, reasoning_summary }) && + (llmConfig.useResponsesApi === true || useOpenRouter) + ) { + llmConfig.reasoning = removeNullishValues( + { + effort: reasoning_effort, + summary: reasoning_summary, + }, + true, + ) as 
OpenAI.Reasoning; + } else if (hasReasoningParams({ reasoning_effort })) { + llmConfig.reasoning_effort = reasoning_effort; } if (llmConfig.max_tokens != null) { @@ -151,6 +140,43 @@ export function getOpenAIConfig( delete llmConfig.max_tokens; } + /** + * Note: OpenAI Web Search models do not support any known parameters besides `max_tokens` + */ + if (modelOptions.model && /gpt-4o.*search/.test(modelOptions.model)) { + const searchExcludeParams = [ + 'frequency_penalty', + 'presence_penalty', + 'reasoning', + 'reasoning_effort', + 'temperature', + 'top_p', + 'top_k', + 'stop', + 'logit_bias', + 'seed', + 'response_format', + 'n', + 'logprobs', + 'user', + ]; + + const updatedDropParams = dropParams || []; + const combinedDropParams = [...new Set([...updatedDropParams, ...searchExcludeParams])]; + + combinedDropParams.forEach((param) => { + if (param in llmConfig) { + delete llmConfig[param as keyof t.ClientOptions]; + } + }); + } else if (dropParams && Array.isArray(dropParams)) { + dropParams.forEach((param) => { + if (param in llmConfig) { + delete llmConfig[param as keyof t.ClientOptions]; + } + }); + } + return { llmConfig, configOptions, diff --git a/packages/api/src/types/run.ts b/packages/api/src/types/run.ts index 72c61a587f..81dce780d4 100644 --- a/packages/api/src/types/run.ts +++ b/packages/api/src/types/run.ts @@ -1,4 +1,4 @@ -import type { Providers } from '@librechat/agents'; +import type { Providers, ClientOptions } from '@librechat/agents'; import type { AgentModelParameters } from 'librechat-data-provider'; import type { OpenAIConfiguration } from './openai'; @@ -8,4 +8,5 @@ export type RunLLMConfig = { streamUsage: boolean; usage?: boolean; configuration?: OpenAIConfiguration; -} & AgentModelParameters; +} & AgentModelParameters & + ClientOptions; diff --git a/packages/data-provider/specs/generate.spec.ts b/packages/data-provider/specs/generate.spec.ts index 64ca86a036..2c3cda0f17 100644 --- a/packages/data-provider/specs/generate.spec.ts +++ 
b/packages/data-provider/specs/generate.spec.ts @@ -1,4 +1,3 @@ -/* eslint-disable jest/no-conditional-expect */ import { ZodError, z } from 'zod'; import { generateDynamicSchema, validateSettingDefinitions, OptionTypes } from '../src/generate'; import type { SettingsConfiguration } from '../src/generate'; @@ -97,6 +96,37 @@ describe('generateDynamicSchema', () => { expect(result['data']).toEqual({ testEnum: 'option2' }); }); + it('should generate a schema for enum settings with empty string option', () => { + const settings: SettingsConfiguration = [ + { + key: 'testEnumWithEmpty', + description: 'A test enum setting with empty string', + type: 'enum', + default: '', + options: ['', 'option1', 'option2'], + enumMappings: { + '': 'None', + option1: 'First Option', + option2: 'Second Option', + }, + component: 'slider', + columnSpan: 2, + label: 'Test Enum with Empty String', + }, + ]; + + const schema = generateDynamicSchema(settings); + const result = schema.safeParse({ testEnumWithEmpty: '' }); + + expect(result.success).toBeTruthy(); + expect(result['data']).toEqual({ testEnumWithEmpty: '' }); + + // Test with non-empty option + const result2 = schema.safeParse({ testEnumWithEmpty: 'option1' }); + expect(result2.success).toBeTruthy(); + expect(result2['data']).toEqual({ testEnumWithEmpty: 'option1' }); + }); + it('should fail for incorrect enum value', () => { const settings: SettingsConfiguration = [ { @@ -481,6 +511,47 @@ describe('validateSettingDefinitions', () => { expect(() => validateSettingDefinitions(settingsExceedingMaxTags)).toThrow(ZodError); }); + + // Test for incomplete enumMappings + test('should throw error for incomplete enumMappings', () => { + const settingsWithIncompleteEnumMappings: SettingsConfiguration = [ + { + key: 'displayMode', + type: 'enum', + component: 'dropdown', + options: ['light', 'dark', 'auto'], + enumMappings: { + light: 'Light Mode', + dark: 'Dark Mode', + // Missing mapping for 'auto' + }, + optionType: 
OptionTypes.Custom, + }, + ]; + + expect(() => validateSettingDefinitions(settingsWithIncompleteEnumMappings)).toThrow(ZodError); + }); + + // Test for complete enumMappings including empty string + test('should not throw error for complete enumMappings including empty string', () => { + const settingsWithCompleteEnumMappings: SettingsConfiguration = [ + { + key: 'selectionMode', + type: 'enum', + component: 'slider', + options: ['', 'single', 'multiple'], + enumMappings: { + '': 'None', + single: 'Single Selection', + multiple: 'Multiple Selection', + }, + default: '', + optionType: OptionTypes.Custom, + }, + ]; + + expect(() => validateSettingDefinitions(settingsWithCompleteEnumMappings)).not.toThrow(); + }); }); const settingsConfiguration: SettingsConfiguration = [ @@ -515,7 +586,7 @@ const settingsConfiguration: SettingsConfiguration = [ { key: 'presence_penalty', description: - 'Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model\'s likelihood to talk about new topics.', + "Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics.", type: 'number', default: 0, range: { @@ -529,7 +600,7 @@ const settingsConfiguration: SettingsConfiguration = [ { key: 'frequency_penalty', description: - 'Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model\'s likelihood to repeat the same line verbatim.', + "Number between -2.0 and 2.0. 
Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.", type: 'number', default: 0, range: { diff --git a/packages/data-provider/src/generate.ts b/packages/data-provider/src/generate.ts index bf0b2c1acd..21f63a34d9 100644 --- a/packages/data-provider/src/generate.ts +++ b/packages/data-provider/src/generate.ts @@ -467,7 +467,11 @@ export function validateSettingDefinitions(settings: SettingsConfiguration): voi } /* Default value checks */ - if (setting.type === SettingTypes.Number && isNaN(setting.default as number) && setting.default != null) { + if ( + setting.type === SettingTypes.Number && + isNaN(setting.default as number) && + setting.default != null + ) { errors.push({ code: ZodIssueCode.custom, message: `Invalid default value for setting ${setting.key}. Must be a number.`, @@ -475,7 +479,11 @@ export function validateSettingDefinitions(settings: SettingsConfiguration): voi }); } - if (setting.type === SettingTypes.Boolean && typeof setting.default !== 'boolean' && setting.default != null) { + if ( + setting.type === SettingTypes.Boolean && + typeof setting.default !== 'boolean' && + setting.default != null + ) { errors.push({ code: ZodIssueCode.custom, message: `Invalid default value for setting ${setting.key}. 
Must be a boolean.`, @@ -485,7 +493,8 @@ export function validateSettingDefinitions(settings: SettingsConfiguration): voi if ( (setting.type === SettingTypes.String || setting.type === SettingTypes.Enum) && - typeof setting.default !== 'string' && setting.default != null + typeof setting.default !== 'string' && + setting.default != null ) { errors.push({ code: ZodIssueCode.custom, @@ -520,6 +529,19 @@ export function validateSettingDefinitions(settings: SettingsConfiguration): voi path: ['default'], }); } + + // Validate enumMappings + if (setting.enumMappings && setting.type === SettingTypes.Enum && setting.options) { + for (const option of setting.options) { + if (!(option in setting.enumMappings)) { + errors.push({ + code: ZodIssueCode.custom, + message: `Missing enumMapping for option "${option}" in setting ${setting.key}.`, + path: ['enumMappings'], + }); + } + } + } } if (errors.length > 0) { diff --git a/packages/data-provider/src/parameterSettings.ts b/packages/data-provider/src/parameterSettings.ts index 8b1dd222a4..91de6a83af 100644 --- a/packages/data-provider/src/parameterSettings.ts +++ b/packages/data-provider/src/parameterSettings.ts @@ -4,6 +4,7 @@ import { openAISettings, googleSettings, ReasoningEffort, + ReasoningSummary, BedrockProviders, anthropicSettings, } from './types'; @@ -71,6 +72,11 @@ const baseDefinitions: Record = { default: ImageDetail.auto, component: 'slider', options: [ImageDetail.low, ImageDetail.auto, ImageDetail.high], + enumMappings: { + [ImageDetail.low]: 'com_ui_low', + [ImageDetail.auto]: 'com_ui_auto', + [ImageDetail.high]: 'com_ui_high', + }, optionType: 'conversation', columnSpan: 2, }, @@ -211,9 +217,57 @@ const openAIParams: Record = { description: 'com_endpoint_openai_reasoning_effort', descriptionCode: true, type: 'enum', - default: ReasoningEffort.medium, + default: ReasoningEffort.none, component: 'slider', - options: [ReasoningEffort.low, ReasoningEffort.medium, ReasoningEffort.high], + options: [ + 
ReasoningEffort.none, + ReasoningEffort.low, + ReasoningEffort.medium, + ReasoningEffort.high, + ], + enumMappings: { + [ReasoningEffort.none]: 'com_ui_none', + [ReasoningEffort.low]: 'com_ui_low', + [ReasoningEffort.medium]: 'com_ui_medium', + [ReasoningEffort.high]: 'com_ui_high', + }, + optionType: 'model', + columnSpan: 4, + }, + useResponsesApi: { + key: 'useResponsesApi', + label: 'com_endpoint_use_responses_api', + labelCode: true, + description: 'com_endpoint_openai_use_responses_api', + descriptionCode: true, + type: 'boolean', + default: false, + component: 'switch', + optionType: 'model', + showDefault: false, + columnSpan: 2, + }, + reasoning_summary: { + key: 'reasoning_summary', + label: 'com_endpoint_reasoning_summary', + labelCode: true, + description: 'com_endpoint_openai_reasoning_summary', + descriptionCode: true, + type: 'enum', + default: ReasoningSummary.none, + component: 'slider', + options: [ + ReasoningSummary.none, + ReasoningSummary.auto, + ReasoningSummary.concise, + ReasoningSummary.detailed, + ], + enumMappings: { + [ReasoningSummary.none]: 'com_ui_none', + [ReasoningSummary.auto]: 'com_ui_auto', + [ReasoningSummary.concise]: 'com_ui_concise', + [ReasoningSummary.detailed]: 'com_ui_detailed', + }, optionType: 'model', columnSpan: 4, }, @@ -526,6 +580,8 @@ const openAI: SettingsConfiguration = [ librechat.resendFiles, baseDefinitions.imageDetail, openAIParams.reasoning_effort, + openAIParams.useResponsesApi, + openAIParams.reasoning_summary, ]; const openAICol1: SettingsConfiguration = [ @@ -542,9 +598,11 @@ const openAICol2: SettingsConfiguration = [ openAIParams.frequency_penalty, openAIParams.presence_penalty, baseDefinitions.stop, - openAIParams.reasoning_effort, librechat.resendFiles, baseDefinitions.imageDetail, + openAIParams.reasoning_effort, + openAIParams.useResponsesApi, + openAIParams.reasoning_summary, ]; const anthropicConfig: SettingsConfiguration = [ diff --git a/packages/data-provider/src/schemas.ts 
b/packages/data-provider/src/schemas.ts index 463150d36f..340e60d34a 100644 --- a/packages/data-provider/src/schemas.ts +++ b/packages/data-provider/src/schemas.ts @@ -112,11 +112,19 @@ export enum ImageDetail { } export enum ReasoningEffort { + none = '', low = 'low', medium = 'medium', high = 'high', } +export enum ReasoningSummary { + none = '', + auto = 'auto', + concise = 'concise', + detailed = 'detailed', +} + export const imageDetailNumeric = { [ImageDetail.low]: 0, [ImageDetail.auto]: 1, @@ -131,6 +139,7 @@ export const imageDetailValue = { export const eImageDetailSchema = z.nativeEnum(ImageDetail); export const eReasoningEffortSchema = z.nativeEnum(ReasoningEffort); +export const eReasoningSummarySchema = z.nativeEnum(ReasoningSummary); export const defaultAssistantFormValues = { assistant: '', @@ -619,8 +628,11 @@ export const tConversationSchema = z.object({ file_ids: z.array(z.string()).optional(), /* vision */ imageDetail: eImageDetailSchema.optional(), - /* OpenAI: o1 only */ - reasoning_effort: eReasoningEffortSchema.optional(), + /* OpenAI: Reasoning models only */ + reasoning_effort: eReasoningEffortSchema.optional().nullable(), + reasoning_summary: eReasoningSummarySchema.optional().nullable(), + /* OpenAI: use Responses API */ + useResponsesApi: z.boolean().optional(), /* assistant */ assistant_id: z.string().optional(), /* agents */ @@ -717,6 +729,12 @@ export const tQueryParamsSchema = tConversationSchema top_p: true, /** @endpoints openAI, custom, azureOpenAI */ max_tokens: true, + /** @endpoints openAI, custom, azureOpenAI */ + reasoning_effort: true, + /** @endpoints openAI, custom, azureOpenAI */ + reasoning_summary: true, + /** @endpoints openAI, custom, azureOpenAI */ + useResponsesApi: true, /** @endpoints google, anthropic, bedrock */ topP: true, /** @endpoints google, anthropic */ @@ -1044,10 +1062,12 @@ export const openAIBaseSchema = tConversationSchema.pick({ maxContextTokens: true, max_tokens: true, reasoning_effort: true, + 
reasoning_summary: true, + useResponsesApi: true, }); export const openAISchema = openAIBaseSchema - .transform((obj: Partial) => removeNullishValues(obj)) + .transform((obj: Partial) => removeNullishValues(obj, true)) .catch(() => ({})); export const compactGoogleSchema = googleBaseSchema diff --git a/packages/data-schemas/src/schema/defaults.ts b/packages/data-schemas/src/schema/defaults.ts index d6e8ed851c..d42771d09c 100644 --- a/packages/data-schemas/src/schema/defaults.ts +++ b/packages/data-schemas/src/schema/defaults.ts @@ -131,8 +131,14 @@ export const conversationPreset = { max_tokens: { type: Number, }, - /** omni models only */ + useResponsesApi: { + type: Boolean, + }, + /** Reasoning models only */ reasoning_effort: { type: String, }, + reasoning_summary: { + type: String, + }, }; diff --git a/packages/data-schemas/src/schema/preset.ts b/packages/data-schemas/src/schema/preset.ts index 95f1e276d9..1b128413f3 100644 --- a/packages/data-schemas/src/schema/preset.ts +++ b/packages/data-schemas/src/schema/preset.ts @@ -46,6 +46,8 @@ export interface IPreset extends Document { maxContextTokens?: number; max_tokens?: number; reasoning_effort?: string; + reasoning_summary?: string; + useResponsesApi?: boolean; // end of additional fields agentOptions?: unknown; } diff --git a/packages/data-schemas/src/types/convo.ts b/packages/data-schemas/src/types/convo.ts index f088db8c92..b97f179b53 100644 --- a/packages/data-schemas/src/types/convo.ts +++ b/packages/data-schemas/src/types/convo.ts @@ -45,6 +45,8 @@ export interface IConversation extends Document { maxContextTokens?: number; max_tokens?: number; reasoning_effort?: string; + reasoning_summary?: string; + useResponsesApi?: boolean; // Additional fields files?: string[]; expiredAt?: Date; From 313539d1ed89bb24917e5e894fa2e5f79c9f7750 Mon Sep 17 00:00:00 2001 From: Danny Avila Date: Mon, 30 Jun 2025 18:51:50 -0400 Subject: [PATCH 25/65] =?UTF-8?q?=F0=9F=94=91=20refactor:=20Prioritize=20`?= 
=?UTF-8?q?GOOGLE=5FKEY`=20When=20GCP=20Service=20Key=20File=20Provided=20?= =?UTF-8?q?(#8150)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- packages/api/src/endpoints/google/llm.ts | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/packages/api/src/endpoints/google/llm.ts b/packages/api/src/endpoints/google/llm.ts index 0721acce29..c2c26e3251 100644 --- a/packages/api/src/endpoints/google/llm.ts +++ b/packages/api/src/endpoints/google/llm.ts @@ -98,8 +98,8 @@ export function getGoogleConfig( const serviceKey = typeof serviceKeyRaw === 'string' ? JSON.parse(serviceKeyRaw) : (serviceKeyRaw ?? {}); - const project_id = serviceKey?.project_id ?? null; const apiKey = creds[AuthKeys.GOOGLE_API_KEY] ?? null; + const project_id = !apiKey ? (serviceKey?.project_id ?? null) : null; const reverseProxyUrl = options.reverseProxyUrl; const authHeader = options.authHeader; @@ -128,7 +128,7 @@ export function getGoogleConfig( } // If we have a GCP project => Vertex AI - if (project_id && provider === Providers.VERTEXAI) { + if (provider === Providers.VERTEXAI) { (llmConfig as VertexAIClientOptions).authOptions = { credentials: { ...serviceKey }, projectId: project_id, @@ -136,6 +136,10 @@ export function getGoogleConfig( (llmConfig as VertexAIClientOptions).location = process.env.GOOGLE_LOC || 'us-central1'; } else if (apiKey && provider === Providers.GOOGLE) { llmConfig.apiKey = apiKey; + } else { + throw new Error( + `Invalid credentials provided. 
Please provide either a valid API key or service account credentials for Google Cloud.`, + ); } const shouldEnableThinking = From 55d63caaf4eb869876340c86fe8188cf5fdfad49 Mon Sep 17 00:00:00 2001 From: Samuel Path Date: Tue, 1 Jul 2025 15:20:33 +0200 Subject: [PATCH 26/65] =?UTF-8?q?=F0=9F=92=BB=20ci:=20Make=20Unit=20Tests?= =?UTF-8?q?=20Pass=20on=20MacOS=20(#8165)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- api/server/routes/files/multer.spec.js | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/api/server/routes/files/multer.spec.js b/api/server/routes/files/multer.spec.js index 0324262a71..2fb9147aef 100644 --- a/api/server/routes/files/multer.spec.js +++ b/api/server/routes/files/multer.spec.js @@ -477,7 +477,9 @@ describe('Multer Configuration', () => { done(new Error('Expected mkdirSync to throw an error but no error was thrown')); } catch (error) { // This is the expected behavior - mkdirSync throws synchronously for invalid paths - expect(error.code).toBe('EACCES'); + // On Linux, this typically returns EACCES (permission denied) + // On macOS/Darwin, this returns ENOENT (no such file or directory) + expect(['EACCES', 'ENOENT']).toContain(error.code); done(); } }); From a648ad3d13371a98ec5cd16378d5d0cccdf187cd Mon Sep 17 00:00:00 2001 From: Samuel Path Date: Tue, 1 Jul 2025 16:05:00 +0200 Subject: [PATCH 27/65] =?UTF-8?q?=E2=9C=85=20fix:=20Agent=20MCP=20Tools=20?= =?UTF-8?q?Checkbox=20Inactive=20When=20Hidden=20(#8166)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- client/src/components/SidePanel/Agents/AgentTool.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/src/components/SidePanel/Agents/AgentTool.tsx b/client/src/components/SidePanel/Agents/AgentTool.tsx index 6ea613dc78..5703cede0a 100644 --- a/client/src/components/SidePanel/Agents/AgentTool.tsx +++ b/client/src/components/SidePanel/Agents/AgentTool.tsx 
@@ -226,7 +226,7 @@ export default function AgentTool({ }} className={cn( 'h-4 w-4 rounded border border-gray-300 transition-all duration-200 hover:border-gray-400 dark:border-gray-600 dark:hover:border-gray-500', - isExpanded ? 'opacity-100' : 'opacity-0', + isExpanded ? 'visible' : 'pointer-events-none invisible', )} onClick={(e) => e.stopPropagation()} onKeyDown={(e) => { From 434289fe929f3e7412980b74baa32d365776255f Mon Sep 17 00:00:00 2001 From: Danny Avila Date: Tue, 1 Jul 2025 15:43:10 -0400 Subject: [PATCH 28/65] =?UTF-8?q?=F0=9F=94=80=20feat:=20Save=20&=20Submit?= =?UTF-8?q?=20Message=20Content=20Parts=20(#8171)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * 🐛 fix: Enhance provider validation and error handling in getProviderConfig function * WIP: edit text part * refactor: Allow updating of both TEXT and THINK content types in message updates * WIP: first pass, save & submit * chore: remove legacy generation user message field * feat: merge edited content * fix: update placeholder and description for bedrock setting * fix: remove unsupported warning message for AI resubmission --- api/app/clients/BaseClient.js | 85 +++++++++++++++++-- api/server/controllers/agents/request.js | 11 ++- api/server/routes/messages.js | 7 +- api/server/services/Endpoints/index.js | 17 ++++ client/src/common/types.ts | 5 ++ .../Chat/Messages/Content/ContentParts.tsx | 13 ++- .../Messages/Content/Parts/EditTextPart.tsx | 39 ++++----- client/src/hooks/Chat/useChatFunctions.ts | 68 +++++++-------- client/src/hooks/SSE/useStepHandler.ts | 59 +++++++++++-- client/src/locales/en/translation.json | 3 +- packages/data-provider/src/createPayload.ts | 2 + .../data-provider/src/parameterSettings.ts | 4 +- packages/data-provider/src/schemas.ts | 1 + packages/data-provider/src/types.ts | 10 +++ 14 files changed, 240 insertions(+), 84 deletions(-) diff --git a/api/app/clients/BaseClient.js b/api/app/clients/BaseClient.js index 
c8f4228f10..0598f0da21 100644 --- a/api/app/clients/BaseClient.js +++ b/api/app/clients/BaseClient.js @@ -13,7 +13,6 @@ const { const { getMessages, saveMessage, updateMessage, saveConvo, getConvo } = require('~/models'); const { checkBalance } = require('~/models/balanceMethods'); const { truncateToolCallOutputs } = require('./prompts'); -const { addSpaceIfNeeded } = require('~/server/utils'); const { getFiles } = require('~/models/File'); const TextStream = require('./TextStream'); const { logger } = require('~/config'); @@ -572,7 +571,7 @@ class BaseClient { }); } - const { generation = '' } = opts; + const { editedContent } = opts; // It's not necessary to push to currentMessages // depending on subclass implementation of handling messages @@ -587,11 +586,21 @@ class BaseClient { isCreatedByUser: false, model: this.modelOptions?.model ?? this.model, sender: this.sender, - text: generation, }; this.currentMessages.push(userMessage, latestMessage); - } else { - latestMessage.text = generation; + } else if (editedContent != null) { + // Handle editedContent for content parts + if (editedContent && latestMessage.content && Array.isArray(latestMessage.content)) { + const { index, text, type } = editedContent; + if (index >= 0 && index < latestMessage.content.length) { + const contentPart = latestMessage.content[index]; + if (type === ContentTypes.THINK && contentPart.type === ContentTypes.THINK) { + contentPart[ContentTypes.THINK] = text; + } else if (type === ContentTypes.TEXT && contentPart.type === ContentTypes.TEXT) { + contentPart[ContentTypes.TEXT] = text; + } + } + } } this.continued = true; } else { @@ -672,16 +681,32 @@ class BaseClient { }; if (typeof completion === 'string') { - responseMessage.text = addSpaceIfNeeded(generation) + completion; + responseMessage.text = completion; } else if ( Array.isArray(completion) && (this.clientName === EModelEndpoint.agents || isParamEndpoint(this.options.endpoint, this.options.endpointType)) ) { responseMessage.text 
= ''; - responseMessage.content = completion; + + if (!opts.editedContent || this.currentMessages.length === 0) { + responseMessage.content = completion; + } else { + const latestMessage = this.currentMessages[this.currentMessages.length - 1]; + if (!latestMessage?.content) { + responseMessage.content = completion; + } else { + const existingContent = [...latestMessage.content]; + const { type: editedType } = opts.editedContent; + responseMessage.content = this.mergeEditedContent( + existingContent, + completion, + editedType, + ); + } + } } else if (Array.isArray(completion)) { - responseMessage.text = addSpaceIfNeeded(generation) + completion.join(''); + responseMessage.text = completion.join(''); } if ( @@ -1095,6 +1120,50 @@ class BaseClient { return numTokens; } + /** + * Merges completion content with existing content when editing TEXT or THINK types + * @param {Array} existingContent - The existing content array + * @param {Array} newCompletion - The new completion content + * @param {string} editedType - The type of content being edited + * @returns {Array} The merged content array + */ + mergeEditedContent(existingContent, newCompletion, editedType) { + if (!newCompletion.length) { + return existingContent.concat(newCompletion); + } + + if (editedType !== ContentTypes.TEXT && editedType !== ContentTypes.THINK) { + return existingContent.concat(newCompletion); + } + + const lastIndex = existingContent.length - 1; + const lastExisting = existingContent[lastIndex]; + const firstNew = newCompletion[0]; + + if (lastExisting?.type !== firstNew?.type || firstNew?.type !== editedType) { + return existingContent.concat(newCompletion); + } + + const mergedContent = [...existingContent]; + if (editedType === ContentTypes.TEXT) { + mergedContent[lastIndex] = { + ...mergedContent[lastIndex], + [ContentTypes.TEXT]: + (mergedContent[lastIndex][ContentTypes.TEXT] || '') + (firstNew[ContentTypes.TEXT] || ''), + }; + } else { + mergedContent[lastIndex] = { + 
...mergedContent[lastIndex], + [ContentTypes.THINK]: + (mergedContent[lastIndex][ContentTypes.THINK] || '') + + (firstNew[ContentTypes.THINK] || ''), + }; + } + + // Add remaining completion items + return mergedContent.concat(newCompletion.slice(1)); + } + async sendPayload(payload, opts = {}) { if (opts && typeof opts === 'object') { this.setOptions(opts); diff --git a/api/server/controllers/agents/request.js b/api/server/controllers/agents/request.js index 5d55991e19..2c8e424b5d 100644 --- a/api/server/controllers/agents/request.js +++ b/api/server/controllers/agents/request.js @@ -14,8 +14,11 @@ const AgentController = async (req, res, next, initializeClient, addTitle) => { text, endpointOption, conversationId, + isContinued = false, + editedContent = null, parentMessageId = null, overrideParentMessageId = null, + responseMessageId: editedResponseMessageId = null, } = req.body; let sender; @@ -67,7 +70,7 @@ const AgentController = async (req, res, next, initializeClient, addTitle) => { handler(); } } catch (e) { - // Ignore cleanup errors + logger.error('[AgentController] Error in cleanup handler', e); } } } @@ -155,7 +158,7 @@ const AgentController = async (req, res, next, initializeClient, addTitle) => { try { res.removeListener('close', closeHandler); } catch (e) { - // Ignore + logger.error('[AgentController] Error removing close listener', e); } }); @@ -163,10 +166,14 @@ const AgentController = async (req, res, next, initializeClient, addTitle) => { user: userId, onStart, getReqData, + isContinued, + editedContent, conversationId, parentMessageId, abortController, overrideParentMessageId, + isEdited: !!editedContent, + responseMessageId: editedResponseMessageId, progressOptions: { res, }, diff --git a/api/server/routes/messages.js b/api/server/routes/messages.js index 356dd25097..0a277a1bd6 100644 --- a/api/server/routes/messages.js +++ b/api/server/routes/messages.js @@ -235,12 +235,13 @@ router.put('/:conversationId/:messageId', validateMessageReq, async 
(req, res) = return res.status(400).json({ error: 'Content part not found' }); } - if (updatedContent[index].type !== ContentTypes.TEXT) { + const currentPartType = updatedContent[index].type; + if (currentPartType !== ContentTypes.TEXT && currentPartType !== ContentTypes.THINK) { return res.status(400).json({ error: 'Cannot update non-text content' }); } - const oldText = updatedContent[index].text; - updatedContent[index] = { type: ContentTypes.TEXT, text }; + const oldText = updatedContent[index][currentPartType]; + updatedContent[index] = { type: currentPartType, [currentPartType]: text }; let tokenCount = message.tokenCount; if (tokenCount !== undefined) { diff --git a/api/server/services/Endpoints/index.js b/api/server/services/Endpoints/index.js index b6e398366b..8171789418 100644 --- a/api/server/services/Endpoints/index.js +++ b/api/server/services/Endpoints/index.js @@ -7,6 +7,16 @@ const initCustom = require('~/server/services/Endpoints/custom/initialize'); const initGoogle = require('~/server/services/Endpoints/google/initialize'); const { getCustomEndpointConfig } = require('~/server/services/Config'); +/** Check if the provider is a known custom provider + * @param {string | undefined} [provider] - The provider string + * @returns {boolean} - True if the provider is a known custom provider, false otherwise + */ +function isKnownCustomProvider(provider) { + return [Providers.XAI, Providers.OLLAMA, Providers.DEEPSEEK, Providers.OPENROUTER].includes( + provider || '', + ); +} + const providerConfigMap = { [Providers.XAI]: initCustom, [Providers.OLLAMA]: initCustom, @@ -46,6 +56,13 @@ async function getProviderConfig(provider) { overrideProvider = Providers.OPENAI; } + if (isKnownCustomProvider(overrideProvider)) { + customEndpointConfig = await getCustomEndpointConfig(provider); + if (!customEndpointConfig) { + throw new Error(`Provider ${provider} not supported`); + } + } + return { getOptions, overrideProvider, diff --git a/client/src/common/types.ts 
b/client/src/common/types.ts index c7f2d6788a..9349b7695e 100644 --- a/client/src/common/types.ts +++ b/client/src/common/types.ts @@ -336,6 +336,11 @@ export type TAskProps = { export type TOptions = { editedMessageId?: string | null; editedText?: string | null; + editedContent?: { + index: number; + text: string; + type: 'text' | 'think'; + }; isRegenerate?: boolean; isContinued?: boolean; isEdited?: boolean; diff --git a/client/src/components/Chat/Messages/Content/ContentParts.tsx b/client/src/components/Chat/Messages/Content/ContentParts.tsx index 0a1b4616a0..49f6be255a 100644 --- a/client/src/components/Chat/Messages/Content/ContentParts.tsx +++ b/client/src/components/Chat/Messages/Content/ContentParts.tsx @@ -81,14 +81,23 @@ const ContentParts = memo( return ( <> {content.map((part, idx) => { - if (part?.type !== ContentTypes.TEXT || typeof part.text !== 'string') { + if (!part) { + return null; + } + const isTextPart = + part?.type === ContentTypes.TEXT || + typeof (part as unknown as Agents.MessageContentText)?.text !== 'string'; + const isThinkPart = + part?.type === ContentTypes.THINK || + typeof (part as unknown as Agents.ReasoningDeltaUpdate)?.think !== 'string'; + if (!isTextPart && !isThinkPart) { return null; } return ( & { +}: Omit & { index: number; messageId: string; + part: Agents.MessageContentText | Agents.ReasoningDeltaUpdate; }) => { const localize = useLocalize(); const { addedIndex } = useAddedChatContext(); - const { getMessages, setMessages, conversation } = useChatContext(); + const { ask, getMessages, setMessages, conversation } = useChatContext(); const [latestMultiMessage, setLatestMultiMessage] = useRecoilState( store.latestMessageFamily(addedIndex), ); @@ -34,15 +36,16 @@ const EditTextPart = ({ [getMessages, messageId], ); + const chatDirection = useRecoilValue(store.chatDirection); + const textAreaRef = useRef(null); const updateMessageContentMutation = useUpdateMessageContentMutation(conversationId ?? 
''); - const chatDirection = useRecoilValue(store.chatDirection).toLowerCase(); - const isRTL = chatDirection === 'rtl'; + const isRTL = chatDirection?.toLowerCase() === 'rtl'; const { register, handleSubmit, setValue } = useForm({ defaultValues: { - text: text ?? '', + text: (ContentTypes.THINK in part ? part.think : part.text) || '', }, }); @@ -55,15 +58,7 @@ const EditTextPart = ({ } }, []); - /* - const resubmitMessage = () => { - showToast({ - status: 'warning', - message: localize('com_warning_resubmit_unsupported'), - }); - - // const resubmitMessage = (data: { text: string }) => { - // Not supported by AWS Bedrock + const resubmitMessage = (data: { text: string }) => { const messages = getMessages(); const parentMessage = messages?.find((msg) => msg.messageId === message?.parentMessageId); @@ -73,17 +68,19 @@ const EditTextPart = ({ ask( { ...parentMessage }, { - editedText: data.text, + editedContent: { + index, + text: data.text, + type: part.type, + }, editedMessageId: messageId, isRegenerate: true, isEdited: true, }, ); - setSiblingIdx((siblingIdx ?? 0) - 1); enterEdit(true); }; - */ const updateMessage = (data: { text: string }) => { const messages = getMessages(); @@ -167,13 +164,13 @@ const EditTextPart = ({ />
- {/* */} +