Merge branch 'dev' into feat/prompt-enhancement

Marco Beretta 2025-06-23 14:48:46 +02:00 committed by GitHub
commit 3d261a969d
365 changed files with 23826 additions and 8790 deletions


@@ -515,6 +515,18 @@ EMAIL_PASSWORD=
 EMAIL_FROM_NAME=
 EMAIL_FROM=noreply@librechat.ai
+
+#========================#
+#      Mailgun API       #
+#========================#
+
+# MAILGUN_API_KEY=your-mailgun-api-key
+# MAILGUN_DOMAIN=mg.yourdomain.com
+# EMAIL_FROM=noreply@yourdomain.com
+# EMAIL_FROM_NAME="LibreChat"
+
+# # Optional: For EU region
+# MAILGUN_HOST=https://api.eu.mailgun.net
+
 #========================#
 #      Firebase CDN      #
 #========================#
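
The mailer wiring itself isn't shown in this diff; as a rough sketch of what these variables correspond to, Mailgun's HTTP API accepts a form-encoded POST authenticated as the `api` user. The `sendViaMailgun` helper below is illustrative, not the project's code:

```js
// Illustrative only: how the MAILGUN_* variables above map onto Mailgun's
// message-send endpoint (POST {host}/v3/{domain}/messages).
async function sendViaMailgun({ to, subject, text }) {
  const host = process.env.MAILGUN_HOST || 'https://api.mailgun.net';
  const domain = process.env.MAILGUN_DOMAIN;
  const auth = Buffer.from(`api:${process.env.MAILGUN_API_KEY}`).toString('base64');
  const res = await fetch(`${host}/v3/${domain}/messages`, {
    method: 'POST',
    headers: { Authorization: `Basic ${auth}` },
    body: new URLSearchParams({
      from: `${process.env.EMAIL_FROM_NAME} <${process.env.EMAIL_FROM}>`,
      to,
      subject,
      text,
    }),
  });
  if (!res.ok) {
    throw new Error(`Mailgun request failed: ${res.status}`);
  }
  return res.json();
}
```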


@@ -30,8 +30,8 @@ Project maintainers have the right and responsibility to remove, edit, or reject
 2. Install typescript globally: `npm i -g typescript`.
 3. Run `npm ci` to install dependencies.
 4. Build the data provider: `npm run build:data-provider`.
-5. Build MCP: `npm run build:mcp`.
-6. Build data schemas: `npm run build:data-schemas`.
+5. Build data schemas: `npm run build:data-schemas`.
+6. Build API methods: `npm run build:api`.
 7. Setup and run unit tests:
    - Copy `.env.test`: `cp api/test/.env.test.example api/test/.env.test`.
    - Run backend unit tests: `npm run test:api`.


@@ -7,6 +7,7 @@ on:
       - release/*
     paths:
       - 'api/**'
+      - 'packages/api/**'
 jobs:
   tests_Backend:
     name: Run Backend unit tests
@@ -36,12 +37,12 @@ jobs:
       - name: Install Data Provider Package
         run: npm run build:data-provider
-      - name: Install MCP Package
-        run: npm run build:mcp
       - name: Install Data Schemas Package
         run: npm run build:data-schemas
+      - name: Install API Package
+        run: npm run build:api
       - name: Create empty auth.json file
         run: |
           mkdir -p api/data
@@ -66,5 +67,8 @@ jobs:
       - name: Run librechat-data-provider unit tests
         run: cd packages/data-provider && npm run test:ci
-      - name: Run librechat-mcp unit tests
-        run: cd packages/mcp && npm run test:ci
+      - name: Run @librechat/data-schemas unit tests
+        run: cd packages/data-schemas && npm run test:ci
+      - name: Run @librechat/api unit tests
+        run: cd packages/api && npm run test:ci


@@ -98,6 +98,8 @@ jobs:
           cd client
           UNUSED=$(depcheck --json | jq -r '.dependencies | join("\n")' || echo "")
           UNUSED=$(comm -23 <(echo "$UNUSED" | sort) <(cat ../client_used_deps.txt ../client_used_code.txt | sort) || echo "")
+          # Filter out false positives
+          UNUSED=$(echo "$UNUSED" | grep -v "^micromark-extension-llm-math$" || echo "")
           echo "CLIENT_UNUSED<<EOF" >> $GITHUB_ENV
           echo "$UNUSED" >> $GITHUB_ENV
           echo "EOF" >> $GITHUB_ENV

.gitignore

@@ -55,6 +55,7 @@ bower_components/
 # AI
 .clineignore
 .cursor
+.aider*
 # Floobits
 .floo


@@ -14,7 +14,7 @@ RUN npm config set fetch-retry-maxtimeout 600000 && \
     npm config set fetch-retry-mintimeout 15000
 COPY package*.json ./
 COPY packages/data-provider/package*.json ./packages/data-provider/
-COPY packages/mcp/package*.json ./packages/mcp/
+COPY packages/api/package*.json ./packages/api/
 COPY packages/data-schemas/package*.json ./packages/data-schemas/
 COPY client/package*.json ./client/
 COPY api/package*.json ./api/
@@ -24,26 +24,27 @@ FROM base-min AS base
 WORKDIR /app
 RUN npm ci

-# Build data-provider
+# Build `data-provider` package
 FROM base AS data-provider-build
 WORKDIR /app/packages/data-provider
 COPY packages/data-provider ./
 RUN npm run build

-# Build mcp package
-FROM base AS mcp-build
-WORKDIR /app/packages/mcp
-COPY packages/mcp ./
-COPY --from=data-provider-build /app/packages/data-provider/dist /app/packages/data-provider/dist
-RUN npm run build
-
-# Build data-schemas
+# Build `data-schemas` package
 FROM base AS data-schemas-build
 WORKDIR /app/packages/data-schemas
 COPY packages/data-schemas ./
 COPY --from=data-provider-build /app/packages/data-provider/dist /app/packages/data-provider/dist
 RUN npm run build

+# Build `api` package
+FROM base AS api-package-build
+WORKDIR /app/packages/api
+COPY packages/api ./
+COPY --from=data-provider-build /app/packages/data-provider/dist /app/packages/data-provider/dist
+COPY --from=data-schemas-build /app/packages/data-schemas/dist /app/packages/data-schemas/dist
+RUN npm run build

 # Client build
 FROM base AS client-build
 WORKDIR /app/client
@@ -63,8 +64,8 @@ RUN npm ci --omit=dev
 COPY api ./api
 COPY config ./config
 COPY --from=data-provider-build /app/packages/data-provider/dist ./packages/data-provider/dist
-COPY --from=mcp-build /app/packages/mcp/dist ./packages/mcp/dist
 COPY --from=data-schemas-build /app/packages/data-schemas/dist ./packages/data-schemas/dist
+COPY --from=api-package-build /app/packages/api/dist ./packages/api/dist
 COPY --from=client-build /app/client/dist ./client/dist
 WORKDIR /app/api
 EXPOSE 3080


@@ -150,8 +150,8 @@ Click on the thumbnail to open the video☝
 **Other:**
 - **Website:** [librechat.ai](https://librechat.ai)
-- **Documentation:** [docs.librechat.ai](https://docs.librechat.ai)
-- **Blog:** [blog.librechat.ai](https://blog.librechat.ai)
+- **Documentation:** [librechat.ai/docs](https://librechat.ai/docs)
+- **Blog:** [librechat.ai/blog](https://librechat.ai/blog)
 ---


@@ -10,6 +10,7 @@ const {
   validateVisionModel,
 } = require('librechat-data-provider');
 const { SplitStreamHandler: _Handler } = require('@librechat/agents');
+const { Tokenizer, createFetch, createStreamEventHandlers } = require('@librechat/api');
 const {
   truncateText,
   formatMessage,
@@ -26,8 +27,6 @@ const {
 const { getModelMaxTokens, getModelMaxOutputTokens, matchModelName } = require('~/utils');
 const { spendTokens, spendStructuredTokens } = require('~/models/spendTokens');
 const { encodeAndFormat } = require('~/server/services/Files/images/encode');
-const { createFetch, createStreamEventHandlers } = require('./generators');
-const Tokenizer = require('~/server/services/Tokenizer');
 const { sleep } = require('~/server/utils');
 const BaseClient = require('./BaseClient');
 const { logger } = require('~/config');
@@ -191,10 +190,11 @@ class AnthropicClient extends BaseClient {
         reverseProxyUrl: this.options.reverseProxyUrl,
       }),
       apiKey: this.apiKey,
+      fetchOptions: {},
     };
     if (this.options.proxy) {
-      options.httpAgent = new HttpsProxyAgent(this.options.proxy);
+      options.fetchOptions.agent = new HttpsProxyAgent(this.options.proxy);
     }
     if (this.options.reverseProxyUrl) {
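
The proxy change above swaps the SDK-specific `httpAgent` option for a `fetchOptions` object handed to the underlying `fetch`. A minimal standalone sketch of the pattern, assuming the client forwards `fetchOptions` into each `fetch()` call:

```js
const { HttpsProxyAgent } = require('https-proxy-agent');

// Sketch of the new wiring: the proxy agent rides along in `fetchOptions`
// rather than the SDK-specific `httpAgent` option (client plumbing assumed).
const options = { apiKey: process.env.ANTHROPIC_API_KEY, fetchOptions: {} };
if (process.env.PROXY) {
  options.fetchOptions.agent = new HttpsProxyAgent(process.env.PROXY);
}
// The client can then spread these into its requests:
// fetch(url, { ...init, ...options.fetchOptions });
```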


@@ -2,6 +2,7 @@ const { Keyv } = require('keyv');
 const crypto = require('crypto');
 const { CohereClient } = require('cohere-ai');
 const { fetchEventSource } = require('@waylaidwanderer/fetch-event-source');
+const { constructAzureURL, genAzureChatCompletion } = require('@librechat/api');
 const { encoding_for_model: encodingForModel, get_encoding: getEncoding } = require('tiktoken');
 const {
   ImageDetail,
@@ -10,9 +11,9 @@ const {
   CohereConstants,
   mapModelToAzureConfig,
 } = require('librechat-data-provider');
-const { extractBaseURL, constructAzureURL, genAzureChatCompletion } = require('~/utils');
 const { createContextHandlers } = require('./prompts');
 const { createCoherePayload } = require('./llm');
+const { extractBaseURL } = require('~/utils');
 const BaseClient = require('./BaseClient');
 const { logger } = require('~/config');


@@ -1,4 +1,5 @@
 const { google } = require('googleapis');
+const { Tokenizer } = require('@librechat/api');
 const { concat } = require('@langchain/core/utils/stream');
 const { ChatVertexAI } = require('@langchain/google-vertexai');
 const { ChatGoogleGenerativeAI } = require('@langchain/google-genai');
@@ -19,7 +20,6 @@ const {
 } = require('librechat-data-provider');
 const { getSafetySettings } = require('~/server/services/Endpoints/google/llm');
 const { encodeAndFormat } = require('~/server/services/Files/images');
-const Tokenizer = require('~/server/services/Tokenizer');
 const { spendTokens } = require('~/models/spendTokens');
 const { getModelMaxTokens } = require('~/utils');
 const { sleep } = require('~/server/utils');
@@ -34,7 +34,8 @@ const BaseClient = require('./BaseClient');
 const loc = process.env.GOOGLE_LOC || 'us-central1';
 const publisher = 'google';
-const endpointPrefix = `${loc}-aiplatform.googleapis.com`;
+const endpointPrefix =
+  loc === 'global' ? 'aiplatform.googleapis.com' : `${loc}-aiplatform.googleapis.com`;
 const settings = endpointSettings[EModelEndpoint.google];
 const EXCLUDED_GENAI_MODELS = /gemini-(?:1\.0|1-0|pro)/;


@@ -1,10 +1,11 @@
 const { z } = require('zod');
 const axios = require('axios');
 const { Ollama } = require('ollama');
+const { sleep } = require('@librechat/agents');
+const { logAxiosError } = require('@librechat/api');
+const { logger } = require('@librechat/data-schemas');
 const { Constants } = require('librechat-data-provider');
-const { deriveBaseURL, logAxiosError } = require('~/utils');
-const { sleep } = require('~/server/utils');
-const { logger } = require('~/config');
+const { deriveBaseURL } = require('~/utils');

 const ollamaPayloadSchema = z.object({
   mirostat: z.number().optional(),
@@ -67,7 +68,7 @@ class OllamaClient {
       return models;
     } catch (error) {
       const logMessage =
-        'Failed to fetch models from Ollama API. If you are not using Ollama directly, and instead, through some aggregator or reverse proxy that handles fetching via OpenAI spec, ensure the name of the endpoint doesn\'t start with `ollama` (case-insensitive).';
+        "Failed to fetch models from Ollama API. If you are not using Ollama directly, and instead, through some aggregator or reverse proxy that handles fetching via OpenAI spec, ensure the name of the endpoint doesn't start with `ollama` (case-insensitive).";
       logAxiosError({ message: logMessage, error });
       return [];
     }


@@ -1,6 +1,14 @@
 const { OllamaClient } = require('./OllamaClient');
 const { HttpsProxyAgent } = require('https-proxy-agent');
 const { SplitStreamHandler, CustomOpenAIClient: OpenAI } = require('@librechat/agents');
+const {
+  isEnabled,
+  Tokenizer,
+  createFetch,
+  constructAzureURL,
+  genAzureChatCompletion,
+  createStreamEventHandlers,
+} = require('@librechat/api');
 const {
   Constants,
   ImageDetail,
@@ -16,13 +24,6 @@ const {
   validateVisionModel,
   mapModelToAzureConfig,
 } = require('librechat-data-provider');
-const {
-  extractBaseURL,
-  constructAzureURL,
-  getModelMaxTokens,
-  genAzureChatCompletion,
-  getModelMaxOutputTokens,
-} = require('~/utils');
 const {
   truncateText,
   formatMessage,
@@ -30,10 +31,9 @@ const {
   titleInstruction,
   createContextHandlers,
 } = require('./prompts');
+const { extractBaseURL, getModelMaxTokens, getModelMaxOutputTokens } = require('~/utils');
 const { encodeAndFormat } = require('~/server/services/Files/images/encode');
-const { createFetch, createStreamEventHandlers } = require('./generators');
-const { addSpaceIfNeeded, isEnabled, sleep } = require('~/server/utils');
-const Tokenizer = require('~/server/services/Tokenizer');
+const { addSpaceIfNeeded, sleep } = require('~/server/utils');
 const { spendTokens } = require('~/models/spendTokens');
 const { handleOpenAIErrors } = require('./tools/util');
 const { createLLM, RunManager } = require('./llm');
@@ -1159,6 +1159,7 @@ ${convo}
     logger.debug('[OpenAIClient] chatCompletion', { baseURL, modelOptions });
     const opts = {
       baseURL,
+      fetchOptions: {},
     };

     if (this.useOpenRouter) {
@@ -1177,7 +1178,7 @@ ${convo}
     }

     if (this.options.proxy) {
-      opts.httpAgent = new HttpsProxyAgent(this.options.proxy);
+      opts.fetchOptions.agent = new HttpsProxyAgent(this.options.proxy);
     }

     /** @type {TAzureConfig | undefined} */
@@ -1395,7 +1396,7 @@ ${convo}
       ...modelOptions,
       stream: true,
     };
-    const stream = await openai.beta.chat.completions
+    const stream = await openai.chat.completions
      .stream(params)
      .on('abort', () => {
        /* Do nothing here */
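
The last hunk tracks the `openai` SDK promoting its streaming helper out of the `beta` namespace. A minimal sketch against a recent SDK version (model name illustrative):

```js
const OpenAI = require('openai');

// Assumes a recent `openai` SDK where the streaming helper lives at
// client.chat.completions.stream() rather than client.beta.chat.completions.
const client = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });

async function demo() {
  const stream = client.chat.completions.stream({
    model: 'gpt-4o-mini', // illustrative
    messages: [{ role: 'user', content: 'Say hello' }],
  });
  stream.on('content', (delta) => process.stdout.write(delta));
  const completion = await stream.finalChatCompletion();
  return completion.choices[0].finish_reason;
}
```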


@@ -1,71 +0,0 @@
const fetch = require('node-fetch');
const { GraphEvents } = require('@librechat/agents');
const { logger, sendEvent } = require('~/config');
const { sleep } = require('~/server/utils');
/**
* Makes a function to make HTTP request and logs the process.
* @param {Object} params
* @param {boolean} [params.directEndpoint] - Whether to use a direct endpoint.
* @param {string} [params.reverseProxyUrl] - The reverse proxy URL to use for the request.
* @returns {Promise<Response>} - A promise that resolves to the response of the fetch request.
*/
function createFetch({ directEndpoint = false, reverseProxyUrl = '' }) {
/**
* Makes an HTTP request and logs the process.
* @param {RequestInfo} url - The URL to make the request to. Can be a string or a Request object.
* @param {RequestInit} [init] - Optional init options for the request.
* @returns {Promise<Response>} - A promise that resolves to the response of the fetch request.
*/
return async (_url, init) => {
let url = _url;
if (directEndpoint) {
url = reverseProxyUrl;
}
logger.debug(`Making request to ${url}`);
if (typeof Bun !== 'undefined') {
return await fetch(url, init);
}
return await fetch(url, init);
};
}
// Add this at the module level outside the class
/**
* Creates event handlers for stream events that don't capture client references
* @param {Object} res - The response object to send events to
* @returns {Object} Object containing handler functions
*/
function createStreamEventHandlers(res) {
return {
[GraphEvents.ON_RUN_STEP]: (event) => {
if (res) {
sendEvent(res, event);
}
},
[GraphEvents.ON_MESSAGE_DELTA]: (event) => {
if (res) {
sendEvent(res, event);
}
},
[GraphEvents.ON_REASONING_DELTA]: (event) => {
if (res) {
sendEvent(res, event);
}
},
};
}
function createHandleLLMNewToken(streamRate) {
return async () => {
if (streamRate) {
await sleep(streamRate);
}
};
}
module.exports = {
createFetch,
createHandleLLMNewToken,
createStreamEventHandlers,
};
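
These helpers are not gone: per the import changes earlier in this diff, `createFetch` and `createStreamEventHandlers` are now consumed from the shared `@librechat/api` package. A usage sketch of the replacement import:

```js
const { createFetch, createStreamEventHandlers } = require('@librechat/api');

// Usage sketch: build a fetch wrapper and SSE handlers for a request,
// where `res` is an Express response object.
function buildStreamingDeps(res) {
  const fetchFn = createFetch({ directEndpoint: false, reverseProxyUrl: '' });
  const handlers = createStreamEventHandlers(res);
  return { fetchFn, handlers };
}
```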


@@ -1,6 +1,5 @@
 const { ChatOpenAI } = require('@langchain/openai');
-const { sanitizeModelName, constructAzureURL } = require('~/utils');
-const { isEnabled } = require('~/server/utils');
+const { isEnabled, sanitizeModelName, constructAzureURL } = require('@librechat/api');

 /**
  * Creates a new instance of a language model (LLM) for chat interactions.


@@ -309,7 +309,7 @@ describe('AnthropicClient', () => {
       };
       client.setOptions({ modelOptions, promptCache: true });
       const anthropicClient = client.getClient(modelOptions);
-      expect(anthropicClient.defaultHeaders).not.toHaveProperty('anthropic-beta');
+      expect(anthropicClient._options.defaultHeaders).toBeUndefined();
     });

     it('should not add beta header for other models', () => {
@@ -320,7 +320,7 @@
         },
       });
       const anthropicClient = client.getClient();
-      expect(anthropicClient.defaultHeaders).not.toHaveProperty('anthropic-beta');
+      expect(anthropicClient._options.defaultHeaders).toBeUndefined();
     });
   });


@@ -33,7 +33,9 @@ jest.mock('~/models', () => ({
 const { getConvo, saveConvo } = require('~/models');

 jest.mock('@librechat/agents', () => {
+  const { Providers } = jest.requireActual('@librechat/agents');
   return {
+    Providers,
     ChatOpenAI: jest.fn().mockImplementation(() => {
       return {};
     }),


@@ -1,184 +0,0 @@
require('dotenv').config();
const fs = require('fs');
const { z } = require('zod');
const path = require('path');
const yaml = require('js-yaml');
const { createOpenAPIChain } = require('langchain/chains');
const { DynamicStructuredTool } = require('@langchain/core/tools');
const { ChatPromptTemplate, HumanMessagePromptTemplate } = require('@langchain/core/prompts');
const { logger } = require('~/config');
function addLinePrefix(text, prefix = '// ') {
return text
.split('\n')
.map((line) => prefix + line)
.join('\n');
}
function createPrompt(name, functions) {
const prefix = `// The ${name} tool has the following functions. Determine the desired or most optimal function for the user's query:`;
const functionDescriptions = functions
.map((func) => `// - ${func.name}: ${func.description}`)
.join('\n');
return `${prefix}\n${functionDescriptions}
// You are an expert manager and scrum master. You must provide a detailed intent to better execute the function.
// Always format as such: {{"func": "function_name", "intent": "intent and expected result"}}`;
}
const AuthBearer = z
.object({
type: z.string().includes('service_http'),
authorization_type: z.string().includes('bearer'),
verification_tokens: z.object({
openai: z.string(),
}),
})
.catch(() => false);
const AuthDefinition = z
.object({
type: z.string(),
authorization_type: z.string(),
verification_tokens: z.object({
openai: z.string(),
}),
})
.catch(() => false);
async function readSpecFile(filePath) {
try {
const fileContents = await fs.promises.readFile(filePath, 'utf8');
if (path.extname(filePath) === '.json') {
return JSON.parse(fileContents);
}
return yaml.load(fileContents);
} catch (e) {
logger.error('[readSpecFile] error', e);
return false;
}
}
async function getSpec(url) {
const RegularUrl = z
.string()
.url()
.catch(() => false);
if (RegularUrl.parse(url) && path.extname(url) === '.json') {
const response = await fetch(url);
return await response.json();
}
const ValidSpecPath = z
.string()
.url()
.catch(async () => {
const spec = path.join(__dirname, '..', '.well-known', 'openapi', url);
if (!fs.existsSync(spec)) {
return false;
}
return await readSpecFile(spec);
});
return ValidSpecPath.parse(url);
}
async function createOpenAPIPlugin({ data, llm, user, message, memory, signal }) {
let spec;
try {
spec = await getSpec(data.api.url);
} catch (error) {
logger.error('[createOpenAPIPlugin] getSpec error', error);
return null;
}
if (!spec) {
logger.warn('[createOpenAPIPlugin] No spec found');
return null;
}
const headers = {};
const { auth, name_for_model, description_for_model, description_for_human } = data;
if (auth && AuthDefinition.parse(auth)) {
logger.debug('[createOpenAPIPlugin] auth detected', auth);
const { openai } = auth.verification_tokens;
if (AuthBearer.parse(auth)) {
headers.authorization = `Bearer ${openai}`;
logger.debug('[createOpenAPIPlugin] added auth bearer', headers);
}
}
const chainOptions = { llm };
if (data.headers && data.headers['librechat_user_id']) {
logger.debug('[createOpenAPIPlugin] id detected', headers);
headers[data.headers['librechat_user_id']] = user;
}
if (Object.keys(headers).length > 0) {
logger.debug('[createOpenAPIPlugin] headers detected', headers);
chainOptions.headers = headers;
}
if (data.params) {
logger.debug('[createOpenAPIPlugin] params detected', data.params);
chainOptions.params = data.params;
}
let history = '';
if (memory) {
logger.debug('[createOpenAPIPlugin] openAPI chain: memory detected', memory);
const { history: chat_history } = await memory.loadMemoryVariables({});
history = chat_history?.length > 0 ? `\n\n## Chat History:\n${chat_history}\n` : '';
}
chainOptions.prompt = ChatPromptTemplate.fromMessages([
HumanMessagePromptTemplate.fromTemplate(
`# Use the provided API's to respond to this query:\n\n{query}\n\n## Instructions:\n${addLinePrefix(
description_for_model,
)}${history}`,
),
]);
const chain = await createOpenAPIChain(spec, chainOptions);
const { functions } = chain.chains[0].lc_kwargs.llmKwargs;
return new DynamicStructuredTool({
name: name_for_model,
description_for_model: `${addLinePrefix(description_for_human)}${createPrompt(
name_for_model,
functions,
)}`,
description: `${description_for_human}`,
schema: z.object({
func: z
.string()
.describe(
`The function to invoke. The functions available are: ${functions
.map((func) => func.name)
.join(', ')}`,
),
intent: z
.string()
.describe('Describe your intent with the function and your expected result'),
}),
func: async ({ func = '', intent = '' }) => {
const filteredFunctions = functions.filter((f) => f.name === func);
chain.chains[0].lc_kwargs.llmKwargs.functions = filteredFunctions;
const query = `${message}${func?.length > 0 ? `\n// Intent: ${intent}` : ''}`;
const result = await chain.call({
query,
signal,
});
return result.response;
},
});
}
module.exports = {
getSpec,
readSpecFile,
createOpenAPIPlugin,
};


@@ -1,72 +0,0 @@
const fs = require('fs');
const { createOpenAPIPlugin, getSpec, readSpecFile } = require('./OpenAPIPlugin');
global.fetch = jest.fn().mockImplementationOnce(() => {
return new Promise((resolve) => {
resolve({
ok: true,
json: () => Promise.resolve({ key: 'value' }),
});
});
});
jest.mock('fs', () => ({
promises: {
readFile: jest.fn(),
},
existsSync: jest.fn(),
}));
describe('readSpecFile', () => {
it('reads JSON file correctly', async () => {
fs.promises.readFile.mockResolvedValue(JSON.stringify({ test: 'value' }));
const result = await readSpecFile('test.json');
expect(result).toEqual({ test: 'value' });
});
it('reads YAML file correctly', async () => {
fs.promises.readFile.mockResolvedValue('test: value');
const result = await readSpecFile('test.yaml');
expect(result).toEqual({ test: 'value' });
});
it('handles error correctly', async () => {
fs.promises.readFile.mockRejectedValue(new Error('test error'));
const result = await readSpecFile('test.json');
expect(result).toBe(false);
});
});
describe('getSpec', () => {
it('fetches spec from url correctly', async () => {
const parsedJson = await getSpec('https://www.instacart.com/.well-known/ai-plugin.json');
const isObject = typeof parsedJson === 'object';
expect(isObject).toEqual(true);
});
it('reads spec from file correctly', async () => {
fs.existsSync.mockReturnValue(true);
fs.promises.readFile.mockResolvedValue(JSON.stringify({ test: 'value' }));
const result = await getSpec('test.json');
expect(result).toEqual({ test: 'value' });
});
it('returns false when file does not exist', async () => {
fs.existsSync.mockReturnValue(false);
const result = await getSpec('test.json');
expect(result).toBe(false);
});
});
describe('createOpenAPIPlugin', () => {
it('returns null when getSpec throws an error', async () => {
const result = await createOpenAPIPlugin({ data: { api: { url: 'invalid' } } });
expect(result).toBe(null);
});
it('returns null when no spec is found', async () => {
const result = await createOpenAPIPlugin({});
expect(result).toBe(null);
});
// Add more tests here for different scenarios
});


@@ -8,10 +8,10 @@ const { HttpsProxyAgent } = require('https-proxy-agent');
 const { FileContext, ContentTypes } = require('librechat-data-provider');
 const { getImageBasename } = require('~/server/services/Files/images');
 const extractBaseURL = require('~/utils/extractBaseURL');
-const { logger } = require('~/config');
+const logger = require('~/config/winston');

 const displayMessage =
-  'DALL-E displayed an image. All generated images are already plainly visible, so don\'t repeat the descriptions in detail. Do not list download links as they are available in the UI already. The user may download the images by clicking on them, but do not mention anything about downloading to the user.';
+  "DALL-E displayed an image. All generated images are already plainly visible, so don't repeat the descriptions in detail. Do not list download links as they are available in the UI already. The user may download the images by clicking on them, but do not mention anything about downloading to the user.";
 class DALLE3 extends Tool {
   constructor(fields = {}) {
     super();


@@ -4,12 +4,13 @@ const { v4 } = require('uuid');
 const OpenAI = require('openai');
 const FormData = require('form-data');
 const { tool } = require('@langchain/core/tools');
+const { logAxiosError } = require('@librechat/api');
+const { logger } = require('@librechat/data-schemas');
 const { HttpsProxyAgent } = require('https-proxy-agent');
 const { ContentTypes, EImageOutputType } = require('librechat-data-provider');
 const { getStrategyFunctions } = require('~/server/services/Files/strategies');
-const { logAxiosError, extractBaseURL } = require('~/utils');
+const { extractBaseURL } = require('~/utils');
 const { getFiles } = require('~/models/File');
-const { logger } = require('~/config');

 /** Default descriptions for image generation tool */
 const DEFAULT_IMAGE_GEN_DESCRIPTION = `


@@ -1,10 +1,29 @@
 const OpenAI = require('openai');
 const DALLE3 = require('../DALLE3');
-const logger = require('~/config/winston');
+const { logger } = require('~/config');

 jest.mock('openai');

+jest.mock('@librechat/data-schemas', () => {
+  return {
+    logger: {
+      info: jest.fn(),
+      warn: jest.fn(),
+      debug: jest.fn(),
+      error: jest.fn(),
+    },
+  };
+});
+
+jest.mock('tiktoken', () => {
+  return {
+    encoding_for_model: jest.fn().mockReturnValue({
+      encode: jest.fn(),
+      decode: jest.fn(),
+    }),
+  };
+});
+
 const processFileURL = jest.fn();

 jest.mock('~/server/services/Files/images', () => ({
@@ -37,6 +56,11 @@ jest.mock('fs', () => {
   return {
     existsSync: jest.fn(),
     mkdirSync: jest.fn(),
+    promises: {
+      writeFile: jest.fn(),
+      readFile: jest.fn(),
+      unlink: jest.fn(),
+    },
   };
 });


@@ -135,7 +135,7 @@ const createFileSearchTool = async ({ req, files, entity_id }) => {
         query: z
           .string()
           .describe(
-            'A natural language query to search for relevant information in the files. Be specific and use keywords related to the information you\'re looking for. The query will be used for semantic similarity matching against the file contents.',
+            "A natural language query to search for relevant information in the files. Be specific and use keywords related to the information you're looking for. The query will be used for semantic similarity matching against the file contents.",
           ),
       }),
     },


@@ -1,14 +1,14 @@
+const { mcpToolPattern } = require('@librechat/api');
+const { logger } = require('@librechat/data-schemas');
 const { SerpAPI } = require('@langchain/community/tools/serpapi');
 const { Calculator } = require('@langchain/community/tools/calculator');
 const { EnvVar, createCodeExecutionTool, createSearchTool } = require('@librechat/agents');
 const {
   Tools,
-  Constants,
   EToolResources,
   loadWebSearchAuth,
   replaceSpecialVars,
 } = require('librechat-data-provider');
-const { getUserPluginAuthValue } = require('~/server/services/PluginService');
 const {
   availableTools,
   manifestToolMap,
@@ -28,11 +28,10 @@ const {
 } = require('../');
 const { primeFiles: primeCodeFiles } = require('~/server/services/Files/Code/process');
 const { createFileSearchTool, primeFiles: primeSearchFiles } = require('./fileSearch');
+const { getUserPluginAuthValue } = require('~/server/services/PluginService');
 const { loadAuthValues } = require('~/server/services/Tools/credentials');
+const { getCachedTools } = require('~/server/services/Config');
 const { createMCPTool } = require('~/server/services/MCP');
-const { logger } = require('~/config');
-const mcpToolPattern = new RegExp(`^.+${Constants.mcp_delimiter}.+$`);

 /**
  * Validates the availability and authentication of tools for a user based on environment variables or user-specific plugin authentication values.
@@ -93,7 +92,7 @@ const validateTools = async (user, tools = []) => {
     return Array.from(validToolsSet.values());
   } catch (err) {
     logger.error('[validateTools] There was a problem validating tools', err);
-    throw new Error('There was a problem validating tools');
+    throw new Error(err);
   }
 };
@@ -236,7 +235,7 @@ const loadTools = async ({
   /** @type {Record<string, string>} */
   const toolContextMap = {};
-  const appTools = options.req?.app?.locals?.availableTools ?? {};
+  const appTools = (await getCachedTools({ includeGlobal: true })) ?? {};

   for (const tool of tools) {
     if (tool === Tools.execute_code) {
@@ -299,6 +298,7 @@ Current Date & Time: ${replaceSpecialVars({ text: '{{iso_datetime}}' })}
         requestedTools[tool] = async () =>
           createMCPTool({
             req: options.req,
+            res: options.res,
             toolKey: tool,
             model: agent?.model ?? model,
             provider: agent?.provider ?? endpoint,
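
Worth noting in this hunk: tool definitions now come from the cache layer rather than Express `app.locals`, so multiple server instances can share them. A condensed sketch of the lookup as used above:

```js
const { getCachedTools } = require('~/server/services/Config');

// Condensed from loadTools above: resolve the available tool map from the
// cache instead of reading req.app.locals.availableTools.
async function resolveAppTools() {
  return (await getCachedTools({ includeGlobal: true })) ?? {};
}
```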


@@ -29,6 +29,10 @@ const roles = isRedisEnabled
   ? new Keyv({ store: keyvRedis })
   : new Keyv({ namespace: CacheKeys.ROLES });

+const mcpTools = isRedisEnabled
+  ? new Keyv({ store: keyvRedis })
+  : new Keyv({ namespace: CacheKeys.MCP_TOOLS });
+
 const audioRuns = isRedisEnabled
   ? new Keyv({ store: keyvRedis, ttl: Time.TEN_MINUTES })
   : new Keyv({ namespace: CacheKeys.AUDIO_RUNS, ttl: Time.TEN_MINUTES });
@@ -67,6 +71,7 @@ const namespaces = {
   [CacheKeys.ROLES]: roles,
+  [CacheKeys.MCP_TOOLS]: mcpTools,
   [CacheKeys.CONFIG_STORE]: config,
   [CacheKeys.PENDING_REQ]: pending_req,
   [ViolationTypes.BAN]: new Keyv({ store: keyvMongo, namespace: CacheKeys.BANS, ttl: duration }),
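
The new `mcpTools` store follows the existing Keyv convention: Redis-backed when enabled, otherwise an in-memory namespace. A usage sketch (the key shape is an assumption):

```js
const { CacheKeys } = require('librechat-data-provider');
const getLogStores = require('~/cache/getLogStores');

// Usage sketch for the new namespace; 'serverName:toolName' is illustrative.
async function cacheToolManifest(manifest) {
  const mcpToolsCache = getLogStores(CacheKeys.MCP_TOOLS);
  await mcpToolsCache.set('serverName:toolName', manifest);
  return mcpToolsCache.get('serverName:toolName');
}
```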


@@ -1,7 +1,6 @@
-const axios = require('axios');
 const { EventSource } = require('eventsource');
-const { Time, CacheKeys } = require('librechat-data-provider');
-const { MCPManager, FlowStateManager } = require('librechat-mcp');
+const { Time } = require('librechat-data-provider');
+const { MCPManager, FlowStateManager } = require('@librechat/api');
 const logger = require('./winston');

 global.EventSource = EventSource;
@@ -16,7 +15,7 @@ let flowManager = null;
  */
 function getMCPManager(userId) {
   if (!mcpManager) {
-    mcpManager = MCPManager.getInstance(logger);
+    mcpManager = MCPManager.getInstance();
   } else {
     mcpManager.checkIdleConnections(userId);
   }
@@ -31,66 +30,13 @@ function getFlowStateManager(flowsCache) {
   if (!flowManager) {
     flowManager = new FlowStateManager(flowsCache, {
       ttl: Time.ONE_MINUTE * 3,
-      logger,
     });
   }
   return flowManager;
 }

-/**
- * Sends message data in Server Sent Events format.
- * @param {ServerResponse} res - The server response.
- * @param {{ data: string | Record<string, unknown>, event?: string }} event - The message event.
- * @param {string} event.event - The type of event.
- * @param {string} event.data - The message to be sent.
- */
-const sendEvent = (res, event) => {
-  if (typeof event.data === 'string' && event.data.length === 0) {
-    return;
-  }
-  res.write(`event: message\ndata: ${JSON.stringify(event)}\n\n`);
-};
-
-/**
- * Creates and configures an Axios instance with optional proxy settings.
- *
- * @typedef {import('axios').AxiosInstance} AxiosInstance
- * @typedef {import('axios').AxiosProxyConfig} AxiosProxyConfig
- *
- * @returns {AxiosInstance} A configured Axios instance
- * @throws {Error} If there's an issue creating the Axios instance or parsing the proxy URL
- */
-function createAxiosInstance() {
-  const instance = axios.create();
-  if (process.env.proxy) {
-    try {
-      const url = new URL(process.env.proxy);
-      /** @type {AxiosProxyConfig} */
-      const proxyConfig = {
-        host: url.hostname.replace(/^\[|\]$/g, ''),
-        protocol: url.protocol.replace(':', ''),
-      };
-      if (url.port) {
-        proxyConfig.port = parseInt(url.port, 10);
-      }
-      instance.defaults.proxy = proxyConfig;
-    } catch (error) {
-      console.error('Error parsing proxy URL:', error);
-      throw new Error(`Invalid proxy URL: ${process.env.proxy}`);
-    }
-  }
-  return instance;
-}
-
 module.exports = {
   logger,
-  sendEvent,
   getMCPManager,
-  createAxiosInstance,
   getFlowStateManager,
 };


@@ -1,8 +1,11 @@
 const mongoose = require('mongoose');
 const { MeiliSearch } = require('meilisearch');
 const { logger } = require('@librechat/data-schemas');
+const { FlowStateManager } = require('@librechat/api');
+const { CacheKeys } = require('librechat-data-provider');
 const { isEnabled } = require('~/server/utils');
+const { getLogStores } = require('~/cache');

 const Conversation = mongoose.models.Conversation;
 const Message = mongoose.models.Message;
@@ -28,11 +31,10 @@ class MeiliSearchClient {
   }
 }

-async function indexSync() {
-  if (!searchEnabled) {
-    return;
-  }
-  try {
+/**
+ * Performs the actual sync operations for messages and conversations
+ */
+async function performSync() {
   const client = MeiliSearchClient.getInstance();
   const { status } = await client.health();
@@ -42,29 +44,110 @@
   if (indexingDisabled === true) {
     logger.info('[indexSync] Indexing is disabled, skipping...');
+    return { messagesSync: false, convosSync: false };
+  }
+
+  let messagesSync = false;
+  let convosSync = false;
+
+  // Check if we need to sync messages
+  const messageProgress = await Message.getSyncProgress();
+  if (!messageProgress.isComplete) {
+    logger.info(
+      `[indexSync] Messages need syncing: ${messageProgress.totalProcessed}/${messageProgress.totalDocuments} indexed`,
+    );
+
+    // Check if we should do a full sync or incremental
+    const messageCount = await Message.countDocuments();
+    const messagesIndexed = messageProgress.totalProcessed;
+    const syncThreshold = parseInt(process.env.MEILI_SYNC_THRESHOLD || '1000', 10);
+
+    if (messageCount - messagesIndexed > syncThreshold) {
+      logger.info('[indexSync] Starting full message sync due to large difference');
+      await Message.syncWithMeili();
+      messagesSync = true;
+    } else if (messageCount !== messagesIndexed) {
+      logger.warn('[indexSync] Messages out of sync, performing incremental sync');
+      await Message.syncWithMeili();
+      messagesSync = true;
+    }
+  } else {
+    logger.info(
+      `[indexSync] Messages are fully synced: ${messageProgress.totalProcessed}/${messageProgress.totalDocuments}`,
+    );
+  }
+
+  // Check if we need to sync conversations
+  const convoProgress = await Conversation.getSyncProgress();
+  if (!convoProgress.isComplete) {
+    logger.info(
+      `[indexSync] Conversations need syncing: ${convoProgress.totalProcessed}/${convoProgress.totalDocuments} indexed`,
+    );
+
+    const convoCount = await Conversation.countDocuments();
+    const convosIndexed = convoProgress.totalProcessed;
+    const syncThreshold = parseInt(process.env.MEILI_SYNC_THRESHOLD || '1000', 10);
+
+    if (convoCount - convosIndexed > syncThreshold) {
+      logger.info('[indexSync] Starting full conversation sync due to large difference');
+      await Conversation.syncWithMeili();
+      convosSync = true;
+    } else if (convoCount !== convosIndexed) {
+      logger.warn('[indexSync] Convos out of sync, performing incremental sync');
+      await Conversation.syncWithMeili();
+      convosSync = true;
+    }
+  } else {
+    logger.info(
+      `[indexSync] Conversations are fully synced: ${convoProgress.totalProcessed}/${convoProgress.totalDocuments}`,
+    );
+  }
+
+  return { messagesSync, convosSync };
+}
+
+/**
+ * Main index sync function that uses FlowStateManager to prevent concurrent execution
+ */
+async function indexSync() {
+  if (!searchEnabled) {
     return;
   }
-  const messageCount = await Message.countDocuments();
-  const convoCount = await Conversation.countDocuments();
-  const messages = await client.index('messages').getStats();
-  const convos = await client.index('convos').getStats();
-  const messagesIndexed = messages.numberOfDocuments;
-  const convosIndexed = convos.numberOfDocuments;
-  logger.debug(`[indexSync] There are ${messageCount} messages and ${messagesIndexed} indexed`);
-  logger.debug(`[indexSync] There are ${convoCount} convos and ${convosIndexed} indexed`);
-  if (messageCount !== messagesIndexed) {
-    logger.debug('[indexSync] Messages out of sync, indexing');
-    Message.syncWithMeili();
-  }
-  if (convoCount !== convosIndexed) {
-    logger.debug('[indexSync] Convos out of sync, indexing');
-    Conversation.syncWithMeili();
-  }
+
+  logger.info('[indexSync] Starting index synchronization check...');
+
+  try {
+    // Get or create FlowStateManager instance
+    const flowsCache = getLogStores(CacheKeys.FLOWS);
+    if (!flowsCache) {
+      logger.warn('[indexSync] Flows cache not available, falling back to direct sync');
+      return await performSync();
+    }
+
+    const flowManager = new FlowStateManager(flowsCache, {
+      ttl: 60000 * 10, // 10 minutes TTL for sync operations
+    });
+
+    // Use a unique flow ID for the sync operation
+    const flowId = 'meili-index-sync';
+    const flowType = 'MEILI_SYNC';
+
+    // This will only execute the handler if no other instance is running the sync
+    const result = await flowManager.createFlowWithHandler(flowId, flowType, performSync);
+
+    if (result.messagesSync || result.convosSync) {
+      logger.info('[indexSync] Sync completed successfully');
+    } else {
+      logger.debug('[indexSync] No sync was needed');
+    }
+
+    return result;
   } catch (err) {
+    if (err.message.includes('flow already exists')) {
+      logger.info('[indexSync] Sync already running on another instance');
+      return;
+    }
     if (err.message.includes('not found')) {
       logger.debug('[indexSync] Creating indices...');
       currentTimeout = setTimeout(async () => {
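
The key idea in this rewrite: `performSync` only runs under a flow keyed by `(flowId, flowType)`, so when several LibreChat instances share one MongoDB and Meilisearch, only one of them performs the sync. Distilled to the locking pattern used above:

```js
const { FlowStateManager } = require('@librechat/api');

// Distilled from indexSync above: createFlowWithHandler acts as a
// distributed lock; a concurrent caller sees "flow already exists"
// instead of running the handler a second time.
async function runExclusiveSync(flowsCache, handler) {
  const flowManager = new FlowStateManager(flowsCache, { ttl: 60000 * 10 });
  return flowManager.createFlowWithHandler('meili-index-sync', 'MEILI_SYNC', handler);
}
```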


@@ -11,6 +11,7 @@ const {
   removeAgentIdsFromProject,
   removeAgentFromAllProjects,
 } = require('./Project');
+const { getCachedTools } = require('~/server/services/Config');
 const getLogStores = require('~/cache/getLogStores');
 const { getActions } = require('./Action');
 const { Agent } = require('~/db/models');
@@ -55,12 +56,12 @@ const getAgent = async (searchParameter) => await Agent.findOne(searchParameter)
  * @param {string} params.agent_id
  * @param {string} params.endpoint
  * @param {import('@librechat/agents').ClientOptions} [params.model_parameters]
- * @returns {Agent|null} The agent document as a plain object, or null if not found.
+ * @returns {Promise<Agent|null>} The agent document as a plain object, or null if not found.
  */
-const loadEphemeralAgent = ({ req, agent_id, endpoint, model_parameters: _m }) => {
+const loadEphemeralAgent = async ({ req, agent_id, endpoint, model_parameters: _m }) => {
   const { model, ...model_parameters } = _m;
   /** @type {Record<string, FunctionTool>} */
-  const availableTools = req.app.locals.availableTools;
+  const availableTools = await getCachedTools({ includeGlobal: true });
   /** @type {TEphemeralAgent | null} */
   const ephemeralAgent = req.body.ephemeralAgent;
   const mcpServers = new Set(ephemeralAgent?.mcp);
@@ -111,7 +112,7 @@ const loadAgent = async ({ req, agent_id, endpoint, model_parameters }) => {
     return null;
   }
   if (agent_id === EPHEMERAL_AGENT_ID) {
-    return loadEphemeralAgent({ req, agent_id, endpoint, model_parameters });
+    return await loadEphemeralAgent({ req, agent_id, endpoint, model_parameters });
   }
   const agent = await getAgent({
     id: agent_id,
@@ -170,7 +171,6 @@ const isDuplicateVersion = (updateData, currentData, versions, actionsHash = nul
     'created_at',
     'updated_at',
     '__v',
-    'agent_ids',
     'versions',
     'actionsHash', // Exclude actionsHash from direct comparison
   ];
@@ -260,11 +260,12 @@ const isDuplicateVersion = (updateData, currentData, versions, actionsHash = nul
  * @param {Object} [options] - Optional configuration object.
  * @param {string} [options.updatingUserId] - The ID of the user performing the update (used for tracking non-author updates).
  * @param {boolean} [options.forceVersion] - Force creation of a new version even if no fields changed.
+ * @param {boolean} [options.skipVersioning] - Skip version creation entirely (useful for isolated operations like sharing).
  * @returns {Promise<Agent>} The updated or newly created agent document as a plain object.
  * @throws {Error} If the update would create a duplicate version
  */
 const updateAgent = async (searchParameter, updateData, options = {}) => {
-  const { updatingUserId = null, forceVersion = false } = options;
+  const { updatingUserId = null, forceVersion = false, skipVersioning = false } = options;
   const mongoOptions = { new: true, upsert: false };
   const currentAgent = await Agent.findOne(searchParameter);
@@ -301,10 +302,8 @@
   }
   const shouldCreateVersion =
-    forceVersion ||
-    (versions &&
-      versions.length > 0 &&
-      (Object.keys(directUpdates).length > 0 || $push || $pull || $addToSet));
+    !skipVersioning &&
+    (forceVersion || Object.keys(directUpdates).length > 0 || $push || $pull || $addToSet);
   if (shouldCreateVersion) {
     const duplicateVersion = isDuplicateVersion(updateData, versionData, versions, actionsHash);
@@ -339,7 +338,7 @@
     versionEntry.updatedBy = new mongoose.Types.ObjectId(updatingUserId);
   }
-  if (shouldCreateVersion || forceVersion) {
+  if (shouldCreateVersion) {
     updateData.$push = {
       ...($push || {}),
       versions: versionEntry,
@@ -550,7 +549,10 @@ const updateAgentProjects = async ({ user, agentId, projectIds, removeProjectIds
     delete updateQuery.author;
   }
-  const updatedAgent = await updateAgent(updateQuery, updateOps, { updatingUserId: user.id });
+  const updatedAgent = await updateAgent(updateQuery, updateOps, {
+    updatingUserId: user.id,
+    skipVersioning: true,
+  });
   if (updatedAgent) {
     return updatedAgent;
   }
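
The `skipVersioning` option exists so that sharing an agent, which only touches `projectIds`, no longer mints a new agent version; regular edits still version as before. A call-site sketch mirroring `updateAgentProjects` (identifiers illustrative):

```js
// Mirrors the call above: update project membership without creating a
// new version entry.
async function shareAgent(agentId, projectId, user) {
  return updateAgent(
    { id: agentId },
    { $addToSet: { projectIds: projectId } },
    { updatingUserId: user.id, skipVersioning: true },
  );
}
```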

File diff suppressed because it is too large


@@ -1,346 +0,0 @@
const { nanoid } = require('nanoid');
const { logger } = require('@librechat/data-schemas');
const { Constants } = require('librechat-data-provider');
const { Conversation, SharedLink } = require('~/db/models');
const { getMessages } = require('./Message');
class ShareServiceError extends Error {
constructor(message, code) {
super(message);
this.name = 'ShareServiceError';
this.code = code;
}
}
const memoizedAnonymizeId = (prefix) => {
const memo = new Map();
return (id) => {
if (!memo.has(id)) {
memo.set(id, `${prefix}_${nanoid()}`);
}
return memo.get(id);
};
};
const anonymizeConvoId = memoizedAnonymizeId('convo');
const anonymizeAssistantId = memoizedAnonymizeId('a');
const anonymizeMessageId = (id) =>
id === Constants.NO_PARENT ? id : memoizedAnonymizeId('msg')(id);
function anonymizeConvo(conversation) {
if (!conversation) {
return null;
}
const newConvo = { ...conversation };
if (newConvo.assistant_id) {
newConvo.assistant_id = anonymizeAssistantId(newConvo.assistant_id);
}
return newConvo;
}
function anonymizeMessages(messages, newConvoId) {
if (!Array.isArray(messages)) {
return [];
}
const idMap = new Map();
return messages.map((message) => {
const newMessageId = anonymizeMessageId(message.messageId);
idMap.set(message.messageId, newMessageId);
const anonymizedAttachments = message.attachments?.map((attachment) => {
return {
...attachment,
messageId: newMessageId,
conversationId: newConvoId,
};
});
return {
...message,
messageId: newMessageId,
parentMessageId:
idMap.get(message.parentMessageId) || anonymizeMessageId(message.parentMessageId),
conversationId: newConvoId,
model: message.model?.startsWith('asst_')
? anonymizeAssistantId(message.model)
: message.model,
attachments: anonymizedAttachments,
};
});
}
async function getSharedMessages(shareId) {
try {
const share = await SharedLink.findOne({ shareId, isPublic: true })
.populate({
path: 'messages',
select: '-_id -__v -user',
})
.select('-_id -__v -user')
.lean();
if (!share?.conversationId || !share.isPublic) {
return null;
}
const newConvoId = anonymizeConvoId(share.conversationId);
const result = {
...share,
conversationId: newConvoId,
messages: anonymizeMessages(share.messages, newConvoId),
};
return result;
} catch (error) {
logger.error('[getShare] Error getting share link', {
error: error.message,
shareId,
});
throw new ShareServiceError('Error getting share link', 'SHARE_FETCH_ERROR');
}
}
async function getSharedLinks(user, pageParam, pageSize, isPublic, sortBy, sortDirection, search) {
try {
const query = { user, isPublic };
if (pageParam) {
if (sortDirection === 'desc') {
query[sortBy] = { $lt: pageParam };
} else {
query[sortBy] = { $gt: pageParam };
}
}
if (search && search.trim()) {
try {
const searchResults = await Conversation.meiliSearch(search);
if (!searchResults?.hits?.length) {
return {
links: [],
nextCursor: undefined,
hasNextPage: false,
};
}
const conversationIds = searchResults.hits.map((hit) => hit.conversationId);
query['conversationId'] = { $in: conversationIds };
} catch (searchError) {
logger.error('[getSharedLinks] Meilisearch error', {
error: searchError.message,
user,
});
return {
links: [],
nextCursor: undefined,
hasNextPage: false,
};
}
}
const sort = {};
sort[sortBy] = sortDirection === 'desc' ? -1 : 1;
if (Array.isArray(query.conversationId)) {
query.conversationId = { $in: query.conversationId };
}
const sharedLinks = await SharedLink.find(query)
.sort(sort)
.limit(pageSize + 1)
.select('-__v -user')
.lean();
const hasNextPage = sharedLinks.length > pageSize;
const links = sharedLinks.slice(0, pageSize);
const nextCursor = hasNextPage ? links[links.length - 1][sortBy] : undefined;
return {
links: links.map((link) => ({
shareId: link.shareId,
title: link?.title || 'Untitled',
isPublic: link.isPublic,
createdAt: link.createdAt,
conversationId: link.conversationId,
})),
nextCursor,
hasNextPage,
};
} catch (error) {
logger.error('[getSharedLinks] Error getting shares', {
error: error.message,
user,
});
throw new ShareServiceError('Error getting shares', 'SHARES_FETCH_ERROR');
}
}
async function deleteAllSharedLinks(user) {
try {
const result = await SharedLink.deleteMany({ user });
return {
message: 'All shared links deleted successfully',
deletedCount: result.deletedCount,
};
} catch (error) {
logger.error('[deleteAllSharedLinks] Error deleting shared links', {
error: error.message,
user,
});
throw new ShareServiceError('Error deleting shared links', 'BULK_DELETE_ERROR');
}
}
async function createSharedLink(user, conversationId) {
if (!user || !conversationId) {
throw new ShareServiceError('Missing required parameters', 'INVALID_PARAMS');
}
try {
const [existingShare, conversationMessages] = await Promise.all([
SharedLink.findOne({ conversationId, isPublic: true }).select('-_id -__v -user').lean(),
getMessages({ conversationId }),
]);
if (existingShare && existingShare.isPublic) {
throw new ShareServiceError('Share already exists', 'SHARE_EXISTS');
} else if (existingShare) {
await SharedLink.deleteOne({ conversationId });
}
const conversation = await Conversation.findOne({ conversationId }).lean();
const title = conversation?.title || 'Untitled';
const shareId = nanoid();
await SharedLink.create({
shareId,
conversationId,
messages: conversationMessages,
title,
user,
});
return { shareId, conversationId };
} catch (error) {
logger.error('[createSharedLink] Error creating shared link', {
error: error.message,
user,
conversationId,
});
throw new ShareServiceError('Error creating shared link', 'SHARE_CREATE_ERROR');
}
}
async function getSharedLink(user, conversationId) {
if (!user || !conversationId) {
throw new ShareServiceError('Missing required parameters', 'INVALID_PARAMS');
}
try {
const share = await SharedLink.findOne({ conversationId, user, isPublic: true })
.select('shareId -_id')
.lean();
if (!share) {
return { shareId: null, success: false };
}
return { shareId: share.shareId, success: true };
} catch (error) {
logger.error('[getSharedLink] Error getting shared link', {
error: error.message,
user,
conversationId,
});
throw new ShareServiceError('Error getting shared link', 'SHARE_FETCH_ERROR');
}
}
async function updateSharedLink(user, shareId) {
if (!user || !shareId) {
throw new ShareServiceError('Missing required parameters', 'INVALID_PARAMS');
}
try {
const share = await SharedLink.findOne({ shareId }).select('-_id -__v -user').lean();
if (!share) {
throw new ShareServiceError('Share not found', 'SHARE_NOT_FOUND');
}
const [updatedMessages] = await Promise.all([
getMessages({ conversationId: share.conversationId }),
]);
const newShareId = nanoid();
const update = {
messages: updatedMessages,
user,
shareId: newShareId,
};
const updatedShare = await SharedLink.findOneAndUpdate({ shareId, user }, update, {
new: true,
upsert: false,
runValidators: true,
}).lean();
if (!updatedShare) {
throw new ShareServiceError('Share update failed', 'SHARE_UPDATE_ERROR');
}
anonymizeConvo(updatedShare);
return { shareId: newShareId, conversationId: updatedShare.conversationId };
} catch (error) {
logger.error('[updateSharedLink] Error updating shared link', {
error: error.message,
user,
shareId,
});
throw new ShareServiceError(
error.code === 'SHARE_UPDATE_ERROR' ? error.message : 'Error updating shared link',
error.code || 'SHARE_UPDATE_ERROR',
);
}
}
async function deleteSharedLink(user, shareId) {
if (!user || !shareId) {
throw new ShareServiceError('Missing required parameters', 'INVALID_PARAMS');
}
try {
const result = await SharedLink.findOneAndDelete({ shareId, user }).lean();
if (!result) {
return null;
}
return {
success: true,
shareId,
message: 'Share deleted successfully',
};
} catch (error) {
logger.error('[deleteSharedLink] Error deleting shared link', {
error: error.message,
user,
shareId,
});
throw new ShareServiceError('Error deleting shared link', 'SHARE_DELETE_ERROR');
}
}
module.exports = {
getSharedLink,
getSharedLinks,
createSharedLink,
updateSharedLink,
deleteSharedLink,
getSharedMessages,
deleteAllSharedLinks,
};
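
Taken together, `getSharedLinks` implements cursor-based pagination: the caller passes the previous page's `nextCursor` back as `pageParam`, and the query over-fetches by one row (`pageSize + 1`) to detect whether another page exists. A minimal consumer sketch, assuming the module is required via this repo's path aliases (the draining loop itself is illustrative, not part of this changeset):

// Hypothetical consumer that drains all public share pages for a user:
const { getSharedLinks } = require('~/models/Share');

async function listAllSharedLinks(userId) {
  const all = [];
  let cursor;
  let hasNextPage = true;
  while (hasNextPage) {
    // (user, pageParam, pageSize, isPublic, sortBy, sortDirection, search)
    const page = await getSharedLinks(userId, cursor, 10, true, 'createdAt', 'desc');
    all.push(...page.links);
    ({ nextCursor: cursor, hasNextPage } = page);
  }
  return all;
}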

View file

@@ -1,42 +0,0 @@
-const { findToken, updateToken, createToken } = require('~/models');
-const { encryptV2 } = require('~/server/utils/crypto');
-
-/**
- * Handles the OAuth token by creating or updating the token.
- * @param {object} fields
- * @param {string} fields.userId - The user's ID.
- * @param {string} fields.token - The full token to store.
- * @param {string} fields.identifier - Unique, alternative identifier for the token.
- * @param {number} fields.expiresIn - The number of seconds until the token expires.
- * @param {object} fields.metadata - Additional metadata to store with the token.
- * @param {string} [fields.type="oauth"] - The type of token. Default is 'oauth'.
- */
-async function handleOAuthToken({
-  token,
-  userId,
-  identifier,
-  expiresIn,
-  metadata,
-  type = 'oauth',
-}) {
-  const encrypedToken = await encryptV2(token);
-  const tokenData = {
-    type,
-    userId,
-    metadata,
-    identifier,
-    token: encrypedToken,
-    expiresIn: parseInt(expiresIn, 10) || 3600,
-  };
-
-  const existingToken = await findToken({ userId, identifier });
-  if (existingToken) {
-    return await updateToken({ identifier }, tokenData);
-  } else {
-    return await createToken(tokenData);
-  }
-}
-
-module.exports = {
-  handleOAuthToken,
-};
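
For reference, the removed helper's create-or-update semantics in one hedged usage sketch (the `tokenResponse` shape and identifier value are hypothetical, not from this changeset):

// Hypothetical call site, assuming a standard OAuth token-exchange response:
await handleOAuthToken({
  userId: user.id,
  identifier: `${user.id}:my-action`, // alternate lookup key; updated in place if it exists
  token: tokenResponse.access_token, // stored encrypted via encryptV2
  expiresIn: tokenResponse.expires_in, // seconds; parseInt fallback is 3600
  metadata: { provider: 'github' },
});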

View file

@@ -1,6 +1,6 @@
 const mongoose = require('mongoose');
+const { getRandomValues } = require('@librechat/api');
 const { logger, hashToken } = require('@librechat/data-schemas');
-const { getRandomValues } = require('~/server/utils/crypto');
 const { createToken, findToken } = require('~/models');

 /**
View file

@@ -78,7 +78,7 @@ const tokenValues = Object.assign(
   'gpt-3.5-turbo-1106': { prompt: 1, completion: 2 },
   'o4-mini': { prompt: 1.1, completion: 4.4 },
   'o3-mini': { prompt: 1.1, completion: 4.4 },
-  o3: { prompt: 10, completion: 40 },
+  o3: { prompt: 2, completion: 8 },
   'o1-mini': { prompt: 1.1, completion: 4.4 },
   'o1-preview': { prompt: 15, completion: 60 },
   o1: { prompt: 15, completion: 60 },
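
The o3 entry drops from 10/40 to 2/8, consistent with OpenAI's June 2025 o3 price cut to $2/$8 per 1M tokens. A quick sanity check of what a rate pair means here (assuming, as the neighboring entries suggest, units of USD per 1M tokens; the helper is illustrative, not part of this changeset):

// Illustrative cost check for the new o3 rates (assumed units: USD per 1M tokens):
const o3 = { prompt: 2, completion: 8 };
const costUSD = (promptTokens, completionTokens) =>
  (promptTokens * o3.prompt + completionTokens * o3.completion) / 1_000_000;

console.log(costUSD(10_000, 2_000)); // 0.036 -> $0.036 for 10k prompt + 2k completion tokens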

View file

@@ -12,6 +12,10 @@ const comparePassword = async (user, candidatePassword) => {
     throw new Error('No user provided');
   }

+  if (!user.password) {
+    throw new Error('No password, likely an email first registered via Social/OIDC login');
+  }
+
   return new Promise((resolve, reject) => {
     bcrypt.compare(candidatePassword, user.password, (err, isMatch) => {
       if (err) {
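
The new guard covers accounts first created through a social/OIDC provider, which have no local `password` field; without it, `bcrypt.compare` would be invoked with `undefined` and fail opaquely. A hedged caller sketch (surrounding names hypothetical):

try {
  const isMatch = await comparePassword(user, submittedPassword); // resolves to a boolean
  if (!isMatch) {
    // wrong password: respond 401
  }
} catch (err) {
  // err.message now distinguishes the social-login case:
  // 'No password, likely an email first registered via Social/OIDC login'
}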

View file

@@ -34,21 +34,22 @@
   },
   "homepage": "https://librechat.ai",
   "dependencies": {
-    "@anthropic-ai/sdk": "^0.37.0",
+    "@anthropic-ai/sdk": "^0.52.0",
     "@aws-sdk/client-s3": "^3.758.0",
     "@aws-sdk/s3-request-presigner": "^3.758.0",
     "@azure/identity": "^4.7.0",
     "@azure/search-documents": "^12.0.0",
     "@azure/storage-blob": "^12.27.0",
-    "@google/generative-ai": "^0.23.0",
+    "@google/generative-ai": "^0.24.0",
     "@googleapis/youtube": "^20.0.0",
     "@keyv/redis": "^4.3.3",
-    "@langchain/community": "^0.3.44",
-    "@langchain/core": "^0.3.57",
-    "@langchain/google-genai": "^0.2.9",
-    "@langchain/google-vertexai": "^0.2.9",
+    "@langchain/community": "^0.3.47",
+    "@langchain/core": "^0.3.60",
+    "@langchain/google-genai": "^0.2.13",
+    "@langchain/google-vertexai": "^0.2.13",
     "@langchain/textsplitters": "^0.1.0",
-    "@librechat/agents": "^2.4.37",
+    "@librechat/agents": "^2.4.41",
+    "@librechat/api": "*",
     "@librechat/data-schemas": "*",
     "@node-saml/passport-saml": "^5.0.0",
     "@waylaidwanderer/fetch-event-source": "^3.0.1",
@@ -81,15 +82,15 @@
     "keyv-file": "^5.1.2",
     "klona": "^2.0.6",
     "librechat-data-provider": "*",
-    "librechat-mcp": "*",
     "lodash": "^4.17.21",
     "meilisearch": "^0.38.0",
     "memorystore": "^1.6.7",
     "mime": "^3.0.0",
     "module-alias": "^2.2.3",
     "mongoose": "^8.12.1",
-    "multer": "^2.0.0",
+    "multer": "^2.0.1",
     "nanoid": "^3.3.7",
-    "node-fetch": "^2.7.0",
     "nodemailer": "^6.9.15",
     "ollama": "^0.5.0",
     "openai": "^4.96.2",
@@ -109,8 +110,9 @@
     "tiktoken": "^1.0.15",
     "traverse": "^0.6.7",
     "ua-parser-js": "^1.0.36",
+    "undici": "^7.10.0",
     "winston": "^3.11.0",
-    "winston-daily-rotate-file": "^4.7.1",
+    "winston-daily-rotate-file": "^5.0.0",
     "youtube-transcript": "^1.2.1",
     "zod": "^3.22.4"
   },

View file

@@ -220,6 +220,9 @@ function disposeClient(client) {
   if (client.maxResponseTokens) {
     client.maxResponseTokens = null;
   }
+  if (client.processMemory) {
+    client.processMemory = null;
+  }
   if (client.run) {
     // Break circular references in run
     if (client.run.Graph) {

View file

@@ -1,9 +1,11 @@
+const { logger } = require('@librechat/data-schemas');
 const { CacheKeys, AuthType } = require('librechat-data-provider');
+const { getCustomConfig, getCachedTools } = require('~/server/services/Config');
 const { getToolkitKey } = require('~/server/services/ToolService');
-const { getCustomConfig } = require('~/server/services/Config');
+const { getMCPManager, getFlowStateManager } = require('~/config');
 const { availableTools } = require('~/app/clients/tools');
-const { getMCPManager } = require('~/config');
 const { getLogStores } = require('~/cache');
+const { Constants } = require('librechat-data-provider');

 /**
  * Filters out duplicate plugins from the list of plugins.
@@ -84,6 +86,45 @@ const getAvailablePluginsController = async (req, res) => {
   }
 };

+function createServerToolsCallback() {
+  /**
+   * @param {string} serverName
+   * @param {TPlugin[] | null} serverTools
+   */
+  return async function (serverName, serverTools) {
+    try {
+      const mcpToolsCache = getLogStores(CacheKeys.MCP_TOOLS);
+      if (!serverName || !mcpToolsCache) {
+        return;
+      }
+      await mcpToolsCache.set(serverName, serverTools);
+      logger.debug(`MCP tools for ${serverName} added to cache.`);
+    } catch (error) {
+      logger.error('Error retrieving MCP tools from cache:', error);
+    }
+  };
+}
+
+function createGetServerTools() {
+  /**
+   * Retrieves cached server tools
+   * @param {string} serverName
+   * @returns {Promise<TPlugin[] | null>}
+   */
+  return async function (serverName) {
+    try {
+      const mcpToolsCache = getLogStores(CacheKeys.MCP_TOOLS);
+      if (!mcpToolsCache) {
+        return null;
+      }
+      return await mcpToolsCache.get(serverName);
+    } catch (error) {
+      logger.error('Error retrieving MCP tools from cache:', error);
+      return null;
+    }
+  };
+}
+
 /**
  * Retrieves and returns a list of available tools, either from a cache or by reading a plugin manifest file.
  *
@@ -109,7 +150,16 @@ const getAvailableTools = async (req, res) => {
     const customConfig = await getCustomConfig();
     if (customConfig?.mcpServers != null) {
       const mcpManager = getMCPManager();
-      pluginManifest = await mcpManager.loadManifestTools(pluginManifest);
+      const flowsCache = getLogStores(CacheKeys.FLOWS);
+      const flowManager = flowsCache ? getFlowStateManager(flowsCache) : null;
+      const serverToolsCallback = createServerToolsCallback();
+      const getServerTools = createGetServerTools();
+      const mcpTools = await mcpManager.loadManifestTools({
+        flowManager,
+        serverToolsCallback,
+        getServerTools,
+      });
+      pluginManifest = [...mcpTools, ...pluginManifest];
     }

     /** @type {TPlugin[]} */
@@ -123,17 +173,57 @@ const getAvailableTools = async (req, res) => {
       }
     });

-    const toolDefinitions = req.app.locals.availableTools;
-    const tools = authenticatedPlugins.filter(
-      (plugin) =>
-        toolDefinitions[plugin.pluginKey] !== undefined ||
-        (plugin.toolkit === true &&
-          Object.keys(toolDefinitions).some((key) => getToolkitKey(key) === plugin.pluginKey)),
-    );
-
-    await cache.set(CacheKeys.TOOLS, tools);
-    res.status(200).json(tools);
+    const toolDefinitions = await getCachedTools({ includeGlobal: true });
+
+    const toolsOutput = [];
+    for (const plugin of authenticatedPlugins) {
+      const isToolDefined = toolDefinitions[plugin.pluginKey] !== undefined;
+      const isToolkit =
+        plugin.toolkit === true &&
+        Object.keys(toolDefinitions).some((key) => getToolkitKey(key) === plugin.pluginKey);
+
+      if (!isToolDefined && !isToolkit) {
+        continue;
+      }
+
+      const toolToAdd = { ...plugin };
+
+      if (!plugin.pluginKey.includes(Constants.mcp_delimiter)) {
+        toolsOutput.push(toolToAdd);
+        continue;
+      }
+
+      const parts = plugin.pluginKey.split(Constants.mcp_delimiter);
+      const serverName = parts[parts.length - 1];
+      const serverConfig = customConfig?.mcpServers?.[serverName];
+
+      if (!serverConfig?.customUserVars) {
+        toolsOutput.push(toolToAdd);
+        continue;
+      }
+
+      const customVarKeys = Object.keys(serverConfig.customUserVars);
+
+      if (customVarKeys.length === 0) {
+        toolToAdd.authConfig = [];
+        toolToAdd.authenticated = true;
+      } else {
+        toolToAdd.authConfig = Object.entries(serverConfig.customUserVars).map(([key, value]) => ({
+          authField: key,
+          label: value.title || key,
+          description: value.description || '',
+        }));
+        toolToAdd.authenticated = false;
+      }
+
+      toolsOutput.push(toolToAdd);
+    }
+
+    const finalTools = filterUniquePlugins(toolsOutput);
+
+    await cache.set(CacheKeys.TOOLS, finalTools);
+    res.status(200).json(finalTools);
   } catch (error) {
+    logger.error('[getAvailableTools]', error);
     res.status(500).json({ message: error.message });
   }
 };
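
The `customUserVars` branch above is what turns per-server MCP config into the UI's credential prompts. A self-contained restatement of that mapping (the sample config is hypothetical):

// Example librechat.yaml fragment (hypothetical):
//   mcpServers:
//     github:
//       customUserVars:
//         GITHUB_TOKEN: { title: 'GitHub Token', description: 'PAT with repo scope' }
const customUserVars = {
  GITHUB_TOKEN: { title: 'GitHub Token', description: 'PAT with repo scope' },
};
const authConfig = Object.entries(customUserVars).map(([key, value]) => ({
  authField: key,
  label: value.title || key,
  description: value.description || '',
}));
// -> [{ authField: 'GITHUB_TOKEN', label: 'GitHub Token', description: 'PAT with repo scope' }]
// The tool is then returned with authenticated: false until the user supplies the value.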

View file

@@ -1,3 +1,4 @@
+const { encryptV3 } = require('@librechat/api');
 const { logger } = require('@librechat/data-schemas');
 const {
   verifyTOTP,
@@ -7,7 +8,6 @@ const {
   generateBackupCodes,
 } = require('~/server/services/twoFactorService');
 const { getUserById, updateUser } = require('~/models');
-const { encryptV3 } = require('~/server/utils/crypto');

 const safeAppTitle = (process.env.APP_TITLE || 'LibreChat').replace(/\s+/g, '');

View file

@@ -1,5 +1,6 @@
 const {
   Tools,
+  Constants,
   FileSources,
   webSearchKeys,
   extractWebSearchEnvVars,
@@ -21,8 +22,9 @@ const { verifyEmail, resendVerificationEmail } = require('~/server/services/Auth
 const { needsRefresh, getNewS3URL } = require('~/server/services/Files/S3/crud');
 const { processDeleteRequest } = require('~/server/services/Files/process');
 const { Transaction, Balance, User } = require('~/db/models');
-const { deleteAllSharedLinks } = require('~/models/Share');
 const { deleteToolCalls } = require('~/models/ToolCall');
+const { deleteAllSharedLinks } = require('~/models');
+const { getMCPManager } = require('~/config');

 const getUserController = async (req, res) => {
   /** @type {MongoUser} */
@@ -102,10 +104,22 @@ const updateUserPluginsController = async (req, res) => {
   }

   let keys = Object.keys(auth);
-  if (keys.length === 0 && pluginKey !== Tools.web_search) {
+  const values = Object.values(auth); // Used in 'install' block
+  const isMCPTool = pluginKey.startsWith('mcp_') || pluginKey.includes(Constants.mcp_delimiter);
+
+  // Early exit condition:
+  // If keys are empty (meaning auth: {} was likely sent for uninstall, or auth was empty for install)
+  // AND it's not web_search (which has special key handling to populate `keys` for uninstall)
+  // AND it's NOT (an uninstall action FOR an MCP tool - we need to proceed for this case to clear all its auth)
+  // THEN return.
+  if (
+    keys.length === 0 &&
+    pluginKey !== Tools.web_search &&
+    !(action === 'uninstall' && isMCPTool)
+  ) {
     return res.status(200).send();
   }
-  const values = Object.values(auth);

   /** @type {number} */
   let status = 200;
@@ -132,16 +146,53 @@ const updateUserPluginsController = async (req, res) => {
       }
     }
   } else if (action === 'uninstall') {
-    for (let i = 0; i < keys.length; i++) {
-      authService = await deleteUserPluginAuth(user.id, keys[i]);
+    // const isMCPTool was defined earlier
+    if (isMCPTool && keys.length === 0) {
+      // This handles the case where auth: {} is sent for an MCP tool uninstall.
+      // It means "delete all credentials associated with this MCP pluginKey".
+      authService = await deleteUserPluginAuth(user.id, null, true, pluginKey);
       if (authService instanceof Error) {
-        logger.error('[authService]', authService);
+        logger.error(
+          `[authService] Error deleting all auth for MCP tool ${pluginKey}:`,
+          authService,
+        );
         ({ status, message } = authService);
       }
+    } else {
+      // This handles:
+      // 1. Web_search uninstall (keys will be populated with all webSearchKeys if auth was {}).
+      // 2. Other tools uninstall (if keys were provided).
+      // 3. MCP tool uninstall if specific keys were provided in `auth` (not current frontend behavior).
+      // If keys is empty for non-MCP tools (and not web_search), this loop won't run, and nothing is deleted.
+      for (let i = 0; i < keys.length; i++) {
+        authService = await deleteUserPluginAuth(user.id, keys[i]); // Deletes by authField name
+        if (authService instanceof Error) {
+          logger.error('[authService] Error deleting specific auth key:', authService);
+          ({ status, message } = authService);
+        }
+      }
     }
   }

   if (status === 200) {
+    // If auth was updated successfully, disconnect MCP sessions as they might use these credentials
+    if (pluginKey.startsWith(Constants.mcp_prefix)) {
+      try {
+        const mcpManager = getMCPManager(user.id);
+        if (mcpManager) {
+          logger.info(
+            `[updateUserPluginsController] Disconnecting MCP connections for user ${user.id} after plugin auth update for ${pluginKey}.`,
+          );
+          await mcpManager.disconnectUserConnections(user.id);
+        }
+      } catch (disconnectError) {
+        logger.error(
+          `[updateUserPluginsController] Error disconnecting MCP connections for user ${user.id} after plugin auth update:`,
+          disconnectError,
+        );
+        // Do not fail the request for this, but log it.
+      }
+    }
     return res.status(status).send();
   }
@@ -163,7 +214,11 @@ const deleteUserController = async (req, res) => {
     await Balance.deleteMany({ user: user._id }); // delete user balances
     await deletePresets(user.id); // delete user presets
     /* TODO: Delete Assistant Threads */
+    try {
       await deleteConvos(user.id); // delete user convos
+    } catch (error) {
+      logger.error('[deleteUserController] Error deleting user convos, likely no convos', error);
+    }
     await deleteUserPluginAuth(user.id, null, true); // delete user plugin auth
     await deleteUserById(user.id); // delete user
     await deleteAllSharedLinks(user.id); // delete user shared links
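
For orientation, the two uninstall shapes the controller now distinguishes, as request bodies (the pluginKey values are hypothetical; the MCP form follows `Constants.mcp_delimiter`):

// 1) MCP tool with auth: {} -> clears ALL credentials stored under that pluginKey,
//    via deleteUserPluginAuth(user.id, null, true, pluginKey):
const uninstallAllForMCP = { pluginKey: 'search_mcp_github', action: 'uninstall', auth: {} };

// 2) Named keys -> deletes only those authFields, one deleteUserPluginAuth(user.id, key) each:
const uninstallSpecificKeys = {
  pluginKey: 'google',
  action: 'uninstall',
  auth: { GOOGLE_API_KEY: '' },
};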

View file

@@ -1,4 +1,6 @@
 const { nanoid } = require('nanoid');
+const { sendEvent } = require('@librechat/api');
+const { logger } = require('@librechat/data-schemas');
 const { Tools, StepTypes, FileContext } = require('librechat-data-provider');
 const {
   EnvVar,
@@ -12,7 +14,6 @@ const {
 const { processCodeOutput } = require('~/server/services/Files/Code/process');
 const { loadAuthValues } = require('~/server/services/Tools/credentials');
 const { saveBase64Image } = require('~/server/services/Files/process');
-const { logger, sendEvent } = require('~/config');

 class ModelEndHandler {
   /**
@@ -240,9 +241,7 @@ function createToolEndCallback({ req, res, artifactPromises }) {
       if (output.artifact[Tools.web_search]) {
         artifactPromises.push(
           (async () => {
-            const name = `${output.name}_${output.tool_call_id}_${nanoid()}`;
             const attachment = {
-              name,
               type: Tools.web_search,
               messageId: metadata.run_id,
               toolCallId: output.tool_call_id,

View file

@@ -1,13 +1,12 @@
-// const { HttpsProxyAgent } = require('https-proxy-agent');
-// const {
-//   Constants,
-//   ImageDetail,
-//   EModelEndpoint,
-//   resolveHeaders,
-//   validateVisionModel,
-//   mapModelToAzureConfig,
-// } = require('librechat-data-provider');
 require('events').EventEmitter.defaultMaxListeners = 100;
+const { logger } = require('@librechat/data-schemas');
+const {
+  sendEvent,
+  createRun,
+  Tokenizer,
+  memoryInstructions,
+  createMemoryProcessor,
+} = require('@librechat/api');
 const {
   Callback,
   GraphEvents,
@@ -19,25 +18,34 @@
 } = require('@librechat/agents');
 const {
   Constants,
+  Permissions,
   VisionModes,
   ContentTypes,
   EModelEndpoint,
   KnownEndpoints,
+  PermissionTypes,
   isAgentsEndpoint,
   AgentCapabilities,
   bedrockInputSchema,
   removeNullishValues,
 } = require('librechat-data-provider');
-const { getCustomEndpointConfig, checkCapability } = require('~/server/services/Config');
-const { addCacheControl, createContextHandlers } = require('~/app/clients/prompts');
-const { spendTokens, spendStructuredTokens } = require('~/models/spendTokens');
+const { DynamicStructuredTool } = require('@langchain/core/tools');
 const { getBufferString, HumanMessage } = require('@langchain/core/messages');
+const {
+  getCustomEndpointConfig,
+  createGetMCPAuthMap,
+  checkCapability,
+} = require('~/server/services/Config');
+const { addCacheControl, createContextHandlers } = require('~/app/clients/prompts');
+const { initializeAgent } = require('~/server/services/Endpoints/agents/agent');
+const { spendTokens, spendStructuredTokens } = require('~/models/spendTokens');
+const { getFormattedMemories, deleteMemory, setMemory } = require('~/models');
 const { encodeAndFormat } = require('~/server/services/Files/images/encode');
 const initOpenAI = require('~/server/services/Endpoints/openAI/initialize');
-const Tokenizer = require('~/server/services/Tokenizer');
+const { checkAccess } = require('~/server/middleware/roles/access');
 const BaseClient = require('~/app/clients/BaseClient');
-const { logger, sendEvent } = require('~/config');
-const { createRun } = require('./run');
+const { loadAgent } = require('~/models/Agent');
+const { getMCPManager } = require('~/config');

 /**
  * @param {ServerRequest} req
@@ -57,12 +65,8 @@ const legacyContentEndpoints = new Set([KnownEndpoints.groq, KnownEndpoints.deep
 const noSystemModelRegex = [/\b(o1-preview|o1-mini|amazon\.titan-text)\b/gi];

-// const { processMemory, memoryInstructions } = require('~/server/services/Endpoints/agents/memory');
-// const { getFormattedMemories } = require('~/models/Memory');
-// const { getCurrentDateTime } = require('~/utils');
-
 function createTokenCounter(encoding) {
-  return (message) => {
+  return function (message) {
     const countTokens = (text) => Tokenizer.getTokenCount(text, encoding);
     return getTokenCountForMessage(message, countTokens);
   };
@@ -123,6 +127,8 @@ class AgentClient extends BaseClient {
     this.usage;
     /** @type {Record<string, number>} */
     this.indexTokenCountMap = {};
+    /** @type {(messages: BaseMessage[]) => Promise<void>} */
+    this.processMemory;
   }

   /**
@@ -137,55 +143,10 @@ class AgentClient extends BaseClient {
   }

   /**
-   *
-   * Checks if the model is a vision model based on request attachments and sets the appropriate options:
-   * - Sets `this.modelOptions.model` to `gpt-4-vision-preview` if the request is a vision request.
-   * - Sets `this.isVisionModel` to `true` if vision request.
-   * - Deletes `this.modelOptions.stop` if vision request.
+   * `AgentClient` is not opinionated about vision requests, so we don't do anything here
    * @param {MongoFile[]} attachments
    */
-  checkVisionRequest(attachments) {
-    // if (!attachments) {
-    //   return;
-    // }
-    // const availableModels = this.options.modelsConfig?.[this.options.endpoint];
-    // if (!availableModels) {
-    //   return;
-    // }
-    // let visionRequestDetected = false;
-    // for (const file of attachments) {
-    //   if (file?.type?.includes('image')) {
-    //     visionRequestDetected = true;
-    //     break;
-    //   }
-    // }
-    // if (!visionRequestDetected) {
-    //   return;
-    // }
-    // this.isVisionModel = validateVisionModel({ model: this.modelOptions.model, availableModels });
-    // if (this.isVisionModel) {
-    //   delete this.modelOptions.stop;
-    //   return;
-    // }
-    // for (const model of availableModels) {
-    //   if (!validateVisionModel({ model, availableModels })) {
-    //     continue;
-    //   }
-    //   this.modelOptions.model = model;
-    //   this.isVisionModel = true;
-    //   delete this.modelOptions.stop;
-    //   return;
-    // }
-    // if (!availableModels.includes(this.defaultVisionModel)) {
-    //   return;
-    // }
-    // if (!validateVisionModel({ model: this.defaultVisionModel, availableModels })) {
-    //   return;
-    // }
-    // this.modelOptions.model = this.defaultVisionModel;
-    // this.isVisionModel = true;
-    // delete this.modelOptions.stop;
-  }
+  checkVisionRequest() {}

   getSaveOptions() {
     // TODO:
@@ -269,24 +230,6 @@ class AgentClient extends BaseClient {
       .filter(Boolean)
       .join('\n')
       .trim();
-    // this.systemMessage = getCurrentDateTime();
-    // const { withKeys, withoutKeys } = await getFormattedMemories({
-    //   userId: this.options.req.user.id,
-    // });
-    // processMemory({
-    //   userId: this.options.req.user.id,
-    //   message: this.options.req.body.text,
-    //   parentMessageId,
-    //   memory: withKeys,
-    //   thread_id: this.conversationId,
-    // }).catch((error) => {
-    //   logger.error('Memory Agent failed to process memory', error);
-    // });
-    // this.systemMessage += '\n\n' + memoryInstructions;
-    // if (withoutKeys) {
-    //   this.systemMessage += `\n\n# Existing memory about the user:\n${withoutKeys}`;
-    // }

     if (this.options.attachments) {
       const attachments = await this.options.attachments;
@@ -370,6 +313,37 @@ class AgentClient extends BaseClient {
       systemContent = this.augmentedPrompt + systemContent;
     }

+    // Inject MCP server instructions if available
+    const ephemeralAgent = this.options.req.body.ephemeralAgent;
+    let mcpServers = [];
+
+    // Check for ephemeral agent MCP servers
+    if (ephemeralAgent && ephemeralAgent.mcp && ephemeralAgent.mcp.length > 0) {
+      mcpServers = ephemeralAgent.mcp;
+    }
+    // Check for regular agent MCP tools
+    else if (this.options.agent && this.options.agent.tools) {
+      mcpServers = this.options.agent.tools
+        .filter(
+          (tool) =>
+            tool instanceof DynamicStructuredTool && tool.name.includes(Constants.mcp_delimiter),
+        )
+        .map((tool) => tool.name.split(Constants.mcp_delimiter).pop())
+        .filter(Boolean);
+    }
+
+    if (mcpServers.length > 0) {
+      try {
+        const mcpInstructions = getMCPManager().formatInstructionsForContext(mcpServers);
+        if (mcpInstructions) {
+          systemContent = [systemContent, mcpInstructions].filter(Boolean).join('\n\n');
+          logger.debug('[AgentClient] Injected MCP instructions for servers:', mcpServers);
+        }
+      } catch (error) {
+        logger.error('[AgentClient] Failed to inject MCP instructions:', error);
+      }
+    }
+
     if (systemContent) {
       this.options.agent.instructions = systemContent;
     }
@@ -399,9 +373,150 @@ class AgentClient extends BaseClient {
       opts.getReqData({ promptTokens });
     }

+    const withoutKeys = await this.useMemory();
+    if (withoutKeys) {
+      systemContent += `${memoryInstructions}\n\n# Existing memory about the user:\n${withoutKeys}`;
+    }
+
+    if (systemContent) {
+      this.options.agent.instructions = systemContent;
+    }
+
     return result;
   }

+  /**
+   * @returns {Promise<string | undefined>}
+   */
+  async useMemory() {
+    const user = this.options.req.user;
+    if (user.personalization?.memories === false) {
+      return;
+    }
+    const hasAccess = await checkAccess(user, PermissionTypes.MEMORIES, [Permissions.USE]);
+
+    if (!hasAccess) {
+      logger.debug(
+        `[api/server/controllers/agents/client.js #useMemory] User ${user.id} does not have USE permission for memories`,
+      );
+      return;
+    }
+    /** @type {TCustomConfig['memory']} */
+    const memoryConfig = this.options.req?.app?.locals?.memory;
+    if (!memoryConfig || memoryConfig.disabled === true) {
+      return;
+    }
+
+    /** @type {Agent} */
+    let prelimAgent;
+    const allowedProviders = new Set(
+      this.options.req?.app?.locals?.[EModelEndpoint.agents]?.allowedProviders,
+    );
+    try {
+      if (memoryConfig.agent?.id != null && memoryConfig.agent.id !== this.options.agent.id) {
+        prelimAgent = await loadAgent({
+          req: this.options.req,
+          agent_id: memoryConfig.agent.id,
+          endpoint: EModelEndpoint.agents,
+        });
+      } else if (
+        memoryConfig.agent?.id == null &&
+        memoryConfig.agent?.model != null &&
+        memoryConfig.agent?.provider != null
+      ) {
+        prelimAgent = { id: Constants.EPHEMERAL_AGENT_ID, ...memoryConfig.agent };
+      }
+    } catch (error) {
+      logger.error(
+        '[api/server/controllers/agents/client.js #useMemory] Error loading agent for memory',
+        error,
+      );
+    }
+
+    const agent = await initializeAgent({
+      req: this.options.req,
+      res: this.options.res,
+      agent: prelimAgent,
+      allowedProviders,
+    });
+
+    if (!agent) {
+      logger.warn(
+        '[api/server/controllers/agents/client.js #useMemory] No agent found for memory',
+        memoryConfig,
+      );
+      return;
+    }
+
+    const llmConfig = Object.assign(
+      {
+        provider: agent.provider,
+        model: agent.model,
+      },
+      agent.model_parameters,
+    );
+
+    /** @type {import('@librechat/api').MemoryConfig} */
+    const config = {
+      validKeys: memoryConfig.validKeys,
+      instructions: agent.instructions,
+      llmConfig,
+      tokenLimit: memoryConfig.tokenLimit,
+    };
+
+    const userId = this.options.req.user.id + '';
+    const messageId = this.responseMessageId + '';
+    const conversationId = this.conversationId + '';
+    const [withoutKeys, processMemory] = await createMemoryProcessor({
+      userId,
+      config,
+      messageId,
+      conversationId,
+      memoryMethods: {
+        setMemory,
+        deleteMemory,
+        getFormattedMemories,
+      },
+      res: this.options.res,
+    });
+
+    this.processMemory = processMemory;
+    return withoutKeys;
+  }
+
+  /**
+   * @param {BaseMessage[]} messages
+   * @returns {Promise<void | (TAttachment | null)[]>}
+   */
+  async runMemory(messages) {
+    try {
+      if (this.processMemory == null) {
+        return;
+      }
+      /** @type {TCustomConfig['memory']} */
+      const memoryConfig = this.options.req?.app?.locals?.memory;
+      const messageWindowSize = memoryConfig?.messageWindowSize ?? 5;
+
+      let messagesToProcess = [...messages];
+      if (messages.length > messageWindowSize) {
+        for (let i = messages.length - messageWindowSize; i >= 0; i--) {
+          const potentialWindow = messages.slice(i, i + messageWindowSize);
+          if (potentialWindow[0]?.role === 'user') {
+            messagesToProcess = [...potentialWindow];
+            break;
+          }
+        }
+
+        if (messagesToProcess.length === messages.length) {
+          messagesToProcess = [...messages.slice(-messageWindowSize)];
+        }
+      }
+      return await this.processMemory(messagesToProcess);
+    } catch (error) {
+      logger.error('Memory Agent failed to process memory', error);
+    }
+  }
+
   /** @type {sendCompletion} */
   async sendCompletion(payload, opts = {}) {
     await this.chatCompletion({
@@ -544,100 +659,13 @@ class AgentClient extends BaseClient {
     let config;
     /** @type {ReturnType<createRun>} */
     let run;
+    /** @type {Promise<(TAttachment | null)[] | undefined>} */
+    let memoryPromise;
     try {
       if (!abortController) {
         abortController = new AbortController();
       }

-      // if (this.options.headers) {
-      //   opts.defaultHeaders = { ...opts.defaultHeaders, ...this.options.headers };
-      // }
-
-      // if (this.options.proxy) {
-      //   opts.httpAgent = new HttpsProxyAgent(this.options.proxy);
-      // }
-
-      // if (this.isVisionModel) {
-      //   modelOptions.max_tokens = 4000;
-      // }
-
-      // /** @type {TAzureConfig | undefined} */
-      // const azureConfig = this.options?.req?.app?.locals?.[EModelEndpoint.azureOpenAI];
-
-      // if (
-      //   (this.azure && this.isVisionModel && azureConfig) ||
-      //   (azureConfig && this.isVisionModel && this.options.endpoint === EModelEndpoint.azureOpenAI)
-      // ) {
-      //   const { modelGroupMap, groupMap } = azureConfig;
-      //   const {
-      //     azureOptions,
-      //     baseURL,
-      //     headers = {},
-      //     serverless,
-      //   } = mapModelToAzureConfig({
-      //     modelName: modelOptions.model,
-      //     modelGroupMap,
-      //     groupMap,
-      //   });
-      //   opts.defaultHeaders = resolveHeaders(headers);
-      //   this.langchainProxy = extractBaseURL(baseURL);
-      //   this.apiKey = azureOptions.azureOpenAIApiKey;
-
-      //   const groupName = modelGroupMap[modelOptions.model].group;
-      //   this.options.addParams = azureConfig.groupMap[groupName].addParams;
-      //   this.options.dropParams = azureConfig.groupMap[groupName].dropParams;
-      //   // Note: `forcePrompt` not re-assigned as only chat models are vision models
-
-      //   this.azure = !serverless && azureOptions;
-      //   this.azureEndpoint =
-      //     !serverless && genAzureChatCompletion(this.azure, modelOptions.model, this);
-      // }
-
-      // if (this.azure || this.options.azure) {
-      //   /* Azure Bug, extremely short default `max_tokens` response */
-      //   if (!modelOptions.max_tokens && modelOptions.model === 'gpt-4-vision-preview') {
-      //     modelOptions.max_tokens = 4000;
-      //   }
-
-      //   /* Azure does not accept `model` in the body, so we need to remove it. */
-      //   delete modelOptions.model;
-
-      //   opts.baseURL = this.langchainProxy
-      //     ? constructAzureURL({
-      //       baseURL: this.langchainProxy,
-      //       azureOptions: this.azure,
-      //     })
-      //     : this.azureEndpoint.split(/(?<!\/)\/(chat|completion)\//)[0];
-
-      //   opts.defaultQuery = { 'api-version': this.azure.azureOpenAIApiVersion };
-      //   opts.defaultHeaders = { ...opts.defaultHeaders, 'api-key': this.apiKey };
-      // }
-
-      // if (process.env.OPENAI_ORGANIZATION) {
-      //   opts.organization = process.env.OPENAI_ORGANIZATION;
-      // }
-
-      // if (this.options.addParams && typeof this.options.addParams === 'object') {
-      //   modelOptions = {
-      //     ...modelOptions,
-      //     ...this.options.addParams,
-      //   };
-      //   logger.debug('[api/server/controllers/agents/client.js #chatCompletion] added params', {
-      //     addParams: this.options.addParams,
-      //     modelOptions,
-      //   });
-      // }
-
-      // if (this.options.dropParams && Array.isArray(this.options.dropParams)) {
-      //   this.options.dropParams.forEach((param) => {
-      //     delete modelOptions[param];
-      //   });
-      //   logger.debug('[api/server/controllers/agents/client.js #chatCompletion] dropped params', {
-      //     dropParams: this.options.dropParams,
-      //     modelOptions,
-      //   });
-      // }
-
       /** @type {TCustomConfig['endpoints']['agents']} */
       const agentsEConfig = this.options.req.app.locals[EModelEndpoint.agents];
@@ -647,6 +675,7 @@ class AgentClient extends BaseClient {
         last_agent_index: this.agentConfigs?.size ?? 0,
         user_id: this.user ?? this.options.req.user?.id,
         hide_sequential_outputs: this.options.agent.hide_sequential_outputs,
+        user: this.options.req.user,
       },
       recursionLimit: agentsEConfig?.recursionLimit,
       signal: abortController.signal,
@@ -654,6 +683,8 @@ class AgentClient extends BaseClient {
       version: 'v2',
     };

+    const getUserMCPAuthMap = await createGetMCPAuthMap();
+
     const toolSet = new Set((this.options.agent.tools ?? []).map((tool) => tool && tool.name));
     let { messages: initialMessages, indexTokenCountMap } = formatAgentMessages(
       payload,
@@ -734,6 +765,10 @@ class AgentClient extends BaseClient {
         messages = addCacheControl(messages);
       }

+      if (i === 0) {
+        memoryPromise = this.runMemory(messages);
+      }
+
       run = await createRun({
         agent,
         req: this.options.req,
@@ -769,10 +804,23 @@ class AgentClient extends BaseClient {
         run.Graph.contentData = contentData;
       }

-      const encoding = this.getEncoding();
+      try {
+        if (getUserMCPAuthMap) {
+          config.configurable.userMCPAuthMap = await getUserMCPAuthMap({
+            tools: agent.tools,
+            userId: this.options.req.user.id,
+          });
+        }
+      } catch (err) {
+        logger.error(
+          `[api/server/controllers/agents/client.js #chatCompletion] Error getting custom user vars for agent ${agent.id}`,
+          err,
+        );
+      }
+
       await run.processStream({ messages }, config, {
         keepContent: i !== 0,
-        tokenCounter: createTokenCounter(encoding),
+        tokenCounter: createTokenCounter(this.getEncoding()),
         indexTokenCountMap: currentIndexCountMap,
         maxContextTokens: agent.maxContextTokens,
         callbacks: {
@@ -887,6 +935,12 @@ class AgentClient extends BaseClient {
       });

       try {
+        if (memoryPromise) {
+          const attachments = await memoryPromise;
+          if (attachments && attachments.length > 0) {
+            this.artifactPromises.push(...attachments);
+          }
+        }
         await this.recordCollectedUsage({ context: 'message' });
       } catch (err) {
         logger.error(
@@ -895,6 +949,12 @@ class AgentClient extends BaseClient {
         );
       }
     } catch (err) {
+      if (memoryPromise) {
+        const attachments = await memoryPromise;
+        if (attachments && attachments.length > 0) {
+          this.artifactPromises.push(...attachments);
+        }
+      }
       logger.error(
         '[api/server/controllers/agents/client.js #sendCompletion] Operation aborted',
         err,
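
The window selection in `runMemory` walks backward to find the most recent `messageWindowSize`-length slice that starts on a user turn, falling back to the plain tail. A standalone restatement with toy data (roles only; the default window of 5 mirrors the hunk above):

const messages = [
  { role: 'user' },      // 0
  { role: 'assistant' }, // 1
  { role: 'user' },      // 2
  { role: 'assistant' }, // 3
  { role: 'user' },      // 4
  { role: 'assistant' }, // 5
];
const windowSize = 5;
let window = [...messages];
if (messages.length > windowSize) {
  for (let i = messages.length - windowSize; i >= 0; i--) {
    const candidate = messages.slice(i, i + windowSize);
    if (candidate[0]?.role === 'user') {
      window = candidate; // prefer a window that opens on a user turn
      break;
    }
  }
  if (window.length === messages.length) {
    window = messages.slice(-windowSize); // no user-led window found
  }
}
// i=1 -> slice(1, 6) starts with 'assistant'; i=0 -> slice(0, 5) starts with 'user',
// so messages 0-4 are the ones handed to processMemory.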

View file

@@ -1,94 +0,0 @@
-const { Run, Providers } = require('@librechat/agents');
-const { providerEndpointMap, KnownEndpoints } = require('librechat-data-provider');
-
-/**
- * @typedef {import('@librechat/agents').t} t
- * @typedef {import('@librechat/agents').StandardGraphConfig} StandardGraphConfig
- * @typedef {import('@librechat/agents').StreamEventData} StreamEventData
- * @typedef {import('@librechat/agents').EventHandler} EventHandler
- * @typedef {import('@librechat/agents').GraphEvents} GraphEvents
- * @typedef {import('@librechat/agents').LLMConfig} LLMConfig
- * @typedef {import('@librechat/agents').IState} IState
- */
-
-const customProviders = new Set([
-  Providers.XAI,
-  Providers.OLLAMA,
-  Providers.DEEPSEEK,
-  Providers.OPENROUTER,
-]);
-
-/**
- * Creates a new Run instance with custom handlers and configuration.
- *
- * @param {Object} options - The options for creating the Run instance.
- * @param {ServerRequest} [options.req] - The server request.
- * @param {string | undefined} [options.runId] - Optional run ID; otherwise, a new run ID will be generated.
- * @param {Agent} options.agent - The agent for this run.
- * @param {AbortSignal} options.signal - The signal for this run.
- * @param {Record<GraphEvents, EventHandler> | undefined} [options.customHandlers] - Custom event handlers.
- * @param {boolean} [options.streaming=true] - Whether to use streaming.
- * @param {boolean} [options.streamUsage=true] - Whether to stream usage information.
- * @returns {Promise<Run<IState>>} A promise that resolves to a new Run instance.
- */
-async function createRun({
-  runId,
-  agent,
-  signal,
-  customHandlers,
-  streaming = true,
-  streamUsage = true,
-}) {
-  const provider = providerEndpointMap[agent.provider] ?? agent.provider;
-
-  /** @type {LLMConfig} */
-  const llmConfig = Object.assign(
-    {
-      provider,
-      streaming,
-      streamUsage,
-    },
-    agent.model_parameters,
-  );
-
-  /** Resolves issues with new OpenAI usage field */
-  if (
-    customProviders.has(agent.provider) ||
-    (agent.provider === Providers.OPENAI && agent.endpoint !== agent.provider)
-  ) {
-    llmConfig.streamUsage = false;
-    llmConfig.usage = true;
-  }
-
-  /** @type {'reasoning_content' | 'reasoning'} */
-  let reasoningKey;
-  if (
-    llmConfig.configuration?.baseURL?.includes(KnownEndpoints.openrouter) ||
-    (agent.endpoint && agent.endpoint.toLowerCase().includes(KnownEndpoints.openrouter))
-  ) {
-    reasoningKey = 'reasoning';
-  }
-
-  /** @type {StandardGraphConfig} */
-  const graphConfig = {
-    signal,
-    llmConfig,
-    reasoningKey,
-    tools: agent.tools,
-    instructions: agent.instructions,
-    additional_instructions: agent.additional_instructions,
-    // toolEnd: agent.end_after_tools,
-  };
-
-  // TEMPORARY FOR TESTING
-  if (agent.provider === Providers.ANTHROPIC || agent.provider === Providers.BEDROCK) {
-    graphConfig.streamBuffer = 2000;
-  }
-
-  return Run.create({
-    runId,
-    graphConfig,
-    customHandlers,
-  });
-}
-
-module.exports = { createRun };
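
This local factory is deleted because client.js now imports `createRun` from `@librechat/api` (see its import hunk earlier). A hedged call-site sketch mirroring the deleted signature (option values hypothetical; the package's exact option set may differ):

const { createRun } = require('@librechat/api');

const run = await createRun({
  agent, // loaded Agent carrying provider, model_parameters, and tools
  signal: abortController.signal,
  customHandlers: eventHandlers, // Record<GraphEvents, EventHandler>
  streaming: true,
  streamUsage: true,
});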

View file

@@ -1,9 +1,9 @@
 const fs = require('fs').promises;
 const { nanoid } = require('nanoid');
+const { logger } = require('@librechat/data-schemas');
 const {
   Tools,
   Constants,
-  FileContext,
   FileSources,
   SystemRoles,
   EToolResources,
@@ -16,16 +16,16 @@
   deleteAgent,
   getListAgents,
 } = require('~/models/Agent');
+const { uploadImageBuffer, filterFile } = require('~/server/services/Files/process');
 const { getStrategyFunctions } = require('~/server/services/Files/strategies');
 const { resizeAvatar } = require('~/server/services/Files/images/avatar');
 const { refreshS3Url } = require('~/server/services/Files/S3/crud');
-const { filterFile } = require('~/server/services/Files/process');
 const { updateAction, getActions } = require('~/models/Action');
+const { getCachedTools } = require('~/server/services/Config');
 const { updateAgentProjects } = require('~/models/Agent');
 const { getProjectByName } = require('~/models/Project');
-const { deleteFileByFilter } = require('~/models/File');
 const { revertAgentVersion } = require('~/models/Agent');
-const { logger } = require('~/config');
+const { deleteFileByFilter } = require('~/models/File');

 const systemTools = {
   [Tools.execute_code]: true,
@@ -47,8 +47,9 @@ const createAgentHandler = async (req, res) => {
     agentData.tools = [];

+    const availableTools = await getCachedTools({ includeGlobal: true });
+
     for (const tool of tools) {
-      if (req.app.locals.availableTools[tool]) {
+      if (availableTools[tool]) {
         agentData.tools.push(tool);
       }
@@ -169,12 +170,18 @@ const updateAgentHandler = async (req, res) => {
       });
     }

+    /** @type {boolean} */
+    const isProjectUpdate = (projectIds?.length ?? 0) > 0 || (removeProjectIds?.length ?? 0) > 0;
+
     let updatedAgent =
       Object.keys(updateData).length > 0
-        ? await updateAgent({ id }, updateData, { updatingUserId: req.user.id })
+        ? await updateAgent({ id }, updateData, {
+            updatingUserId: req.user.id,
+            skipVersioning: isProjectUpdate,
+          })
         : existingAgent;

-    if (projectIds || removeProjectIds) {
+    if (isProjectUpdate) {
       updatedAgent = await updateAgentProjects({
         user: req.user,
         agentId: id,
@@ -387,6 +394,7 @@ const uploadAgentAvatarHandler = async (req, res) => {
       buffer: resizedBuffer,
       userId: req.user.id,
       manual: 'false',
+      agentId: agent_id,
     });

     const image = {
@@ -438,7 +446,7 @@
     try {
       await fs.unlink(req.file.path);
       logger.debug('[/:agent_id/avatar] Temp. image upload file deleted');
-    } catch (error) {
+    } catch {
       logger.debug('[/:agent_id/avatar] Temp. image upload file already deleted');
     }
   }
} }

View file

@@ -1,4 +1,5 @@
 const fs = require('fs').promises;
+const { logger } = require('@librechat/data-schemas');
 const { FileContext } = require('librechat-data-provider');
 const { uploadImageBuffer, filterFile } = require('~/server/services/Files/process');
 const validateAuthor = require('~/server/middleware/assistants/validateAuthor');
@@ -6,9 +7,9 @@ const { getStrategyFunctions } = require('~/server/services/Files/strategies');
 const { deleteAssistantActions } = require('~/server/services/ActionService');
 const { updateAssistantDoc, getAssistants } = require('~/models/Assistant');
 const { getOpenAIClient, fetchAssistants } = require('./helpers');
+const { getCachedTools } = require('~/server/services/Config');
 const { manifestToolMap } = require('~/app/clients/tools');
 const { deleteFileByFilter } = require('~/models/File');
-const { logger } = require('~/config');

 /**
  * Create an assistant.
@@ -30,21 +31,20 @@ const createAssistant = async (req, res) => {
   delete assistantData.conversation_starters;
   delete assistantData.append_current_datetime;

+  const toolDefinitions = await getCachedTools({ includeGlobal: true });
   assistantData.tools = tools
     .map((tool) => {
       if (typeof tool !== 'string') {
         return tool;
       }

-      const toolDefinitions = req.app.locals.availableTools;
       const toolDef = toolDefinitions[tool];
       if (!toolDef && manifestToolMap[tool] && manifestToolMap[tool].toolkit === true) {
-        return (
-          Object.entries(toolDefinitions)
-            .filter(([key]) => key.startsWith(`${tool}_`))
-            // eslint-disable-next-line no-unused-vars
-            .map(([_, val]) => val)
-        );
+        return Object.entries(toolDefinitions)
+          .filter(([key]) => key.startsWith(`${tool}_`))
+          .map(([_, val]) => val);
       }

       return toolDef;
@@ -135,21 +135,21 @@ const patchAssistant = async (req, res) => {
     append_current_datetime,
     ...updateData
   } = req.body;

+  const toolDefinitions = await getCachedTools({ includeGlobal: true });
   updateData.tools = (updateData.tools ?? [])
     .map((tool) => {
       if (typeof tool !== 'string') {
         return tool;
       }

-      const toolDefinitions = req.app.locals.availableTools;
       const toolDef = toolDefinitions[tool];
       if (!toolDef && manifestToolMap[tool] && manifestToolMap[tool].toolkit === true) {
-        return (
-          Object.entries(toolDefinitions)
-            .filter(([key]) => key.startsWith(`${tool}_`))
-            // eslint-disable-next-line no-unused-vars
-            .map(([_, val]) => val)
-        );
+        return Object.entries(toolDefinitions)
+          .filter(([key]) => key.startsWith(`${tool}_`))
+          .map(([_, val]) => val);
       }

       return toolDef;

View file

@@ -1,10 +1,11 @@
+const { logger } = require('@librechat/data-schemas');
 const { ToolCallTypes } = require('librechat-data-provider');
 const validateAuthor = require('~/server/middleware/assistants/validateAuthor');
 const { validateAndUpdateTool } = require('~/server/services/ActionService');
+const { getCachedTools } = require('~/server/services/Config');
 const { updateAssistantDoc } = require('~/models/Assistant');
 const { manifestToolMap } = require('~/app/clients/tools');
 const { getOpenAIClient } = require('./helpers');
-const { logger } = require('~/config');

 /**
  * Create an assistant.
@@ -27,21 +28,20 @@ const createAssistant = async (req, res) => {
   delete assistantData.conversation_starters;
   delete assistantData.append_current_datetime;

+  const toolDefinitions = await getCachedTools({ includeGlobal: true });
   assistantData.tools = tools
     .map((tool) => {
       if (typeof tool !== 'string') {
         return tool;
       }

-      const toolDefinitions = req.app.locals.availableTools;
       const toolDef = toolDefinitions[tool];
       if (!toolDef && manifestToolMap[tool] && manifestToolMap[tool].toolkit === true) {
-        return (
-          Object.entries(toolDefinitions)
-            .filter(([key]) => key.startsWith(`${tool}_`))
-            // eslint-disable-next-line no-unused-vars
-            .map(([_, val]) => val)
-        );
+        return Object.entries(toolDefinitions)
+          .filter(([key]) => key.startsWith(`${tool}_`))
+          .map(([_, val]) => val);
       }

       return toolDef;
@@ -125,13 +125,13 @@ const updateAssistant = async ({ req, openai, assistant_id, updateData }) => {
   let hasFileSearch = false;
   for (const tool of updateData.tools ?? []) {
-    const toolDefinitions = req.app.locals.availableTools;
+    const toolDefinitions = await getCachedTools({ includeGlobal: true });
     let actualTool = typeof tool === 'string' ? toolDefinitions[tool] : tool;

     if (!actualTool && manifestToolMap[tool] && manifestToolMap[tool].toolkit === true) {
       actualTool = Object.entries(toolDefinitions)
         .filter(([key]) => key.startsWith(`${tool}_`))
-        // eslint-disable-next-line no-unused-vars
         .map(([_, val]) => val);
     } else if (!actualTool) {
       continue;

View file

@@ -1,22 +1,22 @@
 require('dotenv').config();
+const fs = require('fs');
 const path = require('path');
 require('module-alias')({ base: path.resolve(__dirname, '..') });
 const cors = require('cors');
 const axios = require('axios');
 const express = require('express');
-const compression = require('compression');
 const passport = require('passport');
-const mongoSanitize = require('express-mongo-sanitize');
-const fs = require('fs');
+const compression = require('compression');
 const cookieParser = require('cookie-parser');
+const { isEnabled } = require('@librechat/api');
+const { logger } = require('@librechat/data-schemas');
+const mongoSanitize = require('express-mongo-sanitize');
 const { connectDb, indexSync } = require('~/db');
-const { jwtLogin, passportLogin } = require('~/strategies');
-const { isEnabled } = require('~/server/utils');
-const { ldapLogin } = require('~/strategies');
-const { logger } = require('~/config');
 const validateImageRequest = require('./middleware/validateImageRequest');
+const { jwtLogin, ldapLogin, passportLogin } = require('~/strategies');
 const errorController = require('./controllers/ErrorController');
+const initializeMCP = require('./services/initializeMCP');
 const configureSocialLogins = require('./socialLogins');
 const AppService = require('./services/AppService');
 const staticCache = require('./utils/staticCache');
@@ -39,7 +39,9 @@ const startServer = async () => {
   await connectDb();
   logger.info('Connected to MongoDB');
-  await indexSync();
+  indexSync().catch((err) => {
+    logger.error('[indexSync] Background sync failed:', err);
+  });

   app.disable('x-powered-by');
   app.set('trust proxy', trusted_proxy);
@@ -117,8 +119,9 @@ const startServer = async () => {
   app.use('/api/agents', routes.agents);
   app.use('/api/banner', routes.banner);
   app.use('/api/bedrock', routes.bedrock);
+  app.use('/api/memories', routes.memories);
   app.use('/api/tags', routes.tags);
+  app.use('/api/mcp', routes.mcp);

   app.use((req, res) => {
     res.set({
@@ -142,6 +145,8 @@ const startServer = async () => {
     } else {
       logger.info(`Server listening at http://${host == '0.0.0.0' ? 'localhost' : host}:${port}`);
     }
+
+    initializeMCP(app);
   });
 };
@@ -184,5 +189,5 @@ process.on('uncaughtException', (err) => {
   process.exit(1);
 });

-// export app for easier testing purposes
+/** Export app for easier testing purposes */
 module.exports = app;

View file

@@ -1,5 +1,5 @@
-const checkAdmin = require('./checkAdmin');
-const { checkAccess, generateCheckAccess } = require('./generateCheckAccess');
+const checkAdmin = require('./admin');
+const { checkAccess, generateCheckAccess } = require('./access');

 module.exports = {
   checkAdmin,

View file

@@ -1,8 +1,8 @@
+const { isEnabled } = require('@librechat/api');
 const { Constants, ViolationTypes, Time } = require('librechat-data-provider');
 const { searchConversation } = require('~/models/Conversation');
 const denyRequest = require('~/server/middleware/denyRequest');
 const { logViolation, getLogStores } = require('~/cache');
-const { isEnabled } = require('~/server/utils');

 const { USE_REDIS, CONVO_ACCESS_VIOLATION_SCORE: score = 0 } = process.env ?? {};

View file

@@ -1,8 +1,10 @@
 const express = require('express');
 const jwt = require('jsonwebtoken');
+const { getAccessToken } = require('@librechat/api');
+const { logger } = require('@librechat/data-schemas');
 const { CacheKeys } = require('librechat-data-provider');
-const { getAccessToken } = require('~/server/services/TokenService');
-const { logger, getFlowStateManager } = require('~/config');
+const { findToken, updateToken, createToken } = require('~/models');
+const { getFlowStateManager } = require('~/config');
 const { getLogStores } = require('~/cache');

 const router = express.Router();

@@ -28,18 +30,19 @@ router.get('/:action_id/oauth/callback', async (req, res) => {
   try {
     decodedState = jwt.verify(state, JWT_SECRET);
   } catch (err) {
+    logger.error('Error verifying state parameter:', err);
     await flowManager.failFlow(identifier, 'oauth', 'Invalid or expired state parameter');
-    return res.status(400).send('Invalid or expired state parameter');
+    return res.redirect('/oauth/error?error=invalid_state');
   }

   if (decodedState.action_id !== action_id) {
     await flowManager.failFlow(identifier, 'oauth', 'Mismatched action ID in state parameter');
-    return res.status(400).send('Mismatched action ID in state parameter');
+    return res.redirect('/oauth/error?error=invalid_state');
   }

   if (!decodedState.user) {
     await flowManager.failFlow(identifier, 'oauth', 'Invalid user ID in state parameter');
-    return res.status(400).send('Invalid user ID in state parameter');
+    return res.redirect('/oauth/error?error=invalid_state');
   }

   identifier = `${decodedState.user}:${action_id}`;
   const flowState = await flowManager.getFlowState(identifier, 'oauth');

@@ -47,90 +50,34 @@ router.get('/:action_id/oauth/callback', async (req, res) => {
     throw new Error('OAuth flow not found');
   }

-    const tokenData = await getAccessToken({
-      code,
-      userId: decodedState.user,
-      identifier,
-      client_url: flowState.metadata.client_url,
-      redirect_uri: flowState.metadata.redirect_uri,
-      /** Encrypted values */
-      encrypted_oauth_client_id: flowState.metadata.encrypted_oauth_client_id,
-      encrypted_oauth_client_secret: flowState.metadata.encrypted_oauth_client_secret,
-    });
+    const tokenData = await getAccessToken(
+      {
+        code,
+        userId: decodedState.user,
+        identifier,
+        client_url: flowState.metadata.client_url,
+        redirect_uri: flowState.metadata.redirect_uri,
+        token_exchange_method: flowState.metadata.token_exchange_method,
+        /** Encrypted values */
+        encrypted_oauth_client_id: flowState.metadata.encrypted_oauth_client_id,
+        encrypted_oauth_client_secret: flowState.metadata.encrypted_oauth_client_secret,
+      },
+      {
+        findToken,
+        updateToken,
+        createToken,
+      },
    );
    await flowManager.completeFlow(identifier, 'oauth', tokenData);

-    res.send(`
-      <!DOCTYPE html>
-      <html>
-        <head>
-          <title>Authentication Successful</title>
-          <meta name="viewport" content="width=device-width, initial-scale=1.0">
-          <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
-          <style>
-            body {
-              font-family: ui-sans-serif, system-ui, -apple-system, BlinkMacSystemFont;
-              background-color: rgb(249, 250, 251);
-              margin: 0;
-              padding: 2rem;
-              display: flex;
-              justify-content: center;
-              align-items: center;
-              min-height: 100vh;
-            }
-            .card {
-              background-color: white;
-              border-radius: 0.5rem;
-              padding: 2rem;
-              max-width: 28rem;
-              width: 100%;
-              box-shadow: 0 4px 6px -1px rgb(0 0 0 / 0.1), 0 2px 4px -2px rgb(0 0 0 / 0.1);
-              text-align: center;
-            }
-            .heading {
-              color: rgb(17, 24, 39);
-              font-size: 1.875rem;
-              font-weight: 700;
-              margin: 0 0 1rem;
-            }
-            .description {
-              color: rgb(75, 85, 99);
-              font-size: 0.875rem;
-              margin: 0.5rem 0;
-            }
-            .countdown {
-              color: rgb(99, 102, 241);
-              font-weight: 500;
-            }
-          </style>
-        </head>
-        <body>
-          <div class="card">
-            <h1 class="heading">Authentication Successful</h1>
-            <p class="description">
-              Your authentication was successful. This window will close in
-              <span class="countdown" id="countdown">3</span> seconds.
-            </p>
-          </div>
-          <script>
-            let secondsLeft = 3;
-            const countdownElement = document.getElementById('countdown');
-            const countdown = setInterval(() => {
-              secondsLeft--;
-              countdownElement.textContent = secondsLeft;
-
-              if (secondsLeft <= 0) {
-                clearInterval(countdown);
-                window.close();
-              }
-            }, 1000);
-          </script>
-        </body>
-      </html>
-    `);
+    /** Redirect to React success page */
+    const serverName = flowState.metadata?.action_name || `Action ${action_id}`;
+    const redirectUrl = `/oauth/success?serverName=${encodeURIComponent(serverName)}`;
+    res.redirect(redirectUrl);
   } catch (error) {
     logger.error('Error in OAuth callback:', error);
     await flowManager.failFlow(identifier, 'oauth', error);
-    res.status(500).send('Authentication failed. Please try again.');
+    res.redirect('/oauth/error?error=callback_failed');
   }
 });

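The callback now hands the result to the client-side `/oauth/success` and `/oauth/error` pages instead of rendering an inline countdown page. A sketch of how such a page can recover its context from the query string (the element ID is illustrative, not taken from the client code):

const params = new URLSearchParams(window.location.search);
// On /oauth/success, `serverName` is set by the redirect above;
// on /oauth/error, `error` carries invalid_state or callback_failed.
const serverName = params.get('serverName');
document.getElementById('oauth-status').textContent = serverName
  ? `Authentication successful for ${serverName}`
  : 'Authentication successful';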
View file

@@ -1,10 +1,11 @@
 const express = require('express');
+const { logger } = require('@librechat/data-schemas');
 const { CacheKeys, defaultSocialLogins, Constants } = require('librechat-data-provider');
+const { getCustomConfig } = require('~/server/services/Config/getCustomConfig');
 const { getLdapConfig } = require('~/server/services/Config/ldap');
 const { getProjectByName } = require('~/models/Project');
 const { isEnabled } = require('~/server/utils');
 const { getLogStores } = require('~/cache');
-const { logger } = require('~/config');

 const router = express.Router();

 const emailLoginEnabled =

@@ -21,6 +22,7 @@ const publicSharedLinksEnabled =
 router.get('/', async function (req, res) {
   const cache = getLogStores(CacheKeys.CONFIG_STORE);
   const cachedStartupConfig = await cache.get(CacheKeys.STARTUP_CONFIG);
+
   if (cachedStartupConfig) {
     res.send(cachedStartupConfig);

@@ -96,6 +98,18 @@ router.get('/', async function (req, res) {
     bundlerURL: process.env.SANDPACK_BUNDLER_URL,
     staticBundlerURL: process.env.SANDPACK_STATIC_BUNDLER_URL,
   };

+  payload.mcpServers = {};
+  const config = await getCustomConfig();
+  if (config?.mcpServers != null) {
+    for (const serverName in config.mcpServers) {
+      const serverConfig = config.mcpServers[serverName];
+      payload.mcpServers[serverName] = {
+        customUserVars: serverConfig?.customUserVars || {},
+      };
+    }
+  }
+
   /** @type {TCustomConfig['webSearch']} */
   const webSearchConfig = req.app.locals.webSearch;
   if (

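Only each MCP server's `customUserVars` are copied into the public startup payload, so the rest of the server configuration (commands, URLs, credentials) is never sent to the browser. A sketch of the resulting payload shape, with an assumed `github` server whose variable definition is purely illustrative:

payload.mcpServers = {
  github: {
    customUserVars: {
      GITHUB_TOKEN: { title: 'GitHub Token', description: 'Personal access token' },
    },
  },
};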
View file

@@ -65,8 +65,14 @@ router.post('/gen_title', async (req, res) => {
   let title = await titleCache.get(key);

   if (!title) {
-    await sleep(2500);
-    title = await titleCache.get(key);
+    // Retry every 1s for up to 20s
+    for (let i = 0; i < 20; i++) {
+      await sleep(1000);
+      title = await titleCache.get(key);
+      if (title) {
+        break;
+      }
+    }
   }

   if (title) {

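The route now polls the title cache once per second for up to twenty seconds instead of sleeping once and hoping the title has landed. The same idea as a reusable helper, a sketch assuming any async getter:

// Poll `get` every `intervalMs` until it yields a truthy value,
// giving up (and resolving undefined) after `attempts` tries.
async function pollUntil(get, { attempts = 20, intervalMs = 1000 } = {}) {
  for (let i = 0; i < attempts; i++) {
    await new Promise((resolve) => setTimeout(resolve, intervalMs));
    const value = await get();
    if (value) {
      return value;
    }
  }
  return undefined;
}

// const title = await pollUntil(() => titleCache.get(key));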
View file

@@ -2,8 +2,8 @@ const fs = require('fs');
 const path = require('path');
 const crypto = require('crypto');
 const multer = require('multer');
+const { sanitizeFilename } = require('@librechat/api');
 const { fileConfig: defaultFileConfig, mergeFileConfig } = require('librechat-data-provider');
-const { sanitizeFilename } = require('~/server/utils/handleText');
 const { getCustomConfig } = require('~/server/services/Config');

 const storage = multer.diskStorage({
View file

@@ -0,0 +1,571 @@
/* eslint-disable no-unused-vars */
/* eslint-disable jest/no-done-callback */
const fs = require('fs');
const os = require('os');
const path = require('path');
const crypto = require('crypto');
const { createMulterInstance, storage, importFileFilter } = require('./multer');
// Mock only the config service that requires external dependencies
jest.mock('~/server/services/Config', () => ({
getCustomConfig: jest.fn(() =>
Promise.resolve({
fileConfig: {
endpoints: {
openAI: {
supportedMimeTypes: ['image/jpeg', 'image/png', 'application/pdf'],
},
default: {
supportedMimeTypes: ['image/jpeg', 'image/png', 'text/plain'],
},
},
serverFileSizeLimit: 10000000, // 10MB
},
}),
),
}));
describe('Multer Configuration', () => {
let tempDir;
let mockReq;
let mockFile;
beforeEach(() => {
// Create a temporary directory for each test
tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'multer-test-'));
mockReq = {
user: { id: 'test-user-123' },
app: {
locals: {
paths: {
uploads: tempDir,
},
},
},
body: {},
originalUrl: '/api/files/upload',
};
mockFile = {
originalname: 'test-file.jpg',
mimetype: 'image/jpeg',
size: 1024,
};
// Clear mocks
jest.clearAllMocks();
});
afterEach(() => {
// Clean up temporary directory
if (fs.existsSync(tempDir)) {
fs.rmSync(tempDir, { recursive: true, force: true });
}
});
describe('Storage Configuration', () => {
describe('destination function', () => {
it('should create the correct destination path', (done) => {
const cb = jest.fn((err, destination) => {
expect(err).toBeNull();
expect(destination).toBe(path.join(tempDir, 'temp', 'test-user-123'));
expect(fs.existsSync(destination)).toBe(true);
done();
});
storage.getDestination(mockReq, mockFile, cb);
});
it("should create directory recursively if it doesn't exist", (done) => {
const deepPath = path.join(tempDir, 'deep', 'nested', 'path');
mockReq.app.locals.paths.uploads = deepPath;
const cb = jest.fn((err, destination) => {
expect(err).toBeNull();
expect(destination).toBe(path.join(deepPath, 'temp', 'test-user-123'));
expect(fs.existsSync(destination)).toBe(true);
done();
});
storage.getDestination(mockReq, mockFile, cb);
});
});
describe('filename function', () => {
it('should generate a UUID for req.file_id', (done) => {
const cb = jest.fn((err, filename) => {
expect(err).toBeNull();
expect(mockReq.file_id).toBeDefined();
expect(mockReq.file_id).toMatch(
/^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i,
);
done();
});
storage.getFilename(mockReq, mockFile, cb);
});
it('should decode URI components in filename', (done) => {
const encodedFile = {
...mockFile,
originalname: encodeURIComponent('test file with spaces.jpg'),
};
const cb = jest.fn((err, filename) => {
expect(err).toBeNull();
expect(encodedFile.originalname).toBe('test file with spaces.jpg');
done();
});
storage.getFilename(mockReq, encodedFile, cb);
});
it('should call real sanitizeFilename with properly encoded filename', (done) => {
// Test with a properly URI-encoded filename that needs sanitization
const unsafeFile = {
...mockFile,
originalname: encodeURIComponent('test@#$%^&*()file with spaces!.jpg'),
};
const cb = jest.fn((err, filename) => {
expect(err).toBeNull();
// The actual sanitizeFilename should have cleaned this up after decoding
expect(filename).not.toContain('@');
expect(filename).not.toContain('#');
expect(filename).not.toContain('*');
expect(filename).not.toContain('!');
// Should still preserve dots and hyphens
expect(filename).toContain('.jpg');
done();
});
storage.getFilename(mockReq, unsafeFile, cb);
});
it('should handle very long filenames with actual crypto', (done) => {
const longFile = {
...mockFile,
originalname: 'a'.repeat(300) + '.jpg',
};
const cb = jest.fn((err, filename) => {
expect(err).toBeNull();
expect(filename.length).toBeLessThanOrEqual(255);
expect(filename).toMatch(/\.jpg$/); // Should still end with .jpg
// Should contain a hex suffix if truncated
if (filename.length === 255) {
expect(filename).toMatch(/-[a-f0-9]{6}\.jpg$/);
}
done();
});
storage.getFilename(mockReq, longFile, cb);
});
it('should generate unique file_id for each call', (done) => {
let firstFileId;
const firstCb = jest.fn((err, filename) => {
expect(err).toBeNull();
firstFileId = mockReq.file_id;
// Reset req for second call
delete mockReq.file_id;
const secondCb = jest.fn((err, filename) => {
expect(err).toBeNull();
expect(mockReq.file_id).toBeDefined();
expect(mockReq.file_id).not.toBe(firstFileId);
done();
});
storage.getFilename(mockReq, mockFile, secondCb);
});
storage.getFilename(mockReq, mockFile, firstCb);
});
});
});
describe('Import File Filter', () => {
it('should accept JSON files by mimetype', (done) => {
const jsonFile = {
...mockFile,
mimetype: 'application/json',
originalname: 'data.json',
};
const cb = jest.fn((err, result) => {
expect(err).toBeNull();
expect(result).toBe(true);
done();
});
importFileFilter(mockReq, jsonFile, cb);
});
it('should accept files with .json extension', (done) => {
const jsonFile = {
...mockFile,
mimetype: 'text/plain',
originalname: 'data.json',
};
const cb = jest.fn((err, result) => {
expect(err).toBeNull();
expect(result).toBe(true);
done();
});
importFileFilter(mockReq, jsonFile, cb);
});
it('should reject non-JSON files', (done) => {
const textFile = {
...mockFile,
mimetype: 'text/plain',
originalname: 'document.txt',
};
const cb = jest.fn((err, result) => {
expect(err).toBeInstanceOf(Error);
expect(err.message).toBe('Only JSON files are allowed');
expect(result).toBe(false);
done();
});
importFileFilter(mockReq, textFile, cb);
});
it('should handle files with uppercase .JSON extension', (done) => {
const jsonFile = {
...mockFile,
mimetype: 'text/plain',
originalname: 'DATA.JSON',
};
const cb = jest.fn((err, result) => {
expect(err).toBeNull();
expect(result).toBe(true);
done();
});
importFileFilter(mockReq, jsonFile, cb);
});
});
describe('File Filter with Real defaultFileConfig', () => {
it('should use real fileConfig.checkType for validation', async () => {
// Test with actual librechat-data-provider functions
const {
fileConfig,
imageMimeTypes,
applicationMimeTypes,
} = require('librechat-data-provider');
// Test that the real checkType function works with regex patterns
expect(fileConfig.checkType('image/jpeg', [imageMimeTypes])).toBe(true);
expect(fileConfig.checkType('video/mp4', [imageMimeTypes])).toBe(false);
expect(fileConfig.checkType('application/pdf', [applicationMimeTypes])).toBe(true);
expect(fileConfig.checkType('application/pdf', [])).toBe(false);
});
it('should handle audio files for speech-to-text endpoint with real config', async () => {
mockReq.originalUrl = '/api/speech/stt';
const multerInstance = await createMulterInstance();
expect(multerInstance).toBeDefined();
expect(typeof multerInstance.single).toBe('function');
});
it('should reject unsupported file types using real config', async () => {
// Mock defaultFileConfig for this specific test
const originalCheckType = require('librechat-data-provider').fileConfig.checkType;
const mockCheckType = jest.fn().mockReturnValue(false);
require('librechat-data-provider').fileConfig.checkType = mockCheckType;
try {
const multerInstance = await createMulterInstance();
expect(multerInstance).toBeDefined();
// Test the actual file filter behavior would reject unsupported files
expect(mockCheckType).toBeDefined();
} finally {
// Restore original function
require('librechat-data-provider').fileConfig.checkType = originalCheckType;
}
});
it('should use real mergeFileConfig function', async () => {
const { mergeFileConfig, mbToBytes } = require('librechat-data-provider');
// Test with actual merge function - note that it converts MB to bytes
const testConfig = {
serverFileSizeLimit: 5, // 5 MB
endpoints: {
custom: {
supportedMimeTypes: ['text/plain'],
},
},
};
const result = mergeFileConfig(testConfig);
// The function converts MB to bytes, so 5 MB becomes 5 * 1024 * 1024 bytes
expect(result.serverFileSizeLimit).toBe(mbToBytes(5));
expect(result.endpoints.custom.supportedMimeTypes).toBeDefined();
// Should still have the default endpoints
expect(result.endpoints.default).toBeDefined();
});
});
describe('createMulterInstance with Real Functions', () => {
it('should create a multer instance with correct configuration', async () => {
const multerInstance = await createMulterInstance();
expect(multerInstance).toBeDefined();
expect(typeof multerInstance.single).toBe('function');
expect(typeof multerInstance.array).toBe('function');
expect(typeof multerInstance.fields).toBe('function');
});
it('should use real config merging', async () => {
const { getCustomConfig } = require('~/server/services/Config');
const multerInstance = await createMulterInstance();
expect(getCustomConfig).toHaveBeenCalled();
expect(multerInstance).toBeDefined();
});
it('should create multer instance with expected interface', async () => {
const multerInstance = await createMulterInstance();
expect(multerInstance).toBeDefined();
expect(typeof multerInstance.single).toBe('function');
expect(typeof multerInstance.array).toBe('function');
expect(typeof multerInstance.fields).toBe('function');
});
});
describe('Real Crypto Integration', () => {
it('should use actual crypto.randomUUID()', (done) => {
// Spy on crypto.randomUUID to ensure it's called
const uuidSpy = jest.spyOn(crypto, 'randomUUID');
const cb = jest.fn((err, filename) => {
expect(err).toBeNull();
expect(uuidSpy).toHaveBeenCalled();
expect(mockReq.file_id).toMatch(
/^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i,
);
uuidSpy.mockRestore();
done();
});
storage.getFilename(mockReq, mockFile, cb);
});
it('should generate different UUIDs on subsequent calls', (done) => {
const uuids = [];
let callCount = 0;
const totalCalls = 5;
const cb = jest.fn((err, filename) => {
expect(err).toBeNull();
uuids.push(mockReq.file_id);
callCount++;
if (callCount === totalCalls) {
// Check that all UUIDs are unique
const uniqueUuids = new Set(uuids);
expect(uniqueUuids.size).toBe(totalCalls);
done();
} else {
// Reset for next call
delete mockReq.file_id;
storage.getFilename(mockReq, mockFile, cb);
}
});
// Start the chain
storage.getFilename(mockReq, mockFile, cb);
});
it('should generate cryptographically secure UUIDs', (done) => {
const generatedUuids = new Set();
let callCount = 0;
const totalCalls = 10;
const cb = jest.fn((err, filename) => {
expect(err).toBeNull();
// Verify UUID format and uniqueness
expect(mockReq.file_id).toMatch(
/^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i,
);
generatedUuids.add(mockReq.file_id);
callCount++;
if (callCount === totalCalls) {
// All UUIDs should be unique
expect(generatedUuids.size).toBe(totalCalls);
done();
} else {
// Reset for next call
delete mockReq.file_id;
storage.getFilename(mockReq, mockFile, cb);
}
});
// Start the chain
storage.getFilename(mockReq, mockFile, cb);
});
});
describe('Error Handling', () => {
it('should handle CVE-2024-28870: empty field name DoS vulnerability', async () => {
// Test for the CVE where empty field name could cause unhandled exception
const multerInstance = await createMulterInstance();
// Create a mock request with empty field name (the vulnerability scenario)
const mockReqWithEmptyField = {
...mockReq,
headers: {
'content-type': 'multipart/form-data',
},
};
const mockRes = {
status: jest.fn().mockReturnThis(),
json: jest.fn(),
end: jest.fn(),
};
// This should not crash or throw unhandled exceptions
const uploadMiddleware = multerInstance.single(''); // Empty field name
const mockNext = jest.fn((err) => {
// If there's an error, it should be handled gracefully, not crash
if (err) {
expect(err).toBeInstanceOf(Error);
// The error should be handled, not crash the process
}
});
// This should complete without crashing the process
expect(() => {
uploadMiddleware(mockReqWithEmptyField, mockRes, mockNext);
}).not.toThrow();
});
it('should handle file system errors when directory creation fails', (done) => {
// Test with a non-existent parent directory to simulate fs issues
const invalidPath = '/nonexistent/path/that/should/not/exist';
mockReq.app.locals.paths.uploads = invalidPath;
try {
// Call getDestination which should fail due to permission/path issues
storage.getDestination(mockReq, mockFile, (err, destination) => {
// If callback is reached, we didn't get the expected error
done(new Error('Expected mkdirSync to throw an error but callback was called'));
});
// If we get here without throwing, something unexpected happened
done(new Error('Expected mkdirSync to throw an error but no error was thrown'));
} catch (error) {
// This is the expected behavior - mkdirSync throws synchronously for invalid paths
expect(error.code).toBe('EACCES');
done();
}
});
it('should handle malformed filenames with real sanitization', (done) => {
const malformedFile = {
...mockFile,
originalname: null, // This should be handled gracefully
};
const cb = jest.fn((err, filename) => {
// The function should handle this gracefully
expect(typeof err === 'object' || err === null).toBe(true);
done();
});
try {
storage.getFilename(mockReq, malformedFile, cb);
} catch (error) {
// If it throws, that's also acceptable behavior
done();
}
});
it('should handle edge cases in filename sanitization', (done) => {
const edgeCaseFiles = [
{ originalname: '', expected: /_/ },
{ originalname: '.hidden', expected: /^_\.hidden/ },
{ originalname: '../../../etc/passwd', expected: /passwd/ },
{ originalname: 'file\x00name.txt', expected: /file_name\.txt/ },
];
let testCount = 0;
const testNextFile = (fileData) => {
const fileToTest = { ...mockFile, originalname: fileData.originalname };
const cb = jest.fn((err, filename) => {
expect(err).toBeNull();
expect(filename).toMatch(fileData.expected);
testCount++;
if (testCount === edgeCaseFiles.length) {
done();
} else {
testNextFile(edgeCaseFiles[testCount]);
}
});
storage.getFilename(mockReq, fileToTest, cb);
};
testNextFile(edgeCaseFiles[0]);
});
});
describe('Real Configuration Testing', () => {
it('should handle missing custom config gracefully with real mergeFileConfig', async () => {
const { getCustomConfig } = require('~/server/services/Config');
// Mock getCustomConfig to return undefined
getCustomConfig.mockResolvedValueOnce(undefined);
const multerInstance = await createMulterInstance();
expect(multerInstance).toBeDefined();
expect(typeof multerInstance.single).toBe('function');
});
it('should properly integrate real fileConfig with custom endpoints', async () => {
const { getCustomConfig } = require('~/server/services/Config');
// Mock a custom config with additional endpoints
getCustomConfig.mockResolvedValueOnce({
fileConfig: {
endpoints: {
anthropic: {
supportedMimeTypes: ['text/plain', 'image/png'],
},
},
serverFileSizeLimit: 20, // 20 MB
},
});
const multerInstance = await createMulterInstance();
expect(multerInstance).toBeDefined();
// Verify that getCustomConfig was called (we can't spy on the actual merge function easily)
expect(getCustomConfig).toHaveBeenCalled();
});
});
});

View file

@@ -4,6 +4,7 @@ const tokenizer = require('./tokenizer');
 const endpoints = require('./endpoints');
 const staticRoute = require('./static');
 const messages = require('./messages');
+const memories = require('./memories');
 const presets = require('./presets');
 const prompts = require('./prompts');
 const balance = require('./balance');

@@ -26,6 +27,7 @@ const edit = require('./edit');
 const keys = require('./keys');
 const user = require('./user');
 const ask = require('./ask');
+const mcp = require('./mcp');

 module.exports = {
   ask,

@@ -51,9 +53,11 @@ module.exports = {
   presets,
   balance,
   messages,
+  memories,
   endpoints,
   tokenizer,
   assistants,
   categories,
   staticRoute,
+  mcp,
 };

api/server/routes/mcp.js (new file, 205 lines)
View file

@@ -0,0 +1,205 @@
const { Router } = require('express');
const { MCPOAuthHandler } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const { CacheKeys } = require('librechat-data-provider');
const { requireJwtAuth } = require('~/server/middleware');
const { getFlowStateManager } = require('~/config');
const { getLogStores } = require('~/cache');
const router = Router();
/**
* Initiate OAuth flow
* This endpoint is called when the user clicks the auth link in the UI
*/
router.get('/:serverName/oauth/initiate', requireJwtAuth, async (req, res) => {
try {
const { serverName } = req.params;
const { userId, flowId } = req.query;
const user = req.user;
// Verify the userId matches the authenticated user
if (userId !== user.id) {
return res.status(403).json({ error: 'User mismatch' });
}
logger.debug('[MCP OAuth] Initiate request', { serverName, userId, flowId });
const flowsCache = getLogStores(CacheKeys.FLOWS);
const flowManager = getFlowStateManager(flowsCache);
/** Flow state to retrieve OAuth config */
const flowState = await flowManager.getFlowState(flowId, 'mcp_oauth');
if (!flowState) {
logger.error('[MCP OAuth] Flow state not found', { flowId });
return res.status(404).json({ error: 'Flow not found' });
}
const { serverUrl, oauth: oauthConfig } = flowState.metadata || {};
if (!serverUrl || !oauthConfig) {
logger.error('[MCP OAuth] Missing server URL or OAuth config in flow state');
return res.status(400).json({ error: 'Invalid flow state' });
}
const { authorizationUrl, flowId: oauthFlowId } = await MCPOAuthHandler.initiateOAuthFlow(
serverName,
serverUrl,
userId,
oauthConfig,
);
logger.debug('[MCP OAuth] OAuth flow initiated', { oauthFlowId, authorizationUrl });
// Redirect user to the authorization URL
res.redirect(authorizationUrl);
} catch (error) {
logger.error('[MCP OAuth] Failed to initiate OAuth', error);
res.status(500).json({ error: 'Failed to initiate OAuth' });
}
});
/**
* OAuth callback handler
* This handles the OAuth callback after the user has authorized the application
*/
router.get('/:serverName/oauth/callback', async (req, res) => {
try {
const { serverName } = req.params;
const { code, state, error: oauthError } = req.query;
logger.debug('[MCP OAuth] Callback received', {
serverName,
code: code ? 'present' : 'missing',
state,
error: oauthError,
});
if (oauthError) {
logger.error('[MCP OAuth] OAuth error received', { error: oauthError });
return res.redirect(`/oauth/error?error=${encodeURIComponent(String(oauthError))}`);
}
if (!code || typeof code !== 'string') {
logger.error('[MCP OAuth] Missing or invalid code');
return res.redirect('/oauth/error?error=missing_code');
}
if (!state || typeof state !== 'string') {
logger.error('[MCP OAuth] Missing or invalid state');
return res.redirect('/oauth/error?error=missing_state');
}
// Extract flow ID from state
const flowId = state;
logger.debug('[MCP OAuth] Using flow ID from state', { flowId });
const flowsCache = getLogStores(CacheKeys.FLOWS);
const flowManager = getFlowStateManager(flowsCache);
logger.debug('[MCP OAuth] Getting flow state for flowId: ' + flowId);
const flowState = await MCPOAuthHandler.getFlowState(flowId, flowManager);
if (!flowState) {
logger.error('[MCP OAuth] Flow state not found for flowId:', flowId);
return res.redirect('/oauth/error?error=invalid_state');
}
logger.debug('[MCP OAuth] Flow state details', {
serverName: flowState.serverName,
userId: flowState.userId,
hasMetadata: !!flowState.metadata,
hasClientInfo: !!flowState.clientInfo,
hasCodeVerifier: !!flowState.codeVerifier,
});
// Complete the OAuth flow
logger.debug('[MCP OAuth] Completing OAuth flow');
const tokens = await MCPOAuthHandler.completeOAuthFlow(flowId, code, flowManager);
logger.info('[MCP OAuth] OAuth flow completed, tokens received in callback route');
// For system-level OAuth, we need to store the tokens and retry the connection
if (flowState.userId === 'system') {
logger.debug(`[MCP OAuth] System-level OAuth completed for ${serverName}`);
}
/** ID of the flow that the tool/connection is waiting for */
const toolFlowId = flowState.metadata?.toolFlowId;
if (toolFlowId) {
logger.debug('[MCP OAuth] Completing tool flow', { toolFlowId });
await flowManager.completeFlow(toolFlowId, 'mcp_oauth', tokens);
}
/** Redirect to success page with serverName */
const redirectUrl = `/oauth/success?serverName=${encodeURIComponent(serverName)}`;
res.redirect(redirectUrl);
} catch (error) {
logger.error('[MCP OAuth] OAuth callback error', error);
res.redirect('/oauth/error?error=callback_failed');
}
});
/**
* Get OAuth tokens for a completed flow
* This is primarily for user-level OAuth flows
*/
router.get('/oauth/tokens/:flowId', requireJwtAuth, async (req, res) => {
try {
const { flowId } = req.params;
const user = req.user;
if (!user?.id) {
return res.status(401).json({ error: 'User not authenticated' });
}
// Allow system flows or user-owned flows
if (!flowId.startsWith(`${user.id}:`) && !flowId.startsWith('system:')) {
return res.status(403).json({ error: 'Access denied' });
}
const flowsCache = getLogStores(CacheKeys.FLOWS);
const flowManager = getFlowStateManager(flowsCache);
const flowState = await flowManager.getFlowState(flowId, 'mcp_oauth');
if (!flowState) {
return res.status(404).json({ error: 'Flow not found' });
}
if (flowState.status !== 'COMPLETED') {
return res.status(400).json({ error: 'Flow not completed' });
}
res.json({ tokens: flowState.result });
} catch (error) {
logger.error('[MCP OAuth] Failed to get tokens', error);
res.status(500).json({ error: 'Failed to get tokens' });
}
});
/**
* Check OAuth flow status
* This endpoint can be used to poll the status of an OAuth flow
*/
router.get('/oauth/status/:flowId', async (req, res) => {
try {
const { flowId } = req.params;
const flowsCache = getLogStores(CacheKeys.FLOWS);
const flowManager = getFlowStateManager(flowsCache);
const flowState = await flowManager.getFlowState(flowId, 'mcp_oauth');
if (!flowState) {
return res.status(404).json({ error: 'Flow not found' });
}
res.json({
status: flowState.status,
completed: flowState.status === 'COMPLETED',
failed: flowState.status === 'FAILED',
error: flowState.error,
});
} catch (error) {
logger.error('[MCP OAuth] Failed to get flow status', error);
res.status(500).json({ error: 'Failed to get flow status' });
}
});
module.exports = router;

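Since `/oauth/status/:flowId` exists to be polled, a client can wait for a flow to settle with a loop like the following sketch, which assumes a same-origin fetch and the `{ status, completed, failed, error }` response shape returned above:

async function waitForOAuthFlow(flowId, { attempts = 60, intervalMs = 2000 } = {}) {
  for (let i = 0; i < attempts; i++) {
    const res = await fetch(`/api/mcp/oauth/status/${encodeURIComponent(flowId)}`);
    if (res.ok) {
      const body = await res.json();
      if (body.completed) {
        return body; // flow finished; tokens can be fetched via /oauth/tokens/:flowId
      }
      if (body.failed) {
        throw new Error(body.error || 'OAuth flow failed');
      }
    }
    await new Promise((resolve) => setTimeout(resolve, intervalMs));
  }
  throw new Error('Timed out waiting for OAuth flow');
}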
View file

@@ -0,0 +1,231 @@
const express = require('express');
const { Tokenizer } = require('@librechat/api');
const { PermissionTypes, Permissions } = require('librechat-data-provider');
const {
getAllUserMemories,
toggleUserMemories,
createMemory,
setMemory,
deleteMemory,
} = require('~/models');
const { requireJwtAuth, generateCheckAccess } = require('~/server/middleware');
const router = express.Router();
const checkMemoryRead = generateCheckAccess(PermissionTypes.MEMORIES, [
Permissions.USE,
Permissions.READ,
]);
const checkMemoryCreate = generateCheckAccess(PermissionTypes.MEMORIES, [
Permissions.USE,
Permissions.CREATE,
]);
const checkMemoryUpdate = generateCheckAccess(PermissionTypes.MEMORIES, [
Permissions.USE,
Permissions.UPDATE,
]);
const checkMemoryDelete = generateCheckAccess(PermissionTypes.MEMORIES, [
Permissions.USE,
Permissions.UPDATE,
]);
const checkMemoryOptOut = generateCheckAccess(PermissionTypes.MEMORIES, [
Permissions.USE,
Permissions.OPT_OUT,
]);
router.use(requireJwtAuth);
/**
* GET /memories
* Returns all memories for the authenticated user, sorted by updated_at (newest first).
* Also includes memory usage percentage based on token limit.
*/
router.get('/', checkMemoryRead, async (req, res) => {
try {
const memories = await getAllUserMemories(req.user.id);
const sortedMemories = memories.sort(
(a, b) => new Date(b.updated_at).getTime() - new Date(a.updated_at).getTime(),
);
const totalTokens = memories.reduce((sum, memory) => {
return sum + (memory.tokenCount || 0);
}, 0);
const memoryConfig = req.app.locals?.memory;
const tokenLimit = memoryConfig?.tokenLimit;
let usagePercentage = null;
if (tokenLimit && tokenLimit > 0) {
usagePercentage = Math.min(100, Math.round((totalTokens / tokenLimit) * 100));
}
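// Worked example (values illustrative): totalTokens = 1500, tokenLimit = 2000
// → Math.round((1500 / 2000) * 100) = 75; Math.min caps the result at 100.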
res.json({
memories: sortedMemories,
totalTokens,
tokenLimit: tokenLimit || null,
usagePercentage,
});
} catch (error) {
res.status(500).json({ error: error.message });
}
});
/**
* POST /memories
* Creates a new memory entry for the authenticated user.
* Body: { key: string, value: string }
* Returns 201 and { created: true, memory: <createdDoc> } when successful.
*/
router.post('/', checkMemoryCreate, async (req, res) => {
const { key, value } = req.body;
if (typeof key !== 'string' || key.trim() === '') {
return res.status(400).json({ error: 'Key is required and must be a non-empty string.' });
}
if (typeof value !== 'string' || value.trim() === '') {
return res.status(400).json({ error: 'Value is required and must be a non-empty string.' });
}
try {
const tokenCount = Tokenizer.getTokenCount(value, 'o200k_base');
const memories = await getAllUserMemories(req.user.id);
// Check token limit
const memoryConfig = req.app.locals?.memory;
const tokenLimit = memoryConfig?.tokenLimit;
if (tokenLimit) {
const currentTotalTokens = memories.reduce(
(sum, memory) => sum + (memory.tokenCount || 0),
0,
);
if (currentTotalTokens + tokenCount > tokenLimit) {
return res.status(400).json({
error: `Adding this memory would exceed the token limit of ${tokenLimit}. Current usage: ${currentTotalTokens} tokens.`,
});
}
}
const result = await createMemory({
userId: req.user.id,
key: key.trim(),
value: value.trim(),
tokenCount,
});
if (!result.ok) {
return res.status(500).json({ error: 'Failed to create memory.' });
}
const updatedMemories = await getAllUserMemories(req.user.id);
const newMemory = updatedMemories.find((m) => m.key === key.trim());
res.status(201).json({ created: true, memory: newMemory });
} catch (error) {
if (error.message && error.message.includes('already exists')) {
return res.status(409).json({ error: 'Memory with this key already exists.' });
}
res.status(500).json({ error: error.message });
}
});
/**
* PATCH /memories/preferences
* Updates the user's memory preferences (e.g., enabling/disabling memories).
* Body: { memories: boolean }
* Returns 200 and { updated: true, preferences: { memories: boolean } } when successful.
*/
router.patch('/preferences', checkMemoryOptOut, async (req, res) => {
const { memories } = req.body;
if (typeof memories !== 'boolean') {
return res.status(400).json({ error: 'memories must be a boolean value.' });
}
try {
const updatedUser = await toggleUserMemories(req.user.id, memories);
if (!updatedUser) {
return res.status(404).json({ error: 'User not found.' });
}
res.json({
updated: true,
preferences: {
memories: updatedUser.personalization?.memories ?? true,
},
});
} catch (error) {
res.status(500).json({ error: error.message });
}
});
/**
* PATCH /memories/:key
* Updates the value of an existing memory entry for the authenticated user.
* Body: { value: string }
* Returns 200 and { updated: true, memory: <updatedDoc> } when successful.
*/
router.patch('/:key', checkMemoryUpdate, async (req, res) => {
const { key } = req.params;
const { value } = req.body || {};
if (typeof value !== 'string' || value.trim() === '') {
return res.status(400).json({ error: 'Value is required and must be a non-empty string.' });
}
try {
const tokenCount = Tokenizer.getTokenCount(value, 'o200k_base');
const memories = await getAllUserMemories(req.user.id);
const existingMemory = memories.find((m) => m.key === key);
if (!existingMemory) {
return res.status(404).json({ error: 'Memory not found.' });
}
const result = await setMemory({
userId: req.user.id,
key,
value,
tokenCount,
});
if (!result.ok) {
return res.status(500).json({ error: 'Failed to update memory.' });
}
const updatedMemories = await getAllUserMemories(req.user.id);
const updatedMemory = updatedMemories.find((m) => m.key === key);
res.json({ updated: true, memory: updatedMemory });
} catch (error) {
res.status(500).json({ error: error.message });
}
});
/**
* DELETE /memories/:key
* Deletes a memory entry for the authenticated user.
* Returns 200 and { deleted: true } when successful.
*/
router.delete('/:key', checkMemoryDelete, async (req, res) => {
const { key } = req.params;
try {
const result = await deleteMemory({ userId: req.user.id, key });
if (!result.ok) {
return res.status(404).json({ error: 'Memory not found.' });
}
res.json({ deleted: true });
} catch (error) {
res.status(500).json({ error: error.message });
}
});
module.exports = router;

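Taken together, the router exposes a small CRUD surface under `/api/memories`. A sketch of exercising it from a client, assuming a valid JWT in `token`:

const headers = { Authorization: `Bearer ${token}`, 'Content-Type': 'application/json' };

// Create (201; 409 if the key already exists, 400 past the token limit)
await fetch('/api/memories', {
  method: 'POST',
  headers,
  body: JSON.stringify({ key: 'preferred_language', value: 'TypeScript' }),
});

// List, including token usage stats
const { memories, totalTokens, usagePercentage } = await (
  await fetch('/api/memories', { headers })
).json();

// Update one entry, then opt out of memories entirely
await fetch('/api/memories/preferred_language', {
  method: 'PATCH',
  headers,
  body: JSON.stringify({ value: 'Rust' }),
});
await fetch('/api/memories/preferences', {
  method: 'PATCH',
  headers,
  body: JSON.stringify({ memories: false }),
});

// Delete
await fetch('/api/memories/preferred_language', { method: 'DELETE', headers });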
View file

@@ -47,7 +47,9 @@ const oauthHandler = async (req, res) => {
 router.get('/error', (req, res) => {
   // A single error message is pushed by passport when authentication fails.
-  logger.error('Error in OAuth authentication:', { message: req.session.messages.pop() });
+  logger.error('Error in OAuth authentication:', {
+    message: req.session?.messages?.pop() || 'Unknown error',
+  });
   // Redirect to login page with auth_failed parameter to prevent infinite redirect loops
   res.redirect(`${domains.client}/login?redirect=false`);
View file

@@ -1,6 +1,7 @@
 const express = require('express');
 const {
   promptPermissionsSchema,
+  memoryPermissionsSchema,
   agentPermissionsSchema,
   PermissionTypes,
   roleDefaults,

@@ -118,4 +119,43 @@ router.put('/:roleName/agents', checkAdmin, async (req, res) => {
   }
 });

+/**
+ * PUT /api/roles/:roleName/memories
+ * Update memory permissions for a specific role
+ */
+router.put('/:roleName/memories', checkAdmin, async (req, res) => {
+  const { roleName: _r } = req.params;
+  // TODO: TEMP, use a better parsing for roleName
+  const roleName = _r.toUpperCase();
+  /** @type {TRole['permissions']['MEMORIES']} */
+  const updates = req.body;
+  try {
+    const parsedUpdates = memoryPermissionsSchema.partial().parse(updates);
+    const role = await getRoleByName(roleName);
+    if (!role) {
+      return res.status(404).send({ message: 'Role not found' });
+    }
+    const currentPermissions =
+      role.permissions?.[PermissionTypes.MEMORIES] || role[PermissionTypes.MEMORIES] || {};
+    const mergedUpdates = {
+      permissions: {
+        ...role.permissions,
+        [PermissionTypes.MEMORIES]: {
+          ...currentPermissions,
+          ...parsedUpdates,
+        },
+      },
+    };
+    const updatedRole = await updateRoleByName(roleName, mergedUpdates);
+    res.status(200).send(updatedRole);
+  } catch (error) {
+    return res.status(400).send({ message: 'Invalid memory permissions.', error: error.errors });
+  }
+});
+
 module.exports = router;

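Because the route parses the body with `memoryPermissionsSchema.partial()`, an admin can update individual flags without restating the rest. A sketch of a request granting read/update while revoking opt-out for the USER role (the uppercase permission keys are an assumption based on the `Permissions` constants used above):

await fetch('/api/roles/user/memories', {
  method: 'PUT',
  headers: { Authorization: `Bearer ${adminToken}`, 'Content-Type': 'application/json' },
  // Partial update: flags left out keep their current values.
  body: JSON.stringify({ READ: true, UPDATE: true, OPT_OUT: false }),
});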
View file

@@ -1,15 +1,15 @@
 const express = require('express');
+const { isEnabled } = require('@librechat/api');
+const { logger } = require('@librechat/data-schemas');
 const {
-  getSharedLink,
   getSharedMessages,
   createSharedLink,
   updateSharedLink,
-  getSharedLinks,
   deleteSharedLink,
-} = require('~/models/Share');
+  getSharedLinks,
+  getSharedLink,
+} = require('~/models');
 const requireJwtAuth = require('~/server/middleware/requireJwtAuth');
-const { isEnabled } = require('~/server/utils');

 const router = express.Router();

 /**

@@ -35,6 +35,7 @@ if (allowSharedLinks) {
         res.status(404).end();
       }
     } catch (error) {
+      logger.error('Error getting shared messages:', error);
       res.status(500).json({ message: 'Error getting shared messages' });
     }
   },

@@ -54,9 +55,7 @@ router.get('/', requireJwtAuth, async (req, res) => {
     sortDirection: ['asc', 'desc'].includes(req.query.sortDirection)
       ? req.query.sortDirection
       : 'desc',
-    search: req.query.search
-      ? decodeURIComponent(req.query.search.trim())
-      : undefined,
+    search: req.query.search ? decodeURIComponent(req.query.search.trim()) : undefined,
   };

   const result = await getSharedLinks(

@@ -75,7 +74,7 @@ router.get('/', requireJwtAuth, async (req, res) => {
       hasNextPage: result.hasNextPage,
     });
   } catch (error) {
-    console.error('Error getting shared links:', error);
+    logger.error('Error getting shared links:', error);
     res.status(500).json({
       message: 'Error getting shared links',
       error: error.message,

@@ -93,6 +92,7 @@ router.get('/link/:conversationId', requireJwtAuth, async (req, res) => {
       conversationId: req.params.conversationId,
     });
   } catch (error) {
+    logger.error('Error getting shared link:', error);
     res.status(500).json({ message: 'Error getting shared link' });
   }
 });

@@ -106,6 +106,7 @@ router.post('/:conversationId', requireJwtAuth, async (req, res) => {
       res.status(404).end();
     }
   } catch (error) {
+    logger.error('Error creating shared link:', error);
     res.status(500).json({ message: 'Error creating shared link' });
   }
 });

@@ -119,6 +120,7 @@ router.patch('/:shareId', requireJwtAuth, async (req, res) => {
       res.status(404).end();
     }
   } catch (error) {
+    logger.error('Error updating shared link:', error);
     res.status(500).json({ message: 'Error updating shared link' });
   }
 });

@@ -133,7 +135,8 @@ router.delete('/:shareId', requireJwtAuth, async (req, res) => {
     return res.status(200).json(result);
   } catch (error) {
-    return res.status(400).json({ message: error.message });
+    logger.error('Error deleting shared link:', error);
+    return res.status(400).json({ message: 'Error deleting shared link' });
   }
 });
View file

@@ -1,7 +1,15 @@
 const jwt = require('jsonwebtoken');
 const { nanoid } = require('nanoid');
 const { tool } = require('@langchain/core/tools');
+const { logger } = require('@librechat/data-schemas');
 const { GraphEvents, sleep } = require('@librechat/agents');
+const {
+  sendEvent,
+  encryptV2,
+  decryptV2,
+  logAxiosError,
+  refreshAccessToken,
+} = require('@librechat/api');
 const {
   Time,
   CacheKeys,

@@ -12,14 +20,11 @@ const {
   isImageVisionTool,
   actionDomainSeparator,
 } = require('librechat-data-provider');
-const { refreshAccessToken } = require('~/server/services/TokenService');
-const { logger, getFlowStateManager, sendEvent } = require('~/config');
-const { encryptV2, decryptV2 } = require('~/server/utils/crypto');
+const { findToken, updateToken, createToken } = require('~/models');
 const { getActions, deleteActions } = require('~/models/Action');
 const { deleteAssistant } = require('~/models/Assistant');
-const { logAxiosError } = require('~/utils');
+const { getFlowStateManager } = require('~/config');
 const { getLogStores } = require('~/cache');
-const { findToken } = require('~/models');

 const JWT_SECRET = process.env.JWT_SECRET;
 const toolNameRegex = /^[a-zA-Z0-9_-]+$/;

@@ -208,6 +213,7 @@ async function createActionTool({
         userId: userId,
         client_url: metadata.auth.client_url,
         redirect_uri: `${process.env.DOMAIN_SERVER}/api/actions/${action_id}/oauth/callback`,
+        token_exchange_method: metadata.auth.token_exchange_method,
         /** Encrypted values */
         encrypted_oauth_client_id: encrypted.oauth_client_id,
         encrypted_oauth_client_secret: encrypted.oauth_client_secret,

@@ -256,14 +262,22 @@ async function createActionTool({
     try {
       const refresh_token = await decryptV2(refreshTokenData.token);
       const refreshTokens = async () =>
-        await refreshAccessToken({
-          userId,
-          identifier,
-          refresh_token,
-          client_url: metadata.auth.client_url,
-          encrypted_oauth_client_id: encrypted.oauth_client_id,
-          encrypted_oauth_client_secret: encrypted.oauth_client_secret,
-        });
+        await refreshAccessToken(
+          {
+            userId,
+            identifier,
+            refresh_token,
+            client_url: metadata.auth.client_url,
+            encrypted_oauth_client_id: encrypted.oauth_client_id,
+            token_exchange_method: metadata.auth.token_exchange_method,
+            encrypted_oauth_client_secret: encrypted.oauth_client_secret,
+          },
+          {
+            findToken,
+            updateToken,
+            createToken,
+          },
+        );
       const flowsCache = getLogStores(CacheKeys.FLOWS);
       const flowManager = getFlowStateManager(flowsCache);
       const refreshData = await flowManager.createFlowWithHandler(

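`getAccessToken` and `refreshAccessToken` now receive the token persistence methods as a second argument instead of importing models themselves, which keeps `@librechat/api` decoupled from the database layer. One payoff is testability; a sketch with in-memory stubs (the stub signatures are assumptions, not the real model contracts):

const tokens = new Map();
const tokenMethods = {
  findToken: async ({ identifier }) => tokens.get(identifier) ?? null,
  createToken: async (doc) => {
    tokens.set(doc.identifier, doc);
    return doc;
  },
  updateToken: async ({ identifier }, update) => {
    const next = { ...tokens.get(identifier), ...update };
    tokens.set(identifier, next);
    return next;
  },
};
// await getAccessToken({ ...oauthParams }, tokenMethods);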
View file

@@ -1,11 +1,12 @@
 const {
   FileSources,
   loadOCRConfig,
-  processMCPEnv,
   EModelEndpoint,
+  loadMemoryConfig,
   getConfigDefaults,
   loadWebSearchConfig,
 } = require('librechat-data-provider');
+const { agentsConfigSetup } = require('@librechat/api');
 const {
   checkHealth,
   checkConfig,

@@ -24,10 +25,9 @@ const { azureConfigSetup } = require('./start/azureOpenAI');
 const { processModelSpecs } = require('./start/modelSpecs');
 const { initializeS3 } = require('./Files/S3/initialize');
 const { loadAndFormatTools } = require('./ToolService');
-const { agentsConfigSetup } = require('./start/agents');
 const { isEnabled } = require('~/server/utils');
 const { initializeRoles } = require('~/models');
-const { getMCPManager } = require('~/config');
+const { setCachedTools } = require('./Config');
 const paths = require('~/config/paths');

 /**

@@ -44,6 +44,7 @@ const AppService = async (app) => {
   const ocr = loadOCRConfig(config.ocr);
   const webSearch = loadWebSearchConfig(config.webSearch);
   checkWebSearchConfig(webSearch);
+  const memory = loadMemoryConfig(config.memory);
   const filteredTools = config.filteredTools;
   const includedTools = config.includedTools;
   const fileStrategy = config.fileStrategy ?? configDefaults.fileStrategy;

@@ -74,11 +75,10 @@ const AppService = async (app) => {
     directory: paths.structuredTools,
   });

-  if (config.mcpServers != null) {
-    const mcpManager = getMCPManager();
-    await mcpManager.initializeMCP(config.mcpServers, processMCPEnv);
-    await mcpManager.mapAvailableTools(availableTools);
-  }
+  await setCachedTools(availableTools, { isGlobal: true });
+
+  // Store MCP config for later initialization
+  const mcpConfig = config.mcpServers || null;

   const socialLogins =
     config?.registration?.socialLogins ?? configDefaults?.registration?.socialLogins;

@@ -88,20 +88,26 @@ const AppService = async (app) => {
   const defaultLocals = {
     ocr,
     paths,
+    memory,
     webSearch,
     fileStrategy,
     socialLogins,
     filteredTools,
     includedTools,
-    availableTools,
     imageOutputType,
     interfaceConfig,
     turnstileConfig,
     balance,
+    mcpConfig,
   };

+  const agentsDefaults = agentsConfigSetup(config);
+
   if (!Object.keys(config).length) {
-    app.locals = defaultLocals;
+    app.locals = {
+      ...defaultLocals,
+      [EModelEndpoint.agents]: agentsDefaults,
+    };
     return;
   }

@@ -136,9 +142,7 @@ const AppService = async (app) => {
     );
   }

-  if (endpoints?.[EModelEndpoint.agents]) {
-    endpointLocals[EModelEndpoint.agents] = agentsConfigSetup(config);
-  }
+  endpointLocals[EModelEndpoint.agents] = agentsConfigSetup(config, agentsDefaults);

   const endpointKeys = [
     EModelEndpoint.openAI,

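`AppService` now only parks the MCP servers config on `app.locals.mcpConfig`; connecting happens later, via the `initializeMCP(app)` call made after the server starts listening, so slow or unreachable MCP servers cannot delay startup. A sketch of what that deferred initializer might look like, reusing the manager calls removed above (an assumption, not the actual `initializeMCP` source):

async function initializeMCPSketch(app) {
  const { mcpConfig } = app.locals;
  if (!mcpConfig) {
    return;
  }
  try {
    const mcpManager = getMCPManager();
    await mcpManager.initializeMCP(mcpConfig, processMCPEnv);
  } catch (err) {
    logger.error('[MCP] Deferred initialization failed:', err);
  }
}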
View file

@ -2,8 +2,10 @@ const {
FileSources, FileSources,
EModelEndpoint, EModelEndpoint,
EImageOutputType, EImageOutputType,
AgentCapabilities,
defaultSocialLogins, defaultSocialLogins,
validateAzureGroups, validateAzureGroups,
defaultAgentCapabilities,
deprecatedAzureVariables, deprecatedAzureVariables,
conflictingAzureVariables, conflictingAzureVariables,
} = require('librechat-data-provider'); } = require('librechat-data-provider');
@ -30,6 +32,25 @@ jest.mock('~/models', () => ({
jest.mock('~/models/Role', () => ({ jest.mock('~/models/Role', () => ({
updateAccessPermissions: jest.fn(), updateAccessPermissions: jest.fn(),
})); }));
jest.mock('./Config', () => ({
setCachedTools: jest.fn(),
getCachedTools: jest.fn().mockResolvedValue({
ExampleTool: {
type: 'function',
function: {
description: 'Example tool function',
name: 'exampleFunction',
parameters: {
type: 'object',
properties: {
param1: { type: 'string', description: 'An example parameter' },
},
required: ['param1'],
},
},
},
}),
}));
jest.mock('./ToolService', () => ({ jest.mock('./ToolService', () => ({
loadAndFormatTools: jest.fn().mockReturnValue({ loadAndFormatTools: jest.fn().mockReturnValue({
ExampleTool: { ExampleTool: {
@ -119,22 +140,9 @@ describe('AppService', () => {
sidePanel: true, sidePanel: true,
presets: true, presets: true,
}), }),
mcpConfig: null,
turnstileConfig: mockedTurnstileConfig, turnstileConfig: mockedTurnstileConfig,
modelSpecs: undefined, modelSpecs: undefined,
availableTools: {
ExampleTool: {
type: 'function',
function: expect.objectContaining({
description: 'Example tool function',
name: 'exampleFunction',
parameters: expect.objectContaining({
type: 'object',
properties: expect.any(Object),
required: expect.arrayContaining(['param1']),
}),
}),
},
},
paths: expect.anything(), paths: expect.anything(),
ocr: expect.anything(), ocr: expect.anything(),
imageOutputType: expect.any(String), imageOutputType: expect.any(String),
@ -151,6 +159,11 @@ describe('AppService', () => {
safeSearch: 1, safeSearch: 1,
serperApiKey: '${SERPER_API_KEY}', serperApiKey: '${SERPER_API_KEY}',
}, },
memory: undefined,
agents: {
disableBuilder: false,
capabilities: expect.arrayContaining([...defaultAgentCapabilities]),
},
}); });
}); });
@ -216,14 +229,41 @@ describe('AppService', () => {
it('should load and format tools accurately with defined structure', async () => { it('should load and format tools accurately with defined structure', async () => {
const { loadAndFormatTools } = require('./ToolService'); const { loadAndFormatTools } = require('./ToolService');
const { setCachedTools, getCachedTools } = require('./Config');
await AppService(app); await AppService(app);
expect(loadAndFormatTools).toHaveBeenCalledWith({ expect(loadAndFormatTools).toHaveBeenCalledWith({
adminFilter: undefined,
adminIncluded: undefined,
directory: expect.anything(), directory: expect.anything(),
}); });
expect(app.locals.availableTools.ExampleTool).toBeDefined(); // Verify setCachedTools was called with the tools
expect(app.locals.availableTools.ExampleTool).toEqual({ expect(setCachedTools).toHaveBeenCalledWith(
{
ExampleTool: {
type: 'function',
function: {
description: 'Example tool function',
name: 'exampleFunction',
parameters: {
type: 'object',
properties: {
param1: { type: 'string', description: 'An example parameter' },
},
required: ['param1'],
},
},
},
},
{ isGlobal: true },
);
// Verify we can retrieve the tools from cache
const cachedTools = await getCachedTools({ includeGlobal: true });
expect(cachedTools.ExampleTool).toBeDefined();
expect(cachedTools.ExampleTool).toEqual({
type: 'function', type: 'function',
function: { function: {
description: 'Example tool function', description: 'Example tool function',
@ -268,6 +308,71 @@ describe('AppService', () => {
); );
}); });
it('should correctly configure Agents endpoint based on custom config', async () => {
require('./Config/loadCustomConfig').mockImplementationOnce(() =>
Promise.resolve({
endpoints: {
[EModelEndpoint.agents]: {
disableBuilder: true,
recursionLimit: 10,
maxRecursionLimit: 20,
allowedProviders: ['openai', 'anthropic'],
capabilities: [AgentCapabilities.tools, AgentCapabilities.actions],
},
},
}),
);
await AppService(app);
expect(app.locals).toHaveProperty(EModelEndpoint.agents);
expect(app.locals[EModelEndpoint.agents]).toEqual(
expect.objectContaining({
disableBuilder: true,
recursionLimit: 10,
maxRecursionLimit: 20,
allowedProviders: expect.arrayContaining(['openai', 'anthropic']),
capabilities: expect.arrayContaining([AgentCapabilities.tools, AgentCapabilities.actions]),
}),
);
});
it('should configure Agents endpoint with defaults when no config is provided', async () => {
require('./Config/loadCustomConfig').mockImplementationOnce(() => Promise.resolve({}));
await AppService(app);
expect(app.locals).toHaveProperty(EModelEndpoint.agents);
expect(app.locals[EModelEndpoint.agents]).toEqual(
expect.objectContaining({
disableBuilder: false,
capabilities: expect.arrayContaining([...defaultAgentCapabilities]),
}),
);
});
it('should configure Agents endpoint with defaults when endpoints exist but agents is not defined', async () => {
require('./Config/loadCustomConfig').mockImplementationOnce(() =>
Promise.resolve({
endpoints: {
[EModelEndpoint.openAI]: {
titleConvo: true,
},
},
}),
);
await AppService(app);
expect(app.locals).toHaveProperty(EModelEndpoint.agents);
expect(app.locals[EModelEndpoint.agents]).toEqual(
expect.objectContaining({
disableBuilder: false,
capabilities: expect.arrayContaining([...defaultAgentCapabilities]),
}),
);
});
it('should correctly configure minimum Azure OpenAI Assistant values', async () => {
const assistantGroups = [azureGroups[0], { ...azureGroups[1], assistants: true }];
require('./Config/loadCustomConfig').mockImplementationOnce(() =>
@ -463,7 +568,6 @@ describe('AppService updating app.locals and issuing warnings', () => {
expect(app.locals).toBeDefined();
expect(app.locals.paths).toBeDefined();
expect(app.locals.availableTools).toBeDefined();
expect(app.locals.fileStrategy).toEqual(FileSources.local);
expect(app.locals.socialLogins).toEqual(defaultSocialLogins);
expect(app.locals.balance).toEqual(
@ -496,7 +600,6 @@ describe('AppService updating app.locals and issuing warnings', () => {
expect(app.locals).toBeDefined();
expect(app.locals.paths).toBeDefined();
expect(app.locals.availableTools).toBeDefined();
expect(app.locals.fileStrategy).toEqual(customConfig.fileStrategy);
expect(app.locals.socialLogins).toEqual(customConfig.registration.socialLogins);
expect(app.locals.balance).toEqual(customConfig.balance);

View file

@ -1,5 +1,7 @@
const bcrypt = require('bcryptjs');
const { webcrypto } = require('node:crypto');
const { isEnabled } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const { SystemRoles, errorsToString } = require('librechat-data-provider');
const {
findUser,
@ -17,11 +19,10 @@ const {
deleteUserById,
generateRefreshToken,
} = require('~/models');
const { isEnabled, checkEmailConfig, sendEmail } = require('~/server/utils');
const { isEmailDomainAllowed } = require('~/server/services/domains');
const { checkEmailConfig, sendEmail } = require('~/server/utils');
const { getBalanceConfig } = require('~/server/services/Config');
const { registerSchema } = require('~/strategies/validators');
const { logger } = require('~/config');

const domains = {
client: process.env.DOMAIN_CLIENT,
@ -409,7 +410,9 @@ const setOpenIDAuthTokens = (tokenset, res) => {
return;
}
const { REFRESH_TOKEN_EXPIRY } = process.env ?? {};
const expiryInMilliseconds = eval(REFRESH_TOKEN_EXPIRY) ?? 1000 * 60 * 60 * 24 * 7; // 7 days default
const expiryInMilliseconds = REFRESH_TOKEN_EXPIRY
? eval(REFRESH_TOKEN_EXPIRY)
: 1000 * 60 * 60 * 24 * 7; // 7 days default
const expirationDate = new Date(Date.now() + expiryInMilliseconds);
if (tokenset == null) {
logger.error('[setOpenIDAuthTokens] No tokenset found in request');
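
A quick illustration of the refresh-token expiry change above (not part of the commit): the env value is passed through eval(), so it may be a plain number or an arithmetic expression, and the new ternary only evaluates it when the variable is actually set. A minimal sketch, assuming a hypothetical .env entry:

// .env (hypothetical): REFRESH_TOKEN_EXPIRY=1000 * 60 * 60 * 24 * 30
const { REFRESH_TOKEN_EXPIRY } = process.env ?? {};
const expiryInMilliseconds = REFRESH_TOKEN_EXPIRY
  ? eval(REFRESH_TOKEN_EXPIRY) // evaluated as JS; only safe for trusted deployment config
  : 1000 * 60 * 60 * 24 * 7; // 7 days default (604800000 ms)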

View file

@ -0,0 +1,258 @@
const { CacheKeys } = require('librechat-data-provider');
const getLogStores = require('~/cache/getLogStores');
/**
* Cache key generators for different tool access patterns
* These will support future permission-based caching
*/
const ToolCacheKeys = {
/** Global tools available to all users */
GLOBAL: 'tools:global',
/** Tools available to a specific user */
USER: (userId) => `tools:user:${userId}`,
/** Tools available to a specific role */
ROLE: (roleId) => `tools:role:${roleId}`,
/** Tools available to a specific group */
GROUP: (groupId) => `tools:group:${groupId}`,
/** Combined effective tools for a user (computed from all sources) */
EFFECTIVE: (userId) => `tools:effective:${userId}`,
};
/**
* Retrieves available tools from cache
* @function getCachedTools
* @param {Object} options - Options for retrieving tools
* @param {string} [options.userId] - User ID for user-specific tools
* @param {string[]} [options.roleIds] - Role IDs for role-based tools
* @param {string[]} [options.groupIds] - Group IDs for group-based tools
* @param {boolean} [options.includeGlobal=true] - Whether to include global tools
* @returns {Promise<Object|null>} The available tools object or null if not cached
*/
async function getCachedTools(options = {}) {
const cache = getLogStores(CacheKeys.CONFIG_STORE);
const { userId, roleIds = [], groupIds = [], includeGlobal = true } = options;
// For now, return global tools (current behavior)
// This will be expanded to merge tools from different sources
if (!userId && includeGlobal) {
return await cache.get(ToolCacheKeys.GLOBAL);
}
// Future implementation will merge tools from multiple sources
// based on user permissions, roles, and groups
if (userId) {
// Check if we have pre-computed effective tools for this user
const effectiveTools = await cache.get(ToolCacheKeys.EFFECTIVE(userId));
if (effectiveTools) {
return effectiveTools;
}
// Otherwise, compute from individual sources
const toolSources = [];
if (includeGlobal) {
const globalTools = await cache.get(ToolCacheKeys.GLOBAL);
if (globalTools) {
toolSources.push(globalTools);
}
}
// User-specific tools
const userTools = await cache.get(ToolCacheKeys.USER(userId));
if (userTools) {
toolSources.push(userTools);
}
// Role-based tools
for (const roleId of roleIds) {
const roleTools = await cache.get(ToolCacheKeys.ROLE(roleId));
if (roleTools) {
toolSources.push(roleTools);
}
}
// Group-based tools
for (const groupId of groupIds) {
const groupTools = await cache.get(ToolCacheKeys.GROUP(groupId));
if (groupTools) {
toolSources.push(groupTools);
}
}
// Merge all tool sources (for now, simple merge - future will handle conflicts)
if (toolSources.length > 0) {
return mergeToolSources(toolSources);
}
}
return null;
}
/**
* Sets available tools in cache
* @function setCachedTools
* @param {Object} tools - The tools object to cache
* @param {Object} options - Options for caching tools
* @param {string} [options.userId] - User ID for user-specific tools
* @param {string} [options.roleId] - Role ID for role-based tools
* @param {string} [options.groupId] - Group ID for group-based tools
* @param {boolean} [options.isGlobal=false] - Whether these are global tools
* @param {number} [options.ttl] - Time to live in milliseconds
* @returns {Promise<boolean>} Whether the operation was successful
*/
async function setCachedTools(tools, options = {}) {
const cache = getLogStores(CacheKeys.CONFIG_STORE);
const { userId, roleId, groupId, isGlobal = false, ttl } = options;
let cacheKey;
if (isGlobal || (!userId && !roleId && !groupId)) {
cacheKey = ToolCacheKeys.GLOBAL;
} else if (userId) {
cacheKey = ToolCacheKeys.USER(userId);
} else if (roleId) {
cacheKey = ToolCacheKeys.ROLE(roleId);
} else if (groupId) {
cacheKey = ToolCacheKeys.GROUP(groupId);
}
if (!cacheKey) {
throw new Error('Invalid cache key options provided');
}
return await cache.set(cacheKey, tools, ttl);
}
/**
* Invalidates cached tools
* @function invalidateCachedTools
* @param {Object} options - Options for invalidating tools
* @param {string} [options.userId] - User ID to invalidate
* @param {string} [options.roleId] - Role ID to invalidate
* @param {string} [options.groupId] - Group ID to invalidate
* @param {boolean} [options.invalidateGlobal=false] - Whether to invalidate global tools
* @param {boolean} [options.invalidateEffective=true] - Whether to invalidate effective tools
* @returns {Promise<void>}
*/
async function invalidateCachedTools(options = {}) {
const cache = getLogStores(CacheKeys.CONFIG_STORE);
const { userId, roleId, groupId, invalidateGlobal = false, invalidateEffective = true } = options;
const keysToDelete = [];
if (invalidateGlobal) {
keysToDelete.push(ToolCacheKeys.GLOBAL);
}
if (userId) {
keysToDelete.push(ToolCacheKeys.USER(userId));
if (invalidateEffective) {
keysToDelete.push(ToolCacheKeys.EFFECTIVE(userId));
}
}
if (roleId) {
keysToDelete.push(ToolCacheKeys.ROLE(roleId));
// TODO: In future, invalidate all users with this role
}
if (groupId) {
keysToDelete.push(ToolCacheKeys.GROUP(groupId));
// TODO: In future, invalidate all users in this group
}
await Promise.all(keysToDelete.map((key) => cache.delete(key)));
}
/**
* Computes and caches effective tools for a user
* @function computeEffectiveTools
* @param {string} userId - The user ID
* @param {Object} context - Context containing user's roles and groups
* @param {string[]} [context.roleIds=[]] - User's role IDs
* @param {string[]} [context.groupIds=[]] - User's group IDs
* @param {number} [ttl] - Time to live for the computed result
* @returns {Promise<Object>} The computed effective tools
*/
async function computeEffectiveTools(userId, context = {}, ttl) {
const { roleIds = [], groupIds = [] } = context;
// Get all tool sources
const tools = await getCachedTools({
userId,
roleIds,
groupIds,
includeGlobal: true,
});
if (tools) {
// Cache the computed result
const cache = getLogStores(CacheKeys.CONFIG_STORE);
await cache.set(ToolCacheKeys.EFFECTIVE(userId), tools, ttl);
}
return tools;
}
/**
* Merges multiple tool sources into a single tools object
* @function mergeToolSources
* @param {Object[]} sources - Array of tool objects to merge
* @returns {Object} Merged tools object
*/
function mergeToolSources(sources) {
// For now, simple merge that combines all tools
// Future implementation will handle:
// - Permission precedence (deny > allow)
// - Tool property conflicts
// - Metadata merging
const merged = {};
for (const source of sources) {
if (!source || typeof source !== 'object') {
continue;
}
for (const [toolId, toolConfig] of Object.entries(source)) {
// Simple last-write-wins for now
// Future: merge based on permission levels
merged[toolId] = toolConfig;
}
}
return merged;
}
/**
* Middleware-friendly function to get tools for a request
* @function getToolsForRequest
* @param {Object} req - Express request object
* @returns {Promise<Object|null>} Available tools for the request
*/
async function getToolsForRequest(req) {
const userId = req.user?.id;
// For now, return global tools if no user
if (!userId) {
return getCachedTools({ includeGlobal: true });
}
// Future: Extract roles and groups from req.user
const roleIds = req.user?.roles || [];
const groupIds = req.user?.groups || [];
return getCachedTools({
userId,
roleIds,
groupIds,
includeGlobal: true,
});
}
module.exports = {
ToolCacheKeys,
getCachedTools,
setCachedTools,
getToolsForRequest,
invalidateCachedTools,
computeEffectiveTools,
};
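
To make the intended call patterns of this new module concrete, here is a brief usage sketch (not part of the commit). It assumes the re-export through ~/server/services/Config that appears later in this diff:

const {
  setCachedTools,
  getCachedTools,
  invalidateCachedTools,
} = require('~/server/services/Config');

async function exampleToolCacheFlow(userId) {
  // Seed the global registry, mirroring what AppService does at startup.
  await setCachedTools(
    { ExampleTool: { type: 'function', function: { name: 'exampleFunction' } } },
    { isGlobal: true },
  );
  // Global read: the current, pre-permissions behavior.
  const globalTools = await getCachedTools({ includeGlobal: true });
  // Per-user read: merges global with user/role/group sources once those exist.
  const userTools = await getCachedTools({ userId, includeGlobal: true });
  // Drop a user's cached entries, e.g. after their plugin auth changes.
  await invalidateCachedTools({ userId });
  return { globalTools, userTools };
}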

View file

@ -1,6 +1,10 @@
const { logger } = require('@librechat/data-schemas');
const { getUserMCPAuthMap } = require('@librechat/api');
const { CacheKeys, EModelEndpoint } = require('librechat-data-provider');
const { normalizeEndpointName, isEnabled } = require('~/server/utils');
const loadCustomConfig = require('./loadCustomConfig');
const { getCachedTools } = require('./getCachedTools');
const { findPluginAuthsByKeys } = require('~/models');
const getLogStores = require('~/cache/getLogStores');
/**
@ -50,4 +54,46 @@ const getCustomEndpointConfig = async (endpoint) => {
);
};
module.exports = { getCustomConfig, getBalanceConfig, getCustomEndpointConfig };
async function createGetMCPAuthMap() {
const customConfig = await getCustomConfig();
const mcpServers = customConfig?.mcpServers;
const hasCustomUserVars = Object.values(mcpServers ?? {}).some((server) => server.customUserVars);
if (!hasCustomUserVars) {
return;
}
/**
* @param {Object} params
* @param {GenericTool[]} [params.tools]
* @param {string} params.userId
* @returns {Promise<Record<string, Record<string, string>> | undefined>}
*/
return async function ({ tools, userId }) {
try {
if (!tools || tools.length === 0) {
return;
}
const appTools = await getCachedTools({
userId,
});
return await getUserMCPAuthMap({
tools,
userId,
appTools,
findPluginAuthsByKeys,
});
} catch (err) {
logger.error(
`[api/server/controllers/agents/client.js #chatCompletion] Error getting custom user vars for agent`,
err,
);
}
};
}
module.exports = {
getCustomConfig,
getBalanceConfig,
createGetMCPAuthMap,
getCustomEndpointConfig,
};
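
A short usage sketch for the new factory (illustrative only; getAuthMapForRun is a hypothetical caller). Note that the factory resolves to undefined when no MCP server defines customUserVars, so consumers must guard:

const { createGetMCPAuthMap } = require('~/server/services/Config');

async function getAuthMapForRun(tools, userId) {
  const getMCPAuthMap = await createGetMCPAuthMap();
  if (!getMCPAuthMap) {
    return undefined; // no MCP servers with custom user vars configured
  }
  return await getMCPAuthMap({ tools, userId });
}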

View file

@ -1,4 +1,5 @@
const { config } = require('./EndpointService');
const getCachedTools = require('./getCachedTools');
const getCustomConfig = require('./getCustomConfig');
const loadCustomConfig = require('./loadCustomConfig');
const loadConfigModels = require('./loadConfigModels');
@ -14,6 +15,7 @@ module.exports = {
loadDefaultModels,
loadOverrideConfig,
loadAsyncEndpoints,
...getCachedTools,
...getCustomConfig,
...getEndpointsConfig,
};

View file

@ -0,0 +1,196 @@
const { Providers } = require('@librechat/agents');
const { primeResources, optionalChainWithEmptyCheck } = require('@librechat/api');
const {
ErrorTypes,
EModelEndpoint,
EToolResources,
replaceSpecialVars,
providerEndpointMap,
} = require('librechat-data-provider');
const initAnthropic = require('~/server/services/Endpoints/anthropic/initialize');
const getBedrockOptions = require('~/server/services/Endpoints/bedrock/options');
const initOpenAI = require('~/server/services/Endpoints/openAI/initialize');
const initCustom = require('~/server/services/Endpoints/custom/initialize');
const initGoogle = require('~/server/services/Endpoints/google/initialize');
const generateArtifactsPrompt = require('~/app/clients/prompts/artifacts');
const { getCustomEndpointConfig } = require('~/server/services/Config');
const { processFiles } = require('~/server/services/Files/process');
const { getConvoFiles } = require('~/models/Conversation');
const { getToolFilesByIds } = require('~/models/File');
const { getModelMaxTokens } = require('~/utils');
const { getFiles } = require('~/models/File');
const providerConfigMap = {
[Providers.XAI]: initCustom,
[Providers.OLLAMA]: initCustom,
[Providers.DEEPSEEK]: initCustom,
[Providers.OPENROUTER]: initCustom,
[EModelEndpoint.openAI]: initOpenAI,
[EModelEndpoint.google]: initGoogle,
[EModelEndpoint.azureOpenAI]: initOpenAI,
[EModelEndpoint.anthropic]: initAnthropic,
[EModelEndpoint.bedrock]: getBedrockOptions,
};
/**
* @param {object} params
* @param {ServerRequest} params.req
* @param {ServerResponse} params.res
* @param {Agent} params.agent
* @param {string | null} [params.conversationId]
* @param {Array<IMongoFile>} [params.requestFiles]
* @param {typeof import('~/server/services/ToolService').loadAgentTools | undefined} [params.loadTools]
* @param {TEndpointOption} [params.endpointOption]
* @param {Set<string>} [params.allowedProviders]
* @param {boolean} [params.isInitialAgent]
* @returns {Promise<Agent & { tools: StructuredTool[], attachments: Array<MongoFile>, toolContextMap: Record<string, unknown>, maxContextTokens: number }>}
*/
const initializeAgent = async ({
req,
res,
agent,
loadTools,
requestFiles,
conversationId,
endpointOption,
allowedProviders,
isInitialAgent = false,
}) => {
if (allowedProviders.size > 0 && !allowedProviders.has(agent.provider)) {
throw new Error(
`{ "type": "${ErrorTypes.INVALID_AGENT_PROVIDER}", "info": "${agent.provider}" }`,
);
}
let currentFiles;
const _modelOptions = structuredClone(
Object.assign(
{ model: agent.model },
agent.model_parameters ?? { model: agent.model },
isInitialAgent === true ? endpointOption?.model_parameters : {},
),
);
const { resendFiles = true, ...modelOptions } = _modelOptions;
if (isInitialAgent && conversationId != null && resendFiles) {
const fileIds = (await getConvoFiles(conversationId)) ?? [];
/** @type {Set<EToolResources>} */
const toolResourceSet = new Set();
for (const tool of agent.tools) {
if (EToolResources[tool]) {
toolResourceSet.add(EToolResources[tool]);
}
}
const toolFiles = await getToolFilesByIds(fileIds, toolResourceSet);
if (requestFiles.length || toolFiles.length) {
currentFiles = await processFiles(requestFiles.concat(toolFiles));
}
} else if (isInitialAgent && requestFiles.length) {
currentFiles = await processFiles(requestFiles);
}
const { attachments, tool_resources } = await primeResources({
req,
getFiles,
attachments: currentFiles,
tool_resources: agent.tool_resources,
requestFileSet: new Set(requestFiles?.map((file) => file.file_id)),
});
const provider = agent.provider;
const { tools, toolContextMap } =
(await loadTools?.({
req,
res,
provider,
agentId: agent.id,
tools: agent.tools,
model: agent.model,
tool_resources,
})) ?? {};
agent.endpoint = provider;
let getOptions = providerConfigMap[provider];
if (!getOptions && providerConfigMap[provider.toLowerCase()] != null) {
agent.provider = provider.toLowerCase();
getOptions = providerConfigMap[agent.provider];
} else if (!getOptions) {
const customEndpointConfig = await getCustomEndpointConfig(provider);
if (!customEndpointConfig) {
throw new Error(`Provider ${provider} not supported`);
}
getOptions = initCustom;
agent.provider = Providers.OPENAI;
}
const _endpointOption =
isInitialAgent === true
? Object.assign({}, endpointOption, { model_parameters: modelOptions })
: { model_parameters: modelOptions };
const options = await getOptions({
req,
res,
optionsOnly: true,
overrideEndpoint: provider,
overrideModel: agent.model,
endpointOption: _endpointOption,
});
const tokensModel =
agent.provider === EModelEndpoint.azureOpenAI ? agent.model : modelOptions.model;
const maxTokens = optionalChainWithEmptyCheck(
modelOptions.maxOutputTokens,
modelOptions.maxTokens,
0,
);
const maxContextTokens = optionalChainWithEmptyCheck(
modelOptions.maxContextTokens,
modelOptions.max_context_tokens,
getModelMaxTokens(tokensModel, providerEndpointMap[provider]),
4096,
);
if (
agent.endpoint === EModelEndpoint.azureOpenAI &&
options.llmConfig?.azureOpenAIApiInstanceName == null
) {
agent.provider = Providers.OPENAI;
}
if (options.provider != null) {
agent.provider = options.provider;
}
/** @type {import('@librechat/agents').ClientOptions} */
agent.model_parameters = { ...options.llmConfig };
if (options.configOptions) {
agent.model_parameters.configuration = options.configOptions;
}
if (agent.instructions && agent.instructions !== '') {
agent.instructions = replaceSpecialVars({
text: agent.instructions,
user: req.user,
});
}
if (typeof agent.artifacts === 'string' && agent.artifacts !== '') {
agent.additional_instructions = generateArtifactsPrompt({
endpoint: agent.provider,
artifacts: agent.artifacts,
});
}
return {
...agent,
tools,
attachments,
resendFiles,
toolContextMap,
maxContextTokens: (maxContextTokens - maxTokens) * 0.9,
};
};
module.exports = { initializeAgent };
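
For orientation, a sketch of how a route-level caller is expected to wire this together (it mirrors the refactored initialize.js later in this diff; buildPrimaryAgent is a hypothetical wrapper):

const { initializeAgent } = require('~/server/services/Endpoints/agents/agent');

async function buildPrimaryAgent({ req, res, endpointOption, loadTools }) {
  const agent = await endpointOption.agent; // resolved agent document
  return await initializeAgent({
    req,
    res,
    agent,
    loadTools,
    requestFiles: req.body.files ?? [],
    conversationId: req.body.conversationId,
    endpointOption,
    allowedProviders: new Set(), // an empty set skips the provider allow-list check
    isInitialAgent: true,
  });
}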

View file

@ -1,294 +1,41 @@
const { createContentAggregator, Providers } = require('@librechat/agents');
const {
Constants,
ErrorTypes,
EModelEndpoint,
EToolResources,
getResponseSender,
AgentCapabilities,
replaceSpecialVars,
providerEndpointMap,
} = require('librechat-data-provider');
const {
getDefaultHandlers,
createToolEndCallback,
} = require('~/server/controllers/agents/callbacks');
const initAnthropic = require('~/server/services/Endpoints/anthropic/initialize');
const getBedrockOptions = require('~/server/services/Endpoints/bedrock/options');
const initOpenAI = require('~/server/services/Endpoints/openAI/initialize');
const initCustom = require('~/server/services/Endpoints/custom/initialize');
const initGoogle = require('~/server/services/Endpoints/google/initialize');
const generateArtifactsPrompt = require('~/app/clients/prompts/artifacts');
const { getCustomEndpointConfig } = require('~/server/services/Config');
const { processFiles } = require('~/server/services/Files/process');
const { loadAgentTools } = require('~/server/services/ToolService');
const AgentClient = require('~/server/controllers/agents/client');
const { getConvoFiles } = require('~/models/Conversation');
const { getToolFilesByIds } = require('~/models/File');
const { getModelMaxTokens } = require('~/utils');
const { getAgent } = require('~/models/Agent');
const { getFiles } = require('~/models/File');
const { logger } = require('~/config');
const providerConfigMap = {
[Providers.XAI]: initCustom,
[Providers.OLLAMA]: initCustom,
[Providers.DEEPSEEK]: initCustom,
[Providers.OPENROUTER]: initCustom,
[EModelEndpoint.openAI]: initOpenAI,
[EModelEndpoint.google]: initGoogle,
[EModelEndpoint.azureOpenAI]: initOpenAI,
[EModelEndpoint.anthropic]: initAnthropic,
[EModelEndpoint.bedrock]: getBedrockOptions,
};
/**
* @param {Object} params
* @param {ServerRequest} params.req
* @param {Promise<Array<MongoFile | null>> | undefined} [params.attachments]
* @param {Set<string>} params.requestFileSet
* @param {AgentToolResources | undefined} [params.tool_resources]
* @returns {Promise<{ attachments: Array<MongoFile | undefined> | undefined, tool_resources: AgentToolResources | undefined }>}
*/
const primeResources = async ({
req,
attachments: _attachments,
tool_resources: _tool_resources,
requestFileSet,
}) => {
try {
/** @type {Array<MongoFile | undefined> | undefined} */
let attachments;
const tool_resources = _tool_resources ?? {};
const isOCREnabled = (req.app.locals?.[EModelEndpoint.agents]?.capabilities ?? []).includes(
AgentCapabilities.ocr,
);
if (tool_resources[EToolResources.ocr]?.file_ids && isOCREnabled) {
const context = await getFiles(
{
file_id: { $in: tool_resources.ocr.file_ids },
},
{},
{},
);
attachments = (attachments ?? []).concat(context);
}
if (!_attachments) {
return { attachments, tool_resources };
}
/** @type {Array<MongoFile | undefined> | undefined} */
const files = await _attachments;
if (!attachments) {
/** @type {Array<MongoFile | undefined>} */
attachments = [];
}
for (const file of files) {
if (!file) {
continue;
}
if (file.metadata?.fileIdentifier) {
const execute_code = tool_resources[EToolResources.execute_code] ?? {};
if (!execute_code.files) {
tool_resources[EToolResources.execute_code] = { ...execute_code, files: [] };
}
tool_resources[EToolResources.execute_code].files.push(file);
} else if (file.embedded === true) {
const file_search = tool_resources[EToolResources.file_search] ?? {};
if (!file_search.files) {
tool_resources[EToolResources.file_search] = { ...file_search, files: [] };
}
tool_resources[EToolResources.file_search].files.push(file);
} else if (
requestFileSet.has(file.file_id) &&
file.type.startsWith('image') &&
file.height &&
file.width
) {
const image_edit = tool_resources[EToolResources.image_edit] ?? {};
if (!image_edit.files) {
tool_resources[EToolResources.image_edit] = { ...image_edit, files: [] };
}
tool_resources[EToolResources.image_edit].files.push(file);
}
attachments.push(file);
}
return { attachments, tool_resources };
} catch (error) {
logger.error('Error priming resources', error);
return { attachments: _attachments, tool_resources: _tool_resources };
}
};
/**
* @param {...string | number} values
* @returns {string | number | undefined}
*/
function optionalChainWithEmptyCheck(...values) {
for (const value of values) {
if (value !== undefined && value !== null && value !== '') {
return value;
}
}
return values[values.length - 1];
}
/**
* @param {object} params
* @param {ServerRequest} params.req
* @param {ServerResponse} params.res
* @param {Agent} params.agent
* @param {Set<string>} [params.allowedProviders]
* @param {object} [params.endpointOption]
* @param {boolean} [params.isInitialAgent]
* @returns {Promise<Agent>}
*/
const initializeAgentOptions = async ({
req,
res,
agent,
endpointOption,
allowedProviders,
isInitialAgent = false,
}) => {
if (allowedProviders.size > 0 && !allowedProviders.has(agent.provider)) {
throw new Error(
`{ "type": "${ErrorTypes.INVALID_AGENT_PROVIDER}", "info": "${agent.provider}" }`,
);
}
let currentFiles;
/** @type {Array<MongoFile>} */
const requestFiles = req.body.files ?? [];
if (
isInitialAgent &&
req.body.conversationId != null &&
(agent.model_parameters?.resendFiles ?? true) === true
) {
const fileIds = (await getConvoFiles(req.body.conversationId)) ?? [];
/** @type {Set<EToolResources>} */
const toolResourceSet = new Set();
for (const tool of agent.tools) {
if (EToolResources[tool]) {
toolResourceSet.add(EToolResources[tool]);
}
}
const toolFiles = await getToolFilesByIds(fileIds, toolResourceSet);
if (requestFiles.length || toolFiles.length) {
currentFiles = await processFiles(requestFiles.concat(toolFiles));
}
} else if (isInitialAgent && requestFiles.length) {
currentFiles = await processFiles(requestFiles);
}
const { attachments, tool_resources } = await primeResources({
req,
attachments: currentFiles,
tool_resources: agent.tool_resources,
requestFileSet: new Set(requestFiles.map((file) => file.file_id)),
});
const provider = agent.provider;
const { tools, toolContextMap } = await loadAgentTools({
req,
res,
agent: {
id: agent.id,
tools: agent.tools,
provider,
model: agent.model,
},
tool_resources,
});
agent.endpoint = provider;
let getOptions = providerConfigMap[provider];
if (!getOptions && providerConfigMap[provider.toLowerCase()] != null) {
agent.provider = provider.toLowerCase();
getOptions = providerConfigMap[agent.provider];
} else if (!getOptions) {
const customEndpointConfig = await getCustomEndpointConfig(provider);
if (!customEndpointConfig) {
throw new Error(`Provider ${provider} not supported`);
}
getOptions = initCustom;
agent.provider = Providers.OPENAI;
}
const model_parameters = Object.assign(
{},
agent.model_parameters ?? { model: agent.model },
isInitialAgent === true ? endpointOption?.model_parameters : {},
);
const _endpointOption =
isInitialAgent === true
? Object.assign({}, endpointOption, { model_parameters })
: { model_parameters };
const options = await getOptions({
req,
res,
optionsOnly: true,
overrideEndpoint: provider,
overrideModel: agent.model,
endpointOption: _endpointOption,
});
if (
agent.endpoint === EModelEndpoint.azureOpenAI &&
options.llmConfig?.azureOpenAIApiInstanceName == null
) {
agent.provider = Providers.OPENAI;
}
if (options.provider != null) {
agent.provider = options.provider;
}
/** @type {import('@librechat/agents').ClientOptions} */
agent.model_parameters = Object.assign(model_parameters, options.llmConfig);
if (options.configOptions) {
agent.model_parameters.configuration = options.configOptions;
}
if (!agent.model_parameters.model) {
agent.model_parameters.model = agent.model;
}
if (agent.instructions && agent.instructions !== '') {
agent.instructions = replaceSpecialVars({
text: agent.instructions,
user: req.user,
});
}
if (typeof agent.artifacts === 'string' && agent.artifacts !== '') {
agent.additional_instructions = generateArtifactsPrompt({
endpoint: agent.provider,
artifacts: agent.artifacts,
});
}
const tokensModel =
agent.provider === EModelEndpoint.azureOpenAI ? agent.model : agent.model_parameters.model;
const maxTokens = optionalChainWithEmptyCheck(
agent.model_parameters.maxOutputTokens,
agent.model_parameters.maxTokens,
0,
);
const maxContextTokens = optionalChainWithEmptyCheck(
agent.model_parameters.maxContextTokens,
agent.max_context_tokens,
getModelMaxTokens(tokensModel, providerEndpointMap[provider]),
4096,
);
return {
...agent,
tools,
attachments,
toolContextMap,
maxContextTokens: (maxContextTokens - maxTokens) * 0.9,
};
};

const { logger } = require('@librechat/data-schemas');
const { createContentAggregator } = require('@librechat/agents');
const { Constants, EModelEndpoint, getResponseSender } = require('librechat-data-provider');
const {
getDefaultHandlers,
createToolEndCallback,
} = require('~/server/controllers/agents/callbacks');
const { initializeAgent } = require('~/server/services/Endpoints/agents/agent');
const { loadAgentTools } = require('~/server/services/ToolService');
const AgentClient = require('~/server/controllers/agents/client');
const { getAgent } = require('~/models/Agent');
function createToolLoader() {
/**
* @param {object} params
* @param {ServerRequest} params.req
* @param {ServerResponse} params.res
* @param {string} params.agentId
* @param {string[]} params.tools
* @param {string} params.provider
* @param {string} params.model
* @param {AgentToolResources} params.tool_resources
* @returns {Promise<{ tools: StructuredTool[], toolContextMap: Record<string, unknown> } | undefined>}
*/
return async function loadTools({ req, res, agentId, tools, provider, model, tool_resources }) {
const agent = { id: agentId, tools, provider, model };
try {
return await loadAgentTools({
req,
res,
agent,
tool_resources,
});
} catch (error) {
logger.error('Error loading tools for agent ' + agentId, error);
}
};
}
const initializeClient = async ({ req, res, endpointOption }) => {
if (!endpointOption) {
@ -313,7 +60,6 @@ const initializeClient = async ({ req, res, endpointOption }) => {
throw new Error('No agent promise provided');
}
// Initialize primary agent
const primaryAgent = await endpointOption.agent;
if (!primaryAgent) {
throw new Error('Agent not found');
@ -323,10 +69,18 @@ const initializeClient = async ({ req, res, endpointOption }) => {
/** @type {Set<string>} */
const allowedProviders = new Set(req?.app?.locals?.[EModelEndpoint.agents]?.allowedProviders);
// Handle primary agent
const primaryConfig = await initializeAgentOptions({
const loadTools = createToolLoader();
/** @type {Array<MongoFile>} */
const requestFiles = req.body.files ?? [];
/** @type {string} */
const conversationId = req.body.conversationId;
const primaryConfig = await initializeAgent({
req,
res,
loadTools,
requestFiles,
conversationId,
agent: primaryAgent,
endpointOption,
allowedProviders,
@ -340,10 +94,13 @@ const initializeClient = async ({ req, res, endpointOption }) => {
if (!agent) {
throw new Error(`Agent ${agentId} not found`);
}
const config = await initializeAgentOptions({
const config = await initializeAgent({
req,
res,
agent,
loadTools,
requestFiles,
conversationId,
endpointOption,
allowedProviders,
});
@ -373,8 +130,8 @@ const initializeClient = async ({ req, res, endpointOption }) => {
iconURL: endpointOption.iconURL,
attachments: primaryConfig.attachments,
endpointType: endpointOption.endpointType,
resendFiles: primaryConfig.resendFiles ?? true,
maxContextTokens: primaryConfig.maxContextTokens,
resendFiles: primaryConfig.model_parameters?.resendFiles ?? true,
endpoint:
primaryConfig.id === Constants.EPHEMERAL_AGENT_ID
? primaryConfig.endpoint

View file

@ -1,4 +1,4 @@
const { HttpsProxyAgent } = require('https-proxy-agent');
const { ProxyAgent } = require('undici');
const { anthropicSettings, removeNullishValues } = require('librechat-data-provider');
const { checkPromptCacheSupport, getClaudeHeaders, configureReasoning } = require('./helpers');
@ -67,7 +67,10 @@ function getLLMConfig(apiKey, options = {}) {
}
if (options.proxy) {
requestOptions.clientOptions.httpAgent = new HttpsProxyAgent(options.proxy);
const proxyAgent = new ProxyAgent(options.proxy);
requestOptions.clientOptions.fetchOptions = {
dispatcher: proxyAgent,
};
}
if (options.reverseProxyUrl) {
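
For context on the swap above (sketch only, not part of the commit): undici's ProxyAgent is a fetch dispatcher rather than a Node http.Agent, so it is handed to the SDK through fetchOptions instead of httpAgent:

const { ProxyAgent } = require('undici');

const proxyAgent = new ProxyAgent('http://proxy:8080');
const clientOptions = {
  fetchOptions: {
    dispatcher: proxyAgent, // undici routes each fetch request through the proxy
  },
};
// Presumably consumed by the Anthropic client constructor, consistent with the updated test below.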

View file

@ -21,8 +21,12 @@ describe('getLLMConfig', () => {
proxy: 'http://proxy:8080',
});
expect(result.llmConfig.clientOptions).toHaveProperty('httpAgent');
expect(result.llmConfig.clientOptions.httpAgent).toHaveProperty('proxy', 'http://proxy:8080');
expect(result.llmConfig.clientOptions).toHaveProperty('fetchOptions');
expect(result.llmConfig.clientOptions.fetchOptions).toHaveProperty('dispatcher');
expect(result.llmConfig.clientOptions.fetchOptions.dispatcher).toBeDefined();
expect(result.llmConfig.clientOptions.fetchOptions.dispatcher.constructor.name).toBe(
'ProxyAgent',
);
});
it('should include reverse proxy URL when provided', () => {

View file

@ -1,5 +1,6 @@
const OpenAI = require('openai');
const { HttpsProxyAgent } = require('https-proxy-agent');
const { constructAzureURL, isUserProvided } = require('@librechat/api');
const {
ErrorTypes,
EModelEndpoint,
@ -12,8 +13,6 @@ const {
checkUserKeyExpiry,
} = require('~/server/services/UserService');
const OpenAIClient = require('~/app/clients/OpenAIClient');
const { isUserProvided } = require('~/server/utils');
const { constructAzureURL } = require('~/utils');
class Files {
constructor(client) {

View file

@ -1,4 +1,5 @@
const { HttpsProxyAgent } = require('https-proxy-agent');
const { createHandleLLMNewToken } = require('@librechat/api');
const {
AuthType,
Constants,
@ -8,7 +9,6 @@ const {
removeNullishValues,
} = require('librechat-data-provider');
const { getUserKey, checkUserKeyExpiry } = require('~/server/services/UserService');
const { createHandleLLMNewToken } = require('~/app/clients/generators');
const getOptions = async ({ req, overrideModel, endpointOption }) => {
const {

View file

@ -6,10 +6,9 @@ const {
extractEnvVariable,
} = require('librechat-data-provider');
const { Providers } = require('@librechat/agents');
const { getOpenAIConfig, createHandleLLMNewToken } = require('@librechat/api');
const { getUserKeyValues, checkUserKeyExpiry } = require('~/server/services/UserService');
const { getLLMConfig } = require('~/server/services/Endpoints/openAI/llm');
const { getCustomEndpointConfig } = require('~/server/services/Config');
const { createHandleLLMNewToken } = require('~/app/clients/generators');
const { fetchModels } = require('~/server/services/ModelService');
const OpenAIClient = require('~/app/clients/OpenAIClient');
const { isUserProvided } = require('~/server/utils');
@ -144,7 +143,7 @@ const initializeClient = async ({ req, res, endpointOption, optionsOnly, overrid
clientOptions,
);
clientOptions.modelOptions.user = req.user.id;
const options = getLLMConfig(apiKey, clientOptions, endpoint);
const options = getOpenAIConfig(apiKey, clientOptions, endpoint);
if (!customOptions.streamRate) {
return options;
}

View file

@ -94,7 +94,7 @@ function getLLMConfig(credentials, options = {}) {
// Extract from credentials
const serviceKeyRaw = creds[AuthKeys.GOOGLE_SERVICE_KEY] ?? {};
const serviceKey =
typeof serviceKeyRaw === 'string' ? JSON.parse(serviceKeyRaw) : serviceKeyRaw ?? {};
typeof serviceKeyRaw === 'string' ? JSON.parse(serviceKeyRaw) : (serviceKeyRaw ?? {});
const project_id = serviceKey?.project_id ?? null;
const apiKey = creds[AuthKeys.GOOGLE_API_KEY] ?? null;
@ -156,10 +156,6 @@ function getLLMConfig(credentials, options = {}) {
}
if (authHeader) {
/**
* NOTE: NOT SUPPORTED BY LANGCHAIN GENAI CLIENT,
* REQUIRES PR IN https://github.com/langchain-ai/langchainjs
*/
llmConfig.customHeaders = {
Authorization: `Bearer ${apiKey}`,
};

View file

@ -1,11 +1,10 @@
const {
EModelEndpoint,
mapModelToAzureConfig,
resolveHeaders,
mapModelToAzureConfig,
} = require('librechat-data-provider');
const { isEnabled, isUserProvided, getAzureCredentials } = require('@librechat/api');
const { getUserKeyValues, checkUserKeyExpiry } = require('~/server/services/UserService');
const { isEnabled, isUserProvided } = require('~/server/utils');
const { getAzureCredentials } = require('~/utils');
const { PluginsClient } = require('~/app');
const initializeClient = async ({ req, res, endpointOption }) => {

View file

@ -4,12 +4,15 @@ const {
resolveHeaders,
mapModelToAzureConfig,
} = require('librechat-data-provider');
const {
isEnabled,
isUserProvided,
getOpenAIConfig,
getAzureCredentials,
createHandleLLMNewToken,
} = require('@librechat/api');
const { getUserKeyValues, checkUserKeyExpiry } = require('~/server/services/UserService');
const { getLLMConfig } = require('~/server/services/Endpoints/openAI/llm');
const { createHandleLLMNewToken } = require('~/app/clients/generators');
const { isEnabled, isUserProvided } = require('~/server/utils');
const OpenAIClient = require('~/app/clients/OpenAIClient');
const { getAzureCredentials } = require('~/utils');
const initializeClient = async ({
req,
@ -140,7 +143,7 @@ const initializeClient = async ({
modelOptions.model = modelName;
clientOptions = Object.assign({ modelOptions }, clientOptions);
clientOptions.modelOptions.user = req.user.id;
const options = getLLMConfig(apiKey, clientOptions);
const options = getOpenAIConfig(apiKey, clientOptions);
const streamRate = clientOptions.streamRate;
if (!streamRate) {
return options;

View file

@ -1,170 +0,0 @@
const { HttpsProxyAgent } = require('https-proxy-agent');
const { KnownEndpoints } = require('librechat-data-provider');
const { sanitizeModelName, constructAzureURL } = require('~/utils');
const { isEnabled } = require('~/server/utils');
/**
* Generates configuration options for creating a language model (LLM) instance.
* @param {string} apiKey - The API key for authentication.
* @param {Object} options - Additional options for configuring the LLM.
* @param {Object} [options.modelOptions] - Model-specific options.
* @param {string} [options.modelOptions.model] - The name of the model to use.
* @param {string} [options.modelOptions.user] - The user ID
* @param {number} [options.modelOptions.temperature] - Controls randomness in output generation (0-2).
* @param {number} [options.modelOptions.top_p] - Controls diversity via nucleus sampling (0-1).
* @param {number} [options.modelOptions.frequency_penalty] - Reduces repetition of token sequences (-2 to 2).
* @param {number} [options.modelOptions.presence_penalty] - Encourages discussing new topics (-2 to 2).
* @param {number} [options.modelOptions.max_tokens] - The maximum number of tokens to generate.
* @param {string[]} [options.modelOptions.stop] - Sequences where the API will stop generating further tokens.
* @param {string} [options.reverseProxyUrl] - URL for a reverse proxy, if used.
* @param {boolean} [options.useOpenRouter] - Flag to use OpenRouter API.
* @param {Object} [options.headers] - Additional headers for API requests.
* @param {string} [options.proxy] - Proxy server URL.
* @param {Object} [options.azure] - Azure-specific configurations.
* @param {boolean} [options.streaming] - Whether to use streaming mode.
* @param {Object} [options.addParams] - Additional parameters to add to the model options.
* @param {string[]} [options.dropParams] - Parameters to remove from the model options.
* @param {string|null} [endpoint=null] - The endpoint name
* @returns {Object} Configuration options for creating an LLM instance.
*/
function getLLMConfig(apiKey, options = {}, endpoint = null) {
let {
modelOptions = {},
reverseProxyUrl,
defaultQuery,
headers,
proxy,
azure,
streaming = true,
addParams,
dropParams,
} = options;
/** @type {OpenAIClientOptions} */
let llmConfig = {
streaming,
};
Object.assign(llmConfig, modelOptions);
if (addParams && typeof addParams === 'object') {
Object.assign(llmConfig, addParams);
}
/** Note: OpenAI Web Search models do not support any known parameters besides `max_tokens` */
if (modelOptions.model && /gpt-4o.*search/.test(modelOptions.model)) {
const searchExcludeParams = [
'frequency_penalty',
'presence_penalty',
'temperature',
'top_p',
'top_k',
'stop',
'logit_bias',
'seed',
'response_format',
'n',
'logprobs',
'user',
];
dropParams = dropParams || [];
dropParams = [...new Set([...dropParams, ...searchExcludeParams])];
}
if (dropParams && Array.isArray(dropParams)) {
dropParams.forEach((param) => {
if (llmConfig[param]) {
llmConfig[param] = undefined;
}
});
}
let useOpenRouter;
/** @type {OpenAIClientOptions['configuration']} */
const configOptions = {};
if (
(reverseProxyUrl && reverseProxyUrl.includes(KnownEndpoints.openrouter)) ||
(endpoint && endpoint.toLowerCase().includes(KnownEndpoints.openrouter))
) {
useOpenRouter = true;
llmConfig.include_reasoning = true;
configOptions.baseURL = reverseProxyUrl;
configOptions.defaultHeaders = Object.assign(
{
'HTTP-Referer': 'https://librechat.ai',
'X-Title': 'LibreChat',
},
headers,
);
} else if (reverseProxyUrl) {
configOptions.baseURL = reverseProxyUrl;
if (headers) {
configOptions.defaultHeaders = headers;
}
}
if (defaultQuery) {
configOptions.defaultQuery = defaultQuery;
}
if (proxy) {
const proxyAgent = new HttpsProxyAgent(proxy);
Object.assign(configOptions, {
httpAgent: proxyAgent,
httpsAgent: proxyAgent,
});
}
if (azure) {
const useModelName = isEnabled(process.env.AZURE_USE_MODEL_AS_DEPLOYMENT_NAME);
azure.azureOpenAIApiDeploymentName = useModelName
? sanitizeModelName(llmConfig.model)
: azure.azureOpenAIApiDeploymentName;
if (process.env.AZURE_OPENAI_DEFAULT_MODEL) {
llmConfig.model = process.env.AZURE_OPENAI_DEFAULT_MODEL;
}
if (configOptions.baseURL) {
const azureURL = constructAzureURL({
baseURL: configOptions.baseURL,
azureOptions: azure,
});
azure.azureOpenAIBasePath = azureURL.split(`/${azure.azureOpenAIApiDeploymentName}`)[0];
}
Object.assign(llmConfig, azure);
llmConfig.model = llmConfig.azureOpenAIApiDeploymentName;
} else {
llmConfig.apiKey = apiKey;
// Object.assign(llmConfig, {
// configuration: { apiKey },
// });
}
if (process.env.OPENAI_ORGANIZATION && this.azure) {
llmConfig.organization = process.env.OPENAI_ORGANIZATION;
}
if (useOpenRouter && llmConfig.reasoning_effort != null) {
llmConfig.reasoning = {
effort: llmConfig.reasoning_effort,
};
delete llmConfig.reasoning_effort;
}
if (llmConfig?.['max_tokens'] != null) {
/** @type {number} */
llmConfig.maxTokens = llmConfig['max_tokens'];
delete llmConfig['max_tokens'];
}
return {
/** @type {OpenAIClientOptions} */
llmConfig,
/** @type {OpenAIClientOptions['configuration']} */
configOptions,
};
}
module.exports = { getLLMConfig };
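
This deleted module is superseded by the packaged implementation in @librechat/api, as the import changes elsewhere in this diff show; callers swap getLLMConfig for getOpenAIConfig with the same argument shape (a sketch, assuming the packaged export keeps the old signature):

const { getOpenAIConfig } = require('@librechat/api');

function buildOpenAIOptions(apiKey, clientOptions, endpoint) {
  // Returns { llmConfig, configOptions }, as the old local getLLMConfig did.
  return getOpenAIConfig(apiKey, clientOptions, endpoint);
}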

View file

@ -2,9 +2,9 @@ const axios = require('axios');
const fs = require('fs').promises;
const FormData = require('form-data');
const { Readable } = require('stream');
const { genAzureEndpoint } = require('@librechat/api');
const { extractEnvVariable, STTProviders } = require('librechat-data-provider');
const { getCustomConfig } = require('~/server/services/Config');
const { genAzureEndpoint } = require('~/utils');
const { logger } = require('~/config');
/**

View file

@ -1,8 +1,8 @@
const axios = require('axios');
const { genAzureEndpoint } = require('@librechat/api');
const { extractEnvVariable, TTSProviders } = require('librechat-data-provider');
const { getRandomVoiceId, createChunkProcessor, splitTextIntoChunks } = require('./streamAudio');
const { getCustomConfig } = require('~/server/services/Config');
const { genAzureEndpoint } = require('~/utils');
const { logger } = require('~/config');
/**

View file

@ -91,15 +91,28 @@ async function prepareAzureImageURL(req, file) {
* @param {Buffer} params.buffer - The avatar image buffer.
* @param {string} params.userId - The user's id.
* @param {string} params.manual - Flag to indicate manual update.
* @param {string} [params.agentId] - Optional agent ID if this is an agent avatar.
* @param {string} [params.basePath='images'] - The base folder within the container.
* @param {string} [params.containerName] - The Azure Blob container name.
* @returns {Promise<string>} The URL of the avatar.
*/
async function processAzureAvatar({ buffer, userId, manual, basePath = 'images', containerName }) {
async function processAzureAvatar({
buffer,
userId,
manual,
agentId,
basePath = 'images',
containerName,
}) {
try {
const metadata = await sharp(buffer).metadata();
const extension = metadata.format === 'gif' ? 'gif' : 'png';
const fileName = `avatar.${extension}`;
const timestamp = new Date().getTime();
/** Unique filename with timestamp and optional agent ID */
const fileName = agentId
? `agent-${agentId}-avatar-${timestamp}.${extension}`
: `avatar-${timestamp}.${extension}`;
const downloadURL = await saveBufferToAzure({
userId,
@ -110,9 +123,12 @@ async function processAzureAvatar({ buffer, userId, manual, basePath = 'images',
});
const isManual = manual === 'true';
const url = `${downloadURL}?manual=${isManual}`;
if (isManual) {
// Only update user record if this is a user avatar (manual === 'true')
if (isManual && !agentId) {
await updateUser(userId, { avatar: url });
}
return url;
} catch (error) {
logger.error('[processAzureAvatar] Error uploading profile picture to Azure:', error);

View file

@ -1,7 +1,6 @@
const FormData = require('form-data');
const { getCodeBaseURL } = require('@librechat/agents');
const { createAxiosInstance } = require('~/config');
const { createAxiosInstance, logAxiosError } = require('@librechat/api');
const { logAxiosError } = require('~/utils');
const axios = createAxiosInstance();

View file

@ -1,6 +1,8 @@
const path = require('path');
const { v4 } = require('uuid');
const axios = require('axios');
const { logAxiosError } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const { getCodeBaseURL } = require('@librechat/agents');
const {
Tools,
@ -12,8 +14,6 @@ const {
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
const { convertImage } = require('~/server/services/Files/images/convert');
const { createFile, getFiles, updateFile } = require('~/models/File');
const { logAxiosError } = require('~/utils');
const { logger } = require('~/config');
/**
* Process OpenAI image files, convert to target format, save and return file metadata.

View file

@ -82,14 +82,20 @@ async function prepareImageURL(req, file) {
* @param {Buffer} params.buffer - The Buffer containing the avatar image.
* @param {string} params.userId - The user ID.
* @param {string} params.manual - A string flag indicating whether the update is manual ('true' or 'false').
* @param {string} [params.agentId] - Optional agent ID if this is an agent avatar.
* @returns {Promise<string>} - A promise that resolves with the URL of the uploaded avatar.
* @throws {Error} - Throws an error if Firebase is not initialized or if there is an error in uploading.
*/
async function processFirebaseAvatar({ buffer, userId, manual }) {
async function processFirebaseAvatar({ buffer, userId, manual, agentId }) {
try {
const metadata = await sharp(buffer).metadata();
const extension = metadata.format === 'gif' ? 'gif' : 'png';
const fileName = `avatar.${extension}`;
const timestamp = new Date().getTime();
/** Unique filename with timestamp and optional agent ID */
const fileName = agentId
? `agent-${agentId}-avatar-${timestamp}.${extension}`
: `avatar-${timestamp}.${extension}`;
const downloadURL = await saveBufferToFirebase({
userId,
@ -98,10 +104,10 @@ async function processFirebaseAvatar({ buffer, userId, manual }) {
});
const isManual = manual === 'true';
const url = `${downloadURL}?manual=${isManual}`;
if (isManual) {
// Only update user record if this is a user avatar (manual === 'true')
if (isManual && !agentId) {
await updateUser(userId, { avatar: url });
}

View file

@ -201,6 +201,10 @@ const unlinkFile = async (filepath) => {
*/
const deleteLocalFile = async (req, file) => {
const { publicPath, uploads } = req.app.locals.paths;
/** Filepath stripped of query parameters (e.g., ?manual=true) */
const cleanFilepath = file.filepath.split('?')[0];
if (file.embedded && process.env.RAG_API_URL) {
const jwtToken = req.headers.authorization.split(' ')[1];
axios.delete(`${process.env.RAG_API_URL}/documents`, {
@ -213,32 +217,32 @@ const deleteLocalFile = async (req, file) => {
}); });
} }
if (file.filepath.startsWith(`/uploads/${req.user.id}`)) {
if (cleanFilepath.startsWith(`/uploads/${req.user.id}`)) {
const userUploadDir = path.join(uploads, req.user.id);
const basePath = file.filepath.split(`/uploads/${req.user.id}/`)[1];
const basePath = cleanFilepath.split(`/uploads/${req.user.id}/`)[1];
if (!basePath) {
throw new Error(`Invalid file path: ${file.filepath}`);
throw new Error(`Invalid file path: ${cleanFilepath}`);
}
const filepath = path.join(userUploadDir, basePath);
const rel = path.relative(userUploadDir, filepath);
if (rel.startsWith('..') || path.isAbsolute(rel) || rel.includes(`..${path.sep}`)) {
throw new Error(`Invalid file path: ${file.filepath}`);
throw new Error(`Invalid file path: ${cleanFilepath}`);
}
await unlinkFile(filepath);
return;
}
const parts = file.filepath.split(path.sep);
const parts = cleanFilepath.split(path.sep);
const subfolder = parts[1];
if (!subfolder && parts[0] === EModelEndpoint.agents) {
logger.warn(`Agent File ${file.file_id} is missing filepath, may have been deleted already`);
return;
}
const filepath = path.join(publicPath, file.filepath);
const filepath = path.join(publicPath, cleanFilepath);
if (!isValidPath(req, publicPath, subfolder, filepath)) {
throw new Error('Invalid file path');
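
Why the query string is stripped first (illustration, not part of the commit): avatar filepaths can now carry a ?manual=... suffix, which must not reach the filesystem APIs:

function stripQuery(filepath) {
  // '/images/user123/avatar-1718000000000.png?manual=true'
  //   -> '/images/user123/avatar-1718000000000.png'
  return filepath.split('?')[0];
}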

View file

@ -112,10 +112,11 @@ async function prepareImagesLocal(req, file) {
* @param {Buffer} params.buffer - The Buffer containing the avatar image.
* @param {string} params.userId - The user ID.
* @param {string} params.manual - A string flag indicating whether the update is manual ('true' or 'false').
* @param {string} [params.agentId] - Optional agent ID if this is an agent avatar.
* @returns {Promise<string>} - A promise that resolves with the URL of the uploaded avatar.
* @throws {Error} - Throws an error if Firebase is not initialized or if there is an error in uploading.
*/
async function processLocalAvatar({ buffer, userId, manual }) {
async function processLocalAvatar({ buffer, userId, manual, agentId }) {
const userDir = path.resolve(
__dirname,
'..',
@ -132,7 +133,11 @@ async function processLocalAvatar({ buffer, userId, manual }) {
const metadata = await sharp(buffer).metadata(); const metadata = await sharp(buffer).metadata();
const extension = metadata.format === 'gif' ? 'gif' : 'png'; const extension = metadata.format === 'gif' ? 'gif' : 'png';
const fileName = `avatar-${new Date().getTime()}.${extension}`; const timestamp = new Date().getTime();
/** Unique filename with timestamp and optional agent ID */
const fileName = agentId
? `agent-${agentId}-avatar-${timestamp}.${extension}`
: `avatar-${timestamp}.${extension}`;
const urlRoute = `/images/${userId}/${fileName}`; const urlRoute = `/images/${userId}/${fileName}`;
const avatarPath = path.join(userDir, fileName); const avatarPath = path.join(userDir, fileName);
@ -142,7 +147,8 @@ async function processLocalAvatar({ buffer, userId, manual }) {
const isManual = manual === 'true'; const isManual = manual === 'true';
let url = `${urlRoute}?manual=${isManual}`; let url = `${urlRoute}?manual=${isManual}`;
if (isManual) { // Only update user record if this is a user avatar (manual === 'true')
if (isManual && !agentId) {
await updateUser(userId, { avatar: url }); await updateUser(userId, { avatar: url });
} }
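For illustration, the filenames the new branch produces (values hypothetical):

// Hypothetical inputs, for illustration only.
const timestamp = 1719150000000;
const extension = 'png';
const agentId = 'agent_abc123';

const userFileName = `avatar-${timestamp}.${extension}`;
// => 'avatar-1719150000000.png'
const agentFileName = `agent-${agentId}-avatar-${timestamp}.${extension}`;
// => 'agent-agent_abc123-avatar-1719150000000.png'

Timestamping both variants keeps repeated uploads from overwriting one another, and the agent prefix keeps agent avatars distinct from the user's own avatar in the same directory.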


@@ -1,238 +0,0 @@
// ~/server/services/Files/MistralOCR/crud.js
const fs = require('fs');
const path = require('path');
const FormData = require('form-data');
const {
FileSources,
envVarRegex,
extractEnvVariable,
extractVariableName,
} = require('librechat-data-provider');
const { loadAuthValues } = require('~/server/services/Tools/credentials');
const { logger, createAxiosInstance } = require('~/config');
const { logAxiosError } = require('~/utils/axios');
const axios = createAxiosInstance();
/**
* Uploads a document to Mistral API using file streaming to avoid loading the entire file into memory
*
* @param {Object} params Upload parameters
* @param {string} params.filePath The path to the file on disk
* @param {string} [params.fileName] Optional filename to use (defaults to the name from filePath)
* @param {string} params.apiKey Mistral API key
* @param {string} [params.baseURL=https://api.mistral.ai/v1] Mistral API base URL
* @returns {Promise<Object>} The response from Mistral API
*/
async function uploadDocumentToMistral({
filePath,
fileName = '',
apiKey,
baseURL = 'https://api.mistral.ai/v1',
}) {
const form = new FormData();
form.append('purpose', 'ocr');
const actualFileName = fileName || path.basename(filePath);
const fileStream = fs.createReadStream(filePath);
form.append('file', fileStream, { filename: actualFileName });
return axios
.post(`${baseURL}/files`, form, {
headers: {
Authorization: `Bearer ${apiKey}`,
...form.getHeaders(),
},
maxBodyLength: Infinity,
maxContentLength: Infinity,
})
.then((res) => res.data)
.catch((error) => {
throw error;
});
}
async function getSignedUrl({
apiKey,
fileId,
expiry = 24,
baseURL = 'https://api.mistral.ai/v1',
}) {
return axios
.get(`${baseURL}/files/${fileId}/url?expiry=${expiry}`, {
headers: {
Authorization: `Bearer ${apiKey}`,
},
})
.then((res) => res.data)
.catch((error) => {
logger.error('Error fetching signed URL:', error.message);
throw error;
});
}
/**
* @param {Object} params
* @param {string} params.apiKey
* @param {string} params.url - The document or image URL
* @param {string} [params.documentType='document_url'] - 'document_url' or 'image_url'
* @param {string} [params.model]
* @param {string} [params.baseURL]
* @returns {Promise<OCRResult>}
*/
async function performOCR({
apiKey,
url,
documentType = 'document_url',
model = 'mistral-ocr-latest',
baseURL = 'https://api.mistral.ai/v1',
}) {
const documentKey = documentType === 'image_url' ? 'image_url' : 'document_url';
return axios
.post(
`${baseURL}/ocr`,
{
model,
image_limit: 0,
include_image_base64: false,
document: {
type: documentType,
[documentKey]: url,
},
},
{
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${apiKey}`,
},
},
)
.then((res) => res.data)
.catch((error) => {
logger.error('Error performing OCR:', error.message);
throw error;
});
}
/**
* Uploads a file to the Mistral OCR API and processes the OCR result.
*
* @param {Object} params - The params object.
* @param {ServerRequest} params.req - The request object from Express. It should have a `user` property with an `id`
* representing the user
* @param {Express.Multer.File} params.file - The file object, which is part of the request. The file object should
* have a `mimetype` property that tells us the file type
* @param {string} params.file_id - The file ID.
* @param {string} [params.entity_id] - The entity ID, not used here but passed for consistency.
* @returns {Promise<{ filepath: string, bytes: number }>} - The result object containing the processed `text` and `images` (not currently used),
* along with the `filename` and `bytes` properties.
*/
const uploadMistralOCR = async ({ req, file, file_id, entity_id }) => {
try {
/** @type {TCustomConfig['ocr']} */
const ocrConfig = req.app.locals?.ocr;
const apiKeyConfig = ocrConfig.apiKey || '';
const baseURLConfig = ocrConfig.baseURL || '';
const isApiKeyEnvVar = envVarRegex.test(apiKeyConfig);
const isBaseURLEnvVar = envVarRegex.test(baseURLConfig);
const isApiKeyEmpty = !apiKeyConfig.trim();
const isBaseURLEmpty = !baseURLConfig.trim();
let apiKey, baseURL;
if (isApiKeyEnvVar || isBaseURLEnvVar || isApiKeyEmpty || isBaseURLEmpty) {
const apiKeyVarName = isApiKeyEnvVar ? extractVariableName(apiKeyConfig) : 'OCR_API_KEY';
const baseURLVarName = isBaseURLEnvVar ? extractVariableName(baseURLConfig) : 'OCR_BASEURL';
const authValues = await loadAuthValues({
userId: req.user.id,
authFields: [baseURLVarName, apiKeyVarName],
optional: new Set([baseURLVarName]),
});
apiKey = authValues[apiKeyVarName];
baseURL = authValues[baseURLVarName];
} else {
apiKey = apiKeyConfig;
baseURL = baseURLConfig;
}
const mistralFile = await uploadDocumentToMistral({
filePath: file.path,
fileName: file.originalname,
apiKey,
baseURL,
});
const modelConfig = ocrConfig.mistralModel || '';
const model = envVarRegex.test(modelConfig)
? extractEnvVariable(modelConfig)
: modelConfig.trim() || 'mistral-ocr-latest';
const signedUrlResponse = await getSignedUrl({
apiKey,
baseURL,
fileId: mistralFile.id,
});
const mimetype = (file.mimetype || '').toLowerCase();
const originalname = file.originalname || '';
const isImage =
mimetype.startsWith('image') || /\.(png|jpe?g|gif|bmp|webp|tiff?)$/i.test(originalname);
const documentType = isImage ? 'image_url' : 'document_url';
const ocrResult = await performOCR({
apiKey,
baseURL,
model,
url: signedUrlResponse.url,
documentType,
});
let aggregatedText = '';
const images = [];
ocrResult.pages.forEach((page, index) => {
if (ocrResult.pages.length > 1) {
aggregatedText += `# PAGE ${index + 1}\n`;
}
aggregatedText += page.markdown + '\n\n';
if (page.images && page.images.length > 0) {
page.images.forEach((image) => {
if (image.image_base64) {
images.push(image.image_base64);
}
});
}
});
return {
filename: file.originalname,
bytes: aggregatedText.length * 4,
filepath: FileSources.mistral_ocr,
text: aggregatedText,
images,
};
} catch (error) {
let message = 'Error uploading document to Mistral OCR API';
const detail = error?.response?.data?.detail;
if (detail && detail !== '') {
message = detail;
}
const responseMessage = error?.response?.data?.message;
throw new Error(
`${logAxiosError({ error, message })}${responseMessage && responseMessage !== '' ? ` - ${responseMessage}` : ''}`,
);
}
};
module.exports = {
uploadDocumentToMistral,
uploadMistralOCR,
getSignedUrl,
performOCR,
};
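The removed module implemented a three-step flow against the Mistral API: upload the file, exchange the file ID for a signed URL, then run OCR on that URL. A minimal sketch of the sequence using the module's own exports (API key and file path are placeholders):

const { uploadDocumentToMistral, getSignedUrl, performOCR } = require('./crud');

async function ocrFile(filePath, apiKey) {
  // 1. Upload the document for the 'ocr' purpose.
  const uploaded = await uploadDocumentToMistral({ filePath, apiKey });
  // 2. Trade the file ID for a time-limited signed URL (24h expiry by default).
  const { url } = await getSignedUrl({ apiKey, fileId: uploaded.id });
  // 3. OCR the signed URL; each page comes back as markdown.
  const result = await performOCR({ apiKey, url });
  return result.pages.map((page) => page.markdown).join('\n\n');
}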


@@ -1,848 +0,0 @@
const fs = require('fs');
const mockAxios = {
interceptors: {
request: { use: jest.fn(), eject: jest.fn() },
response: { use: jest.fn(), eject: jest.fn() },
},
create: jest.fn().mockReturnValue({
defaults: {
proxy: null,
},
get: jest.fn().mockResolvedValue({ data: {} }),
post: jest.fn().mockResolvedValue({ data: {} }),
put: jest.fn().mockResolvedValue({ data: {} }),
delete: jest.fn().mockResolvedValue({ data: {} }),
}),
get: jest.fn().mockResolvedValue({ data: {} }),
post: jest.fn().mockResolvedValue({ data: {} }),
put: jest.fn().mockResolvedValue({ data: {} }),
delete: jest.fn().mockResolvedValue({ data: {} }),
reset: jest.fn().mockImplementation(function () {
this.get.mockClear();
this.post.mockClear();
this.put.mockClear();
this.delete.mockClear();
this.create.mockClear();
}),
};
jest.mock('axios', () => mockAxios);
jest.mock('fs');
jest.mock('~/config', () => ({
logger: {
error: jest.fn(),
},
createAxiosInstance: () => mockAxios,
}));
jest.mock('~/server/services/Tools/credentials', () => ({
loadAuthValues: jest.fn(),
}));
const { uploadDocumentToMistral, uploadMistralOCR, getSignedUrl, performOCR } = require('./crud');
describe('MistralOCR Service', () => {
afterEach(() => {
mockAxios.reset();
jest.clearAllMocks();
});
describe('uploadDocumentToMistral', () => {
beforeEach(() => {
// Create a more complete mock for file streams that FormData can work with
const mockReadStream = {
on: jest.fn().mockImplementation(function (event, handler) {
// Simulate immediate 'end' event to make FormData complete processing
if (event === 'end') {
handler();
}
return this;
}),
pipe: jest.fn().mockImplementation(function () {
return this;
}),
pause: jest.fn(),
resume: jest.fn(),
emit: jest.fn(),
once: jest.fn(),
destroy: jest.fn(),
};
fs.createReadStream = jest.fn().mockReturnValue(mockReadStream);
// Mock FormData's append to avoid actual stream processing
jest.mock('form-data', () => {
const mockFormData = function () {
return {
append: jest.fn(),
getHeaders: jest
.fn()
.mockReturnValue({ 'content-type': 'multipart/form-data; boundary=---boundary' }),
getBuffer: jest.fn().mockReturnValue(Buffer.from('mock-form-data')),
getLength: jest.fn().mockReturnValue(100),
};
};
return mockFormData;
});
});
it('should upload a document to Mistral API using file streaming', async () => {
const mockResponse = { data: { id: 'file-123', purpose: 'ocr' } };
mockAxios.post.mockResolvedValueOnce(mockResponse);
const result = await uploadDocumentToMistral({
filePath: '/path/to/test.pdf',
fileName: 'test.pdf',
apiKey: 'test-api-key',
});
// Check that createReadStream was called with the correct file path
expect(fs.createReadStream).toHaveBeenCalledWith('/path/to/test.pdf');
// Since we're mocking FormData, we'll just check that axios was called correctly
expect(mockAxios.post).toHaveBeenCalledWith(
'https://api.mistral.ai/v1/files',
expect.anything(),
expect.objectContaining({
headers: expect.objectContaining({
Authorization: 'Bearer test-api-key',
}),
maxBodyLength: Infinity,
maxContentLength: Infinity,
}),
);
expect(result).toEqual(mockResponse.data);
});
it('should handle errors during document upload', async () => {
const errorMessage = 'API error';
mockAxios.post.mockRejectedValueOnce(new Error(errorMessage));
await expect(
uploadDocumentToMistral({
filePath: '/path/to/test.pdf',
fileName: 'test.pdf',
apiKey: 'test-api-key',
}),
).rejects.toThrow(errorMessage);
});
});
describe('getSignedUrl', () => {
it('should fetch signed URL from Mistral API', async () => {
const mockResponse = { data: { url: 'https://document-url.com' } };
mockAxios.get.mockResolvedValueOnce(mockResponse);
const result = await getSignedUrl({
fileId: 'file-123',
apiKey: 'test-api-key',
});
expect(mockAxios.get).toHaveBeenCalledWith(
'https://api.mistral.ai/v1/files/file-123/url?expiry=24',
{
headers: {
Authorization: 'Bearer test-api-key',
},
},
);
expect(result).toEqual(mockResponse.data);
});
it('should handle errors when fetching signed URL', async () => {
const errorMessage = 'API error';
mockAxios.get.mockRejectedValueOnce(new Error(errorMessage));
await expect(
getSignedUrl({
fileId: 'file-123',
apiKey: 'test-api-key',
}),
).rejects.toThrow();
const { logger } = require('~/config');
expect(logger.error).toHaveBeenCalledWith('Error fetching signed URL:', errorMessage);
});
});
describe('performOCR', () => {
it('should perform OCR using Mistral API (document_url)', async () => {
const mockResponse = {
data: {
pages: [{ markdown: 'Page 1 content' }, { markdown: 'Page 2 content' }],
},
};
mockAxios.post.mockResolvedValueOnce(mockResponse);
const result = await performOCR({
apiKey: 'test-api-key',
url: 'https://document-url.com',
model: 'mistral-ocr-latest',
documentType: 'document_url',
});
expect(mockAxios.post).toHaveBeenCalledWith(
'https://api.mistral.ai/v1/ocr',
{
model: 'mistral-ocr-latest',
include_image_base64: false,
image_limit: 0,
document: {
type: 'document_url',
document_url: 'https://document-url.com',
},
},
{
headers: {
'Content-Type': 'application/json',
Authorization: 'Bearer test-api-key',
},
},
);
expect(result).toEqual(mockResponse.data);
});
it('should perform OCR using Mistral API (image_url)', async () => {
const mockResponse = {
data: {
pages: [{ markdown: 'Image OCR content' }],
},
};
mockAxios.post.mockResolvedValueOnce(mockResponse);
const result = await performOCR({
apiKey: 'test-api-key',
url: 'https://image-url.com/image.png',
model: 'mistral-ocr-latest',
documentType: 'image_url',
});
expect(mockAxios.post).toHaveBeenCalledWith(
'https://api.mistral.ai/v1/ocr',
{
model: 'mistral-ocr-latest',
include_image_base64: false,
image_limit: 0,
document: {
type: 'image_url',
image_url: 'https://image-url.com/image.png',
},
},
{
headers: {
'Content-Type': 'application/json',
Authorization: 'Bearer test-api-key',
},
},
);
expect(result).toEqual(mockResponse.data);
});
it('should handle errors during OCR processing', async () => {
const errorMessage = 'OCR processing error';
mockAxios.post.mockRejectedValueOnce(new Error(errorMessage));
await expect(
performOCR({
apiKey: 'test-api-key',
url: 'https://document-url.com',
}),
).rejects.toThrow();
const { logger } = require('~/config');
expect(logger.error).toHaveBeenCalledWith('Error performing OCR:', errorMessage);
});
});
describe('uploadMistralOCR', () => {
beforeEach(() => {
const mockReadStream = {
on: jest.fn().mockImplementation(function (event, handler) {
if (event === 'end') {
handler();
}
return this;
}),
pipe: jest.fn().mockImplementation(function () {
return this;
}),
pause: jest.fn(),
resume: jest.fn(),
emit: jest.fn(),
once: jest.fn(),
destroy: jest.fn(),
};
fs.createReadStream = jest.fn().mockReturnValue(mockReadStream);
});
it('should process OCR for a file with standard configuration', async () => {
// Setup mocks
const { loadAuthValues } = require('~/server/services/Tools/credentials');
loadAuthValues.mockResolvedValue({
OCR_API_KEY: 'test-api-key',
OCR_BASEURL: 'https://api.mistral.ai/v1',
});
// Mock file upload response
mockAxios.post.mockResolvedValueOnce({
data: { id: 'file-123', purpose: 'ocr' },
});
// Mock signed URL response
mockAxios.get.mockResolvedValueOnce({
data: { url: 'https://signed-url.com' },
});
// Mock OCR response with text and images
mockAxios.post.mockResolvedValueOnce({
data: {
pages: [
{
markdown: 'Page 1 content',
images: [{ image_base64: 'base64image1' }],
},
{
markdown: 'Page 2 content',
images: [{ image_base64: 'base64image2' }],
},
],
},
});
const req = {
user: { id: 'user123' },
app: {
locals: {
ocr: {
// Use environment variable syntax to ensure loadAuthValues is called
apiKey: '${OCR_API_KEY}',
baseURL: '${OCR_BASEURL}',
mistralModel: 'mistral-medium',
},
},
},
};
const file = {
path: '/tmp/upload/file.pdf',
originalname: 'document.pdf',
mimetype: 'application/pdf',
};
const result = await uploadMistralOCR({
req,
file,
file_id: 'file123',
entity_id: 'entity123',
});
expect(fs.createReadStream).toHaveBeenCalledWith('/tmp/upload/file.pdf');
expect(loadAuthValues).toHaveBeenCalledWith({
userId: 'user123',
authFields: ['OCR_BASEURL', 'OCR_API_KEY'],
optional: expect.any(Set),
});
// Verify OCR result
expect(result).toEqual({
filename: 'document.pdf',
bytes: expect.any(Number),
filepath: 'mistral_ocr',
text: expect.stringContaining('# PAGE 1'),
images: ['base64image1', 'base64image2'],
});
});
it('should process OCR for an image file and use image_url type', async () => {
const { loadAuthValues } = require('~/server/services/Tools/credentials');
loadAuthValues.mockResolvedValue({
OCR_API_KEY: 'test-api-key',
OCR_BASEURL: 'https://api.mistral.ai/v1',
});
// Mock file upload response
mockAxios.post.mockResolvedValueOnce({
data: { id: 'file-456', purpose: 'ocr' },
});
// Mock signed URL response
mockAxios.get.mockResolvedValueOnce({
data: { url: 'https://signed-url.com/image.png' },
});
// Mock OCR response for image
mockAxios.post.mockResolvedValueOnce({
data: {
pages: [
{
markdown: 'Image OCR result',
images: [{ image_base64: 'imgbase64' }],
},
],
},
});
const req = {
user: { id: 'user456' },
app: {
locals: {
ocr: {
apiKey: '${OCR_API_KEY}',
baseURL: '${OCR_BASEURL}',
mistralModel: 'mistral-medium',
},
},
},
};
const file = {
path: '/tmp/upload/image.png',
originalname: 'image.png',
mimetype: 'image/png',
};
const result = await uploadMistralOCR({
req,
file,
file_id: 'file456',
entity_id: 'entity456',
});
expect(fs.createReadStream).toHaveBeenCalledWith('/tmp/upload/image.png');
expect(loadAuthValues).toHaveBeenCalledWith({
userId: 'user456',
authFields: ['OCR_BASEURL', 'OCR_API_KEY'],
optional: expect.any(Set),
});
// Check that the OCR API was called with image_url type
expect(mockAxios.post).toHaveBeenCalledWith(
'https://api.mistral.ai/v1/ocr',
expect.objectContaining({
document: expect.objectContaining({
type: 'image_url',
image_url: 'https://signed-url.com/image.png',
}),
}),
expect.any(Object),
);
expect(result).toEqual({
filename: 'image.png',
bytes: expect.any(Number),
filepath: 'mistral_ocr',
text: expect.stringContaining('Image OCR result'),
images: ['imgbase64'],
});
});
it('should process variable references in configuration', async () => {
// Setup mocks with environment variables
const { loadAuthValues } = require('~/server/services/Tools/credentials');
loadAuthValues.mockResolvedValue({
CUSTOM_API_KEY: 'custom-api-key',
CUSTOM_BASEURL: 'https://custom-api.mistral.ai/v1',
});
// Mock API responses
mockAxios.post.mockResolvedValueOnce({
data: { id: 'file-123', purpose: 'ocr' },
});
mockAxios.get.mockResolvedValueOnce({
data: { url: 'https://signed-url.com' },
});
mockAxios.post.mockResolvedValueOnce({
data: {
pages: [{ markdown: 'Content from custom API' }],
},
});
const req = {
user: { id: 'user123' },
app: {
locals: {
ocr: {
apiKey: '${CUSTOM_API_KEY}',
baseURL: '${CUSTOM_BASEURL}',
mistralModel: '${CUSTOM_MODEL}',
},
},
},
};
// Set environment variable for model
process.env.CUSTOM_MODEL = 'mistral-large';
const file = {
path: '/tmp/upload/file.pdf',
originalname: 'document.pdf',
};
const result = await uploadMistralOCR({
req,
file,
file_id: 'file123',
entity_id: 'entity123',
});
expect(fs.createReadStream).toHaveBeenCalledWith('/tmp/upload/file.pdf');
// Verify that custom environment variables were extracted and used
expect(loadAuthValues).toHaveBeenCalledWith({
userId: 'user123',
authFields: ['CUSTOM_BASEURL', 'CUSTOM_API_KEY'],
optional: expect.any(Set),
});
// Check that mistral-large was used in the OCR API call
expect(mockAxios.post).toHaveBeenCalledWith(
expect.anything(),
expect.objectContaining({
model: 'mistral-large',
}),
expect.anything(),
);
expect(result.text).toEqual('Content from custom API\n\n');
});
it('should fall back to default values when variables are not properly formatted', async () => {
const { loadAuthValues } = require('~/server/services/Tools/credentials');
loadAuthValues.mockResolvedValue({
OCR_API_KEY: 'default-api-key',
OCR_BASEURL: undefined, // Testing optional parameter
});
mockAxios.post.mockResolvedValueOnce({
data: { id: 'file-123', purpose: 'ocr' },
});
mockAxios.get.mockResolvedValueOnce({
data: { url: 'https://signed-url.com' },
});
mockAxios.post.mockResolvedValueOnce({
data: {
pages: [{ markdown: 'Default API result' }],
},
});
const req = {
user: { id: 'user123' },
app: {
locals: {
ocr: {
// Use environment variable syntax to ensure loadAuthValues is called
apiKey: '${INVALID_FORMAT}', // Using valid env var format but with an invalid name
baseURL: '${OCR_BASEURL}', // Using valid env var format
mistralModel: 'mistral-ocr-latest', // Plain string value
},
},
},
};
const file = {
path: '/tmp/upload/file.pdf',
originalname: 'document.pdf',
};
await uploadMistralOCR({
req,
file,
file_id: 'file123',
entity_id: 'entity123',
});
expect(fs.createReadStream).toHaveBeenCalledWith('/tmp/upload/file.pdf');
// Should use the default values
expect(loadAuthValues).toHaveBeenCalledWith({
userId: 'user123',
authFields: ['OCR_BASEURL', 'INVALID_FORMAT'],
optional: expect.any(Set),
});
// Should use the default model when not using environment variable format
expect(mockAxios.post).toHaveBeenCalledWith(
expect.anything(),
expect.objectContaining({
model: 'mistral-ocr-latest',
}),
expect.anything(),
);
});
it('should handle API errors during OCR process', async () => {
const { loadAuthValues } = require('~/server/services/Tools/credentials');
loadAuthValues.mockResolvedValue({
OCR_API_KEY: 'test-api-key',
});
// Mock file upload to fail
mockAxios.post.mockRejectedValueOnce(new Error('Upload failed'));
const req = {
user: { id: 'user123' },
app: {
locals: {
ocr: {
apiKey: 'OCR_API_KEY',
baseURL: 'OCR_BASEURL',
},
},
},
};
const file = {
path: '/tmp/upload/file.pdf',
originalname: 'document.pdf',
};
await expect(
uploadMistralOCR({
req,
file,
file_id: 'file123',
entity_id: 'entity123',
}),
).rejects.toThrow('Error uploading document to Mistral OCR API');
expect(fs.createReadStream).toHaveBeenCalledWith('/tmp/upload/file.pdf');
});
it('should handle single page documents without page numbering', async () => {
const { loadAuthValues } = require('~/server/services/Tools/credentials');
loadAuthValues.mockResolvedValue({
OCR_API_KEY: 'test-api-key',
OCR_BASEURL: 'https://api.mistral.ai/v1', // Make sure this is included
});
// Clear all previous mocks
mockAxios.post.mockClear();
mockAxios.get.mockClear();
// 1. First mock: File upload response
mockAxios.post.mockImplementationOnce(() =>
Promise.resolve({ data: { id: 'file-123', purpose: 'ocr' } }),
);
// 2. Second mock: Signed URL response
mockAxios.get.mockImplementationOnce(() =>
Promise.resolve({ data: { url: 'https://signed-url.com' } }),
);
// 3. Third mock: OCR response
mockAxios.post.mockImplementationOnce(() =>
Promise.resolve({
data: {
pages: [{ markdown: 'Single page content' }],
},
}),
);
const req = {
user: { id: 'user123' },
app: {
locals: {
ocr: {
apiKey: 'OCR_API_KEY',
baseURL: 'OCR_BASEURL',
mistralModel: 'mistral-ocr-latest',
},
},
},
};
const file = {
path: '/tmp/upload/file.pdf',
originalname: 'single-page.pdf',
};
const result = await uploadMistralOCR({
req,
file,
file_id: 'file123',
entity_id: 'entity123',
});
expect(fs.createReadStream).toHaveBeenCalledWith('/tmp/upload/file.pdf');
// Verify that single page documents don't include page numbering
expect(result.text).not.toContain('# PAGE');
expect(result.text).toEqual('Single page content\n\n');
});
it('should use literal values in configuration when provided directly', async () => {
const { loadAuthValues } = require('~/server/services/Tools/credentials');
// We'll still mock this but it should not be used for literal values
loadAuthValues.mockResolvedValue({});
// Clear all previous mocks
mockAxios.post.mockClear();
mockAxios.get.mockClear();
// 1. First mock: File upload response
mockAxios.post.mockImplementationOnce(() =>
Promise.resolve({ data: { id: 'file-123', purpose: 'ocr' } }),
);
// 2. Second mock: Signed URL response
mockAxios.get.mockImplementationOnce(() =>
Promise.resolve({ data: { url: 'https://signed-url.com' } }),
);
// 3. Third mock: OCR response
mockAxios.post.mockImplementationOnce(() =>
Promise.resolve({
data: {
pages: [{ markdown: 'Processed with literal config values' }],
},
}),
);
const req = {
user: { id: 'user123' },
app: {
locals: {
ocr: {
// Direct values that should be used as-is, without variable substitution
apiKey: 'actual-api-key-value',
baseURL: 'https://direct-api-url.mistral.ai/v1',
mistralModel: 'mistral-direct-model',
},
},
},
};
const file = {
path: '/tmp/upload/file.pdf',
originalname: 'direct-values.pdf',
};
const result = await uploadMistralOCR({
req,
file,
file_id: 'file123',
entity_id: 'entity123',
});
expect(fs.createReadStream).toHaveBeenCalledWith('/tmp/upload/file.pdf');
// Verify the correct URL was used with the direct baseURL value
expect(mockAxios.post).toHaveBeenCalledWith(
'https://direct-api-url.mistral.ai/v1/files',
expect.any(Object),
expect.objectContaining({
headers: expect.objectContaining({
Authorization: 'Bearer actual-api-key-value',
}),
}),
);
// Check the OCR call was made with the direct model value
expect(mockAxios.post).toHaveBeenCalledWith(
'https://direct-api-url.mistral.ai/v1/ocr',
expect.objectContaining({
model: 'mistral-direct-model',
}),
expect.any(Object),
);
// Verify the result
expect(result.text).toEqual('Processed with literal config values\n\n');
// Verify loadAuthValues was never called since we used direct values
expect(loadAuthValues).not.toHaveBeenCalled();
});
it('should handle empty configuration values and use defaults', async () => {
const { loadAuthValues } = require('~/server/services/Tools/credentials');
// Set up the mock values to be returned by loadAuthValues
loadAuthValues.mockResolvedValue({
OCR_API_KEY: 'default-from-env-key',
OCR_BASEURL: 'https://default-from-env.mistral.ai/v1',
});
// Clear all previous mocks
mockAxios.post.mockClear();
mockAxios.get.mockClear();
// 1. First mock: File upload response
mockAxios.post.mockImplementationOnce(() =>
Promise.resolve({ data: { id: 'file-123', purpose: 'ocr' } }),
);
// 2. Second mock: Signed URL response
mockAxios.get.mockImplementationOnce(() =>
Promise.resolve({ data: { url: 'https://signed-url.com' } }),
);
// 3. Third mock: OCR response
mockAxios.post.mockImplementationOnce(() =>
Promise.resolve({
data: {
pages: [{ markdown: 'Content from default configuration' }],
},
}),
);
const req = {
user: { id: 'user123' },
app: {
locals: {
ocr: {
// Empty string values - should fall back to defaults
apiKey: '',
baseURL: '',
mistralModel: '',
},
},
},
};
const file = {
path: '/tmp/upload/file.pdf',
originalname: 'empty-config.pdf',
};
const result = await uploadMistralOCR({
req,
file,
file_id: 'file123',
entity_id: 'entity123',
});
expect(fs.createReadStream).toHaveBeenCalledWith('/tmp/upload/file.pdf');
// Verify loadAuthValues was called with the default variable names
expect(loadAuthValues).toHaveBeenCalledWith({
userId: 'user123',
authFields: ['OCR_BASEURL', 'OCR_API_KEY'],
optional: expect.any(Set),
});
// Verify the API calls used the default values from loadAuthValues
expect(mockAxios.post).toHaveBeenCalledWith(
'https://default-from-env.mistral.ai/v1/files',
expect.any(Object),
expect.objectContaining({
headers: expect.objectContaining({
Authorization: 'Bearer default-from-env-key',
}),
}),
);
// Verify the OCR model defaulted to mistral-ocr-latest
expect(mockAxios.post).toHaveBeenCalledWith(
'https://default-from-env.mistral.ai/v1/ocr',
expect.objectContaining({
model: 'mistral-ocr-latest',
}),
expect.any(Object),
);
// Check result
expect(result.text).toEqual('Content from default configuration\n\n');
});
});
});


@@ -1,5 +0,0 @@
const crud = require('./crud');
module.exports = {
...crud,
};


@@ -94,19 +94,28 @@ async function prepareImageURLS3(req, file) {
 * @param {Buffer} params.buffer - Avatar image buffer.
 * @param {string} params.userId - User's unique identifier.
 * @param {string} params.manual - 'true' or 'false' flag for manual update.
+ * @param {string} [params.agentId] - Optional agent ID if this is an agent avatar.
 * @param {string} [params.basePath='images'] - Base path in the bucket.
 * @returns {Promise<string>} Signed URL of the uploaded avatar.
 */
-async function processS3Avatar({ buffer, userId, manual, basePath = defaultBasePath }) {
+async function processS3Avatar({ buffer, userId, manual, agentId, basePath = defaultBasePath }) {
  try {
    const metadata = await sharp(buffer).metadata();
    const extension = metadata.format === 'gif' ? 'gif' : 'png';
-    const fileName = `avatar.${extension}`;
+    const timestamp = new Date().getTime();
+    /** Unique filename with timestamp and optional agent ID */
+    const fileName = agentId
+      ? `agent-${agentId}-avatar-${timestamp}.${extension}`
+      : `avatar-${timestamp}.${extension}`;
    const downloadURL = await saveBufferToS3({ userId, buffer, fileName, basePath });
-    if (manual === 'true') {
+    // Only update user record if this is a user avatar (manual === 'true')
+    if (manual === 'true' && !agentId) {
      await updateUser(userId, { avatar: downloadURL });
    }
    return downloadURL;
  } catch (error) {
    logger.error('[processS3Avatar] Error processing S3 avatar:', error.message);


@@ -1,9 +1,9 @@
const fs = require('fs');
const axios = require('axios');
const FormData = require('form-data');
+const { logAxiosError } = require('@librechat/api');
+const { logger } = require('@librechat/data-schemas');
const { FileSources } = require('librechat-data-provider');
-const { logAxiosError } = require('~/utils');
-const { logger } = require('~/config');
/**
 * Deletes a file from the vector database. This function takes a file object, constructs the full path, and


@@ -1,4 +1,5 @@
const axios = require('axios');
+const { logAxiosError } = require('@librechat/api');
const {
  FileSources,
  VisionModes,
@@ -7,8 +8,6 @@ const {
  EModelEndpoint,
} = require('librechat-data-provider');
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
-const { logAxiosError } = require('~/utils');
-const { logger } = require('~/config');
/**
 * Converts a readable stream to a base64 encoded string.


@@ -522,7 +522,7 @@ const processAgentFileUpload = async ({ req, res, metadata }) => {
    throw new Error('OCR capability is not enabled for Agents');
  }
-  const { handleFileUpload: uploadMistralOCR } = getStrategyFunctions(
+  const { handleFileUpload: uploadOCR } = getStrategyFunctions(
    req.app.locals?.ocr?.strategy ?? FileSources.mistral_ocr,
  );
  const { file_id, temp_file_id } = metadata;
@@ -534,7 +534,7 @@ const processAgentFileUpload = async ({ req, res, metadata }) => {
    images,
    filename,
    filepath: ocrFileURL,
-  } = await uploadMistralOCR({ req, file, file_id, entity_id: agent_id, basePath });
+  } = await uploadOCR({ req, file, loadAuthValues });
  const fileInfo = removeNullishValues({
    text,


@@ -1,4 +1,5 @@
const { FileSources } = require('librechat-data-provider');
+const { uploadMistralOCR, uploadAzureMistralOCR } = require('@librechat/api');
const {
  getFirebaseURL,
  prepareImageURL,
@@ -46,7 +47,6 @@ const {
const { uploadOpenAIFile, deleteOpenAIFile, getOpenAIFileStream } = require('./OpenAI');
const { getCodeOutputDownloadStream, uploadCodeEnvFile } = require('./Code');
const { uploadVectors, deleteVectors } = require('./VectorDB');
-const { uploadMistralOCR } = require('./MistralOCR');
/**
 * Firebase Storage Strategy Functions
@@ -202,6 +202,26 @@ const mistralOCRStrategy = () => ({
  handleFileUpload: uploadMistralOCR,
});
+const azureMistralOCRStrategy = () => ({
+  /** @type {typeof saveFileFromURL | null} */
+  saveURL: null,
+  /** @type {typeof getLocalFileURL | null} */
+  getFileURL: null,
+  /** @type {typeof saveLocalBuffer | null} */
+  saveBuffer: null,
+  /** @type {typeof processLocalAvatar | null} */
+  processAvatar: null,
+  /** @type {typeof uploadLocalImage | null} */
+  handleImageUpload: null,
+  /** @type {typeof prepareImagesLocal | null} */
+  prepareImagePayload: null,
+  /** @type {typeof deleteLocalFile | null} */
+  deleteFile: null,
+  /** @type {typeof getLocalFileStream | null} */
+  getDownloadStream: null,
+  handleFileUpload: uploadAzureMistralOCR,
+});
// Strategy Selector
const getStrategyFunctions = (fileSource) => {
  if (fileSource === FileSources.firebase) {
@@ -222,6 +242,8 @@ const getStrategyFunctions = (fileSource) => {
    return codeOutputStrategy();
  } else if (fileSource === FileSources.mistral_ocr) {
    return mistralOCRStrategy();
+  } else if (fileSource === FileSources.azure_mistral_ocr) {
+    return azureMistralOCRStrategy();
  } else {
    throw new Error('Invalid file source');
  }
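Since each strategy is just a bag of capability functions (with `null` for capabilities it lacks), callers resolve the source at runtime and guard before invoking, as the agent upload path does. A minimal consumption sketch (function name hypothetical):

const { FileSources } = require('librechat-data-provider');
const { getStrategyFunctions } = require('~/server/services/Files/strategies');

async function runOCR(req, file, loadAuthValues) {
  const source = req.app.locals?.ocr?.strategy ?? FileSources.mistral_ocr;
  const { handleFileUpload } = getStrategyFunctions(source);
  // Capabilities a strategy does not support are null, so guard before calling.
  if (typeof handleFileUpload !== 'function') {
    throw new Error(`No file upload handler for source: ${source}`);
  }
  return await handleFileUpload({ req, file, loadAuthValues });
}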


@@ -1,27 +1,111 @@
const { z } = require('zod');
const { tool } = require('@langchain/core/tools');
-const { normalizeServerName } = require('librechat-mcp');
-const { Constants: AgentConstants, Providers } = require('@librechat/agents');
+const { logger } = require('@librechat/data-schemas');
+const { Time, CacheKeys, StepTypes } = require('librechat-data-provider');
+const { sendEvent, normalizeServerName, MCPOAuthHandler } = require('@librechat/api');
+const { Constants: AgentConstants, Providers, GraphEvents } = require('@librechat/agents');
const {
  Constants,
  ContentTypes,
  isAssistantsEndpoint,
  convertJsonSchemaToZod,
} = require('librechat-data-provider');
-const { logger, getMCPManager } = require('~/config');
+const { getMCPManager, getFlowStateManager } = require('~/config');
+const { findToken, createToken, updateToken } = require('~/models');
+const { getCachedTools } = require('./Config');
+const { getLogStores } = require('~/cache');
+/**
+ * @param {object} params
+ * @param {ServerResponse} params.res - The Express response object for sending events.
+ * @param {string} params.stepId - The ID of the step in the flow.
+ * @param {ToolCallChunk} params.toolCall - The tool call object containing tool information.
+ * @param {string} params.loginFlowId - The ID of the login flow.
+ * @param {FlowStateManager<any>} params.flowManager - The flow manager instance.
+ */
+function createOAuthStart({ res, stepId, toolCall, loginFlowId, flowManager, signal }) {
+  /**
+   * Creates a function to handle OAuth login requests.
+   * @param {string} authURL - The URL to redirect the user for OAuth authentication.
+   * @returns {Promise<boolean>} Returns true to indicate the event was sent successfully.
+   */
+  return async function (authURL) {
+    /** @type {{ id: string; delta: AgentToolCallDelta }} */
+    const data = {
+      id: stepId,
+      delta: {
+        type: StepTypes.TOOL_CALLS,
+        tool_calls: [{ ...toolCall, args: '' }],
+        auth: authURL,
+        expires_at: Date.now() + Time.TWO_MINUTES,
+      },
+    };
+    /** Used to ensure the handler (use of `sendEvent`) is only invoked once */
+    await flowManager.createFlowWithHandler(
+      loginFlowId,
+      'oauth_login',
+      async () => {
+        sendEvent(res, { event: GraphEvents.ON_RUN_STEP_DELTA, data });
+        logger.debug('Sent OAuth login request to client');
+        return true;
+      },
+      signal,
+    );
+  };
+}
+/**
+ * @param {object} params
+ * @param {ServerResponse} params.res - The Express response object for sending events.
+ * @param {string} params.stepId - The ID of the step in the flow.
+ * @param {ToolCallChunk} params.toolCall - The tool call object containing tool information.
+ * @param {string} params.loginFlowId - The ID of the login flow.
+ * @param {FlowStateManager<any>} params.flowManager - The flow manager instance.
+ */
+function createOAuthEnd({ res, stepId, toolCall }) {
+  return async function () {
+    /** @type {{ id: string; delta: AgentToolCallDelta }} */
+    const data = {
+      id: stepId,
+      delta: {
+        type: StepTypes.TOOL_CALLS,
+        tool_calls: [{ ...toolCall }],
+      },
+    };
+    sendEvent(res, { event: GraphEvents.ON_RUN_STEP_DELTA, data });
+    logger.debug('Sent OAuth login success to client');
+  };
+}
+/**
+ * @param {object} params
+ * @param {string} params.userId - The ID of the user.
+ * @param {string} params.serverName - The name of the server.
+ * @param {string} params.toolName - The name of the tool.
+ * @param {FlowStateManager<any>} params.flowManager - The flow manager instance.
+ */
+function createAbortHandler({ userId, serverName, toolName, flowManager }) {
+  return function () {
+    logger.info(`[MCP][User: ${userId}][${serverName}][${toolName}] Tool call aborted`);
+    const flowId = MCPOAuthHandler.generateFlowId(userId, serverName);
+    flowManager.failFlow(flowId, 'mcp_oauth', new Error('Tool call aborted'));
+  };
+}
/**
 * Creates a general tool for an entire action set.
 *
 * @param {Object} params - The parameters for loading action sets.
 * @param {ServerRequest} params.req - The Express request object, containing user/request info.
+ * @param {ServerResponse} params.res - The Express response object for sending events.
 * @param {string} params.toolKey - The toolKey for the tool.
 * @param {import('@librechat/agents').Providers | EModelEndpoint} params.provider - The provider for the tool.
 * @param {string} params.model - The model for the tool.
 * @returns { Promise<typeof tool | { _call: (toolInput: Object | string) => unknown}> } An object with `_call` method to execute the tool input.
 */
-async function createMCPTool({ req, toolKey, provider: _provider }) {
+async function createMCPTool({ req, res, toolKey, provider: _provider }) {
-  const toolDefinition = req.app.locals.availableTools[toolKey]?.function;
+  const availableTools = await getCachedTools({ includeGlobal: true });
+  const toolDefinition = availableTools?.[toolKey]?.function;
  if (!toolDefinition) {
    logger.error(`Tool ${toolKey} not found in available tools`);
    return null;
@@ -50,19 +134,61 @@ async function createMCPTool({ req, toolKey, provider: _provider }) {
  /** @type {(toolArguments: Object | string, config?: GraphRunnableConfig) => Promise<unknown>} */
  const _call = async (toolArguments, config) => {
+    const userId = config?.configurable?.user?.id || config?.configurable?.user_id;
+    /** @type {ReturnType<typeof createAbortHandler>} */
+    let abortHandler = null;
+    /** @type {AbortSignal} */
+    let derivedSignal = null;
    try {
-      const derivedSignal = config?.signal ? AbortSignal.any([config.signal]) : undefined;
-      const mcpManager = getMCPManager(config?.configurable?.user_id);
+      const flowsCache = getLogStores(CacheKeys.FLOWS);
+      const flowManager = getFlowStateManager(flowsCache);
+      derivedSignal = config?.signal ? AbortSignal.any([config.signal]) : undefined;
+      const mcpManager = getMCPManager(userId);
      const provider = (config?.metadata?.provider || _provider)?.toLowerCase();
+      const { args: _args, stepId, ...toolCall } = config.toolCall ?? {};
+      const loginFlowId = `${serverName}:oauth_login:${config.metadata.thread_id}:${config.metadata.run_id}`;
+      const oauthStart = createOAuthStart({
+        res,
+        stepId,
+        toolCall,
+        loginFlowId,
+        flowManager,
+        signal: derivedSignal,
+      });
+      const oauthEnd = createOAuthEnd({
+        res,
+        stepId,
+        toolCall,
+      });
+      if (derivedSignal) {
+        abortHandler = createAbortHandler({ userId, serverName, toolName, flowManager });
+        derivedSignal.addEventListener('abort', abortHandler, { once: true });
+      }
+      const customUserVars =
+        config?.configurable?.userMCPAuthMap?.[`${Constants.mcp_prefix}${serverName}`];
      const result = await mcpManager.callTool({
        serverName,
        toolName,
        provider,
        toolArguments,
        options: {
-          userId: config?.configurable?.user_id,
          signal: derivedSignal,
        },
+        user: config?.configurable?.user,
+        customUserVars,
+        flowManager,
+        tokenMethods: {
+          findToken,
+          createToken,
+          updateToken,
+        },
+        oauthStart,
+        oauthEnd,
      });
      if (isAssistantsEndpoint(provider) && Array.isArray(result)) {
@@ -74,12 +200,31 @@ async function createMCPTool({ req, toolKey, provider: _provider }) {
      return result;
    } catch (error) {
      logger.error(
-        `[MCP][User: ${config?.configurable?.user_id}][${serverName}] Error calling "${toolName}" MCP tool:`,
+        `[MCP][User: ${userId}][${serverName}] Error calling "${toolName}" MCP tool:`,
        error,
      );
+      /** OAuth error, provide a helpful message */
+      const isOAuthError =
+        error.message?.includes('401') ||
+        error.message?.includes('OAuth') ||
+        error.message?.includes('authentication') ||
+        error.message?.includes('Non-200 status code (401)');
+      if (isOAuthError) {
+        throw new Error(
+          `OAuth authentication required for ${serverName}. Please check the server logs for the authentication URL.`,
+        );
+      }
      throw new Error(
        `"${toolKey}" tool call failed${error?.message ? `: ${error?.message}` : '.'}`,
      );
+    } finally {
+      // Clean up abort handler to prevent memory leaks
+      if (abortHandler && derivedSignal) {
+        derivedSignal.removeEventListener('abort', abortHandler);
+      }
    }
  };
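The abort wiring above follows a general pattern: derive a signal, attach a one-shot listener for abort-time side effects, and always detach it in `finally` so long-lived signals do not accumulate listeners across calls. A stripped-down sketch of that pattern (names hypothetical):

async function callWithAbort(task, signal) {
  const derived = signal ? AbortSignal.any([signal]) : undefined;
  let onAbort = null;
  try {
    if (derived) {
      onAbort = () => console.warn('task aborted; failing any associated flow');
      derived.addEventListener('abort', onAbort, { once: true });
    }
    return await task(derived);
  } finally {
    // Detach on success and failure alike so repeated calls never leak listeners.
    if (onAbort && derived) {
      derived.removeEventListener('abort', onAbort);
    }
  }
}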


@@ -1,12 +1,13 @@
const axios = require('axios');
const { Providers } = require('@librechat/agents');
+const { logAxiosError } = require('@librechat/api');
+const { logger } = require('@librechat/data-schemas');
const { HttpsProxyAgent } = require('https-proxy-agent');
const { EModelEndpoint, defaultModels, CacheKeys } = require('librechat-data-provider');
-const { inputSchema, logAxiosError, extractBaseURL, processModelData } = require('~/utils');
+const { inputSchema, extractBaseURL, processModelData } = require('~/utils');
const { OllamaClient } = require('~/app/clients/OllamaClient');
const { isUserProvided } = require('~/server/utils');
const getLogStores = require('~/cache/getLogStores');
-const { logger } = require('~/config');
/**
 * Splits a string by commas and trims each resulting value.


@@ -1,6 +1,6 @@
const axios = require('axios');
+const { logger } = require('@librechat/data-schemas');
const { EModelEndpoint, defaultModels } = require('librechat-data-provider');
-const { logger } = require('~/config');
const {
  fetchModels,
@@ -28,7 +28,8 @@ jest.mock('~/cache/getLogStores', () =>
    set: jest.fn().mockResolvedValue(true),
  })),
);
-jest.mock('~/config', () => ({
+jest.mock('@librechat/data-schemas', () => ({
+  ...jest.requireActual('@librechat/data-schemas'),
  logger: {
    error: jest.fn(),
  },


@@ -1,6 +1,6 @@
-const { encrypt, decrypt } = require('~/server/utils/crypto');
-const { PluginAuth } = require('~/db/models');
-const { logger } = require('~/config');
+const { logger } = require('@librechat/data-schemas');
+const { encrypt, decrypt } = require('@librechat/api');
+const { findOnePluginAuth, updatePluginAuth, deletePluginAuth } = require('~/models');
/**
 * Asynchronously retrieves and decrypts the authentication value for a user's plugin, based on a specified authentication field.
@@ -25,7 +25,7 @@ const { logger } = require('~/config');
 */
const getUserPluginAuthValue = async (userId, authField, throwError = true) => {
  try {
-    const pluginAuth = await PluginAuth.findOne({ userId, authField }).lean();
+    const pluginAuth = await findOnePluginAuth({ userId, authField });
    if (!pluginAuth) {
      throw new Error(`No plugin auth ${authField} found for user ${userId}`);
    }
@@ -79,23 +79,12 @@ const getUserPluginAuthValue = async (userId, authField, throwError = true) => {
const updateUserPluginAuth = async (userId, authField, pluginKey, value) => {
  try {
    const encryptedValue = await encrypt(value);
-    const pluginAuth = await PluginAuth.findOne({ userId, authField }).lean();
-    if (pluginAuth) {
-      return await PluginAuth.findOneAndUpdate(
-        { userId, authField },
-        { $set: { value: encryptedValue } },
-        { new: true, upsert: true },
-      ).lean();
-    } else {
-      const newPluginAuth = await new PluginAuth({
-        userId,
-        authField,
-        value: encryptedValue,
-        pluginKey,
-      });
-      await newPluginAuth.save();
-      return newPluginAuth.toObject();
-    }
+    return await updatePluginAuth({
+      userId,
+      authField,
+      pluginKey,
+      value: encryptedValue,
+    });
  } catch (err) {
    logger.error('[updateUserPluginAuth]', err);
    return err;
@@ -105,26 +94,25 @@ const updateUserPluginAuth = async (userId, authField, pluginKey, value) => {
/**
 * @async
 * @param {string} userId
- * @param {string} authField
- * @param {boolean} [all]
+ * @param {string | null} authField - The specific authField to delete, or null if `all` is true.
+ * @param {boolean} [all=false] - Whether to delete all auths for the user (or for a specific pluginKey if provided).
+ * @param {string} [pluginKey] - Optional. If `all` is true and `pluginKey` is provided, delete all auths for this user and pluginKey.
 * @returns {Promise<import('mongoose').DeleteResult>}
 * @throws {Error}
 */
-const deleteUserPluginAuth = async (userId, authField, all = false) => {
-  if (all) {
-    try {
-      const response = await PluginAuth.deleteMany({ userId });
-      return response;
-    } catch (err) {
-      logger.error('[deleteUserPluginAuth]', err);
-      return err;
-    }
-  }
-  try {
-    return await PluginAuth.deleteOne({ userId, authField });
-  } catch (err) {
-    logger.error('[deleteUserPluginAuth]', err);
-    return err;
-  }
-};
+const deleteUserPluginAuth = async (userId, authField, all = false, pluginKey) => {
+  try {
+    return await deletePluginAuth({
+      userId,
+      authField,
+      pluginKey,
+      all,
+    });
+  } catch (err) {
+    logger.error(
+      `[deleteUserPluginAuth] Error deleting ${all ? 'all' : 'single'} auth(s) for userId: ${userId}${pluginKey ? ` and pluginKey: ${pluginKey}` : ''}`,
+      err,
+    );
+    return err;
+  }
+};
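With the reworked signature, callers choose between three delete modes (IDs hypothetical, inside an async context):

// Delete a single credential field for a user.
await deleteUserPluginAuth('user123', 'MY_API_KEY');

// Delete every plugin auth the user has.
await deleteUserPluginAuth('user123', null, true);

// Delete all auths for one plugin only.
await deleteUserPluginAuth('user123', null, true, 'google');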


@@ -1,6 +1,6 @@
const axios = require('axios');
+const { logAxiosError } = require('@librechat/api');
const { EModelEndpoint } = require('librechat-data-provider');
-const { logAxiosError } = require('~/utils');
/**
 * @typedef {Object} RetrieveOptions


@@ -1,172 +0,0 @@
const axios = require('axios');
const { handleOAuthToken } = require('~/models/Token');
const { decryptV2 } = require('~/server/utils/crypto');
const { logAxiosError } = require('~/utils');
const { logger } = require('~/config');
/**
* Processes the access tokens and stores them in the database.
* @param {object} tokenData
* @param {string} tokenData.access_token
* @param {number} tokenData.expires_in
* @param {string} [tokenData.refresh_token]
* @param {number} [tokenData.refresh_token_expires_in]
* @param {object} metadata
* @param {string} metadata.userId
* @param {string} metadata.identifier
* @returns {Promise<void>}
*/
async function processAccessTokens(tokenData, { userId, identifier }) {
const { access_token, expires_in = 3600, refresh_token, refresh_token_expires_in } = tokenData;
if (!access_token) {
logger.error('Access token not found: ', tokenData);
throw new Error('Access token not found');
}
await handleOAuthToken({
identifier,
token: access_token,
expiresIn: expires_in,
userId,
});
if (refresh_token != null) {
logger.debug('Processing refresh token');
await handleOAuthToken({
token: refresh_token,
type: 'oauth_refresh',
userId,
identifier: `${identifier}:refresh`,
expiresIn: refresh_token_expires_in ?? null,
});
}
logger.debug('Access tokens processed');
}
/**
* Refreshes the access token using the refresh token.
* @param {object} fields
* @param {string} fields.userId - The ID of the user.
* @param {string} fields.client_url - The URL of the OAuth provider.
* @param {string} fields.identifier - The identifier for the token.
* @param {string} fields.refresh_token - The refresh token to use.
* @param {string} fields.encrypted_oauth_client_id - The client ID for the OAuth provider.
* @param {string} fields.encrypted_oauth_client_secret - The client secret for the OAuth provider.
* @returns {Promise<{
* access_token: string,
* expires_in: number,
* refresh_token?: string,
* refresh_token_expires_in?: number,
* }>}
*/
const refreshAccessToken = async ({
userId,
client_url,
identifier,
refresh_token,
encrypted_oauth_client_id,
encrypted_oauth_client_secret,
}) => {
try {
const oauth_client_id = await decryptV2(encrypted_oauth_client_id);
const oauth_client_secret = await decryptV2(encrypted_oauth_client_secret);
const params = new URLSearchParams({
client_id: oauth_client_id,
client_secret: oauth_client_secret,
grant_type: 'refresh_token',
refresh_token,
});
const response = await axios({
method: 'POST',
url: client_url,
headers: {
'Content-Type': 'application/x-www-form-urlencoded',
Accept: 'application/json',
},
data: params.toString(),
});
await processAccessTokens(response.data, {
userId,
identifier,
});
logger.debug(`Access token refreshed successfully for ${identifier}`);
return response.data;
} catch (error) {
const message = 'Error refreshing OAuth tokens';
throw new Error(
logAxiosError({
message,
error,
}),
);
}
};
/**
* Handles the OAuth callback and exchanges the authorization code for tokens.
* @param {object} fields
* @param {string} fields.code - The authorization code returned by the provider.
* @param {string} fields.userId - The ID of the user.
* @param {string} fields.identifier - The identifier for the token.
* @param {string} fields.client_url - The URL of the OAuth provider.
* @param {string} fields.redirect_uri - The redirect URI for the OAuth provider.
* @param {string} fields.encrypted_oauth_client_id - The client ID for the OAuth provider.
* @param {string} fields.encrypted_oauth_client_secret - The client secret for the OAuth provider.
* @returns {Promise<{
* access_token: string,
* expires_in: number,
* refresh_token?: string,
* refresh_token_expires_in?: number,
* }>}
*/
const getAccessToken = async ({
code,
userId,
identifier,
client_url,
redirect_uri,
encrypted_oauth_client_id,
encrypted_oauth_client_secret,
}) => {
const oauth_client_id = await decryptV2(encrypted_oauth_client_id);
const oauth_client_secret = await decryptV2(encrypted_oauth_client_secret);
const params = new URLSearchParams({
code,
client_id: oauth_client_id,
client_secret: oauth_client_secret,
grant_type: 'authorization_code',
redirect_uri,
});
try {
const response = await axios({
method: 'POST',
url: client_url,
headers: {
'Content-Type': 'application/x-www-form-urlencoded',
Accept: 'application/json',
},
data: params.toString(),
});
await processAccessTokens(response.data, {
userId,
identifier,
});
logger.debug(`Access tokens successfully created for ${identifier}`);
return response.data;
} catch (error) {
const message = 'Error exchanging OAuth code';
throw new Error(
logAxiosError({
message,
error,
}),
);
}
};
module.exports = {
getAccessToken,
refreshAccessToken,
};
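The removed helpers wrapped the two halves of the authorization-code grant: `getAccessToken` exchanged the callback code for tokens, and `refreshAccessToken` renewed them later; both persisted results via `processAccessTokens`. A sketch of a call site (identifiers, URLs, and stored secrets are placeholders; assumes the exports above are in scope):

async function handleOAuthCallback(req, storedClientId, storedClientSecret) {
  // Exchange the authorization code from the redirect; tokens are persisted,
  // with the refresh token stored under `<identifier>:refresh`.
  return await getAccessToken({
    code: req.query.code,
    userId: req.user.id,
    identifier: 'example_action_domain',
    client_url: 'https://provider.example.com/oauth/token',
    redirect_uri: 'https://librechat.example.com/api/actions/callback',
    encrypted_oauth_client_id: storedClientId,
    encrypted_oauth_client_secret: storedClientSecret,
  });
}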

Some files were not shown because too many files have changed in this diff.