Merge branch 'main' into feature/entra-id-azure-integration

commit 631f4b3703
151 changed files with 3677 additions and 1242 deletions
@@ -327,16 +327,23 @@ const maybeUninstallOAuthMCP = async (userId, pluginKey, appConfig) => {
     const revocationEndpointAuthMethodsSupported =
       serverConfig.oauth?.revocation_endpoint_auth_methods_supported ??
       clientMetadata.revocation_endpoint_auth_methods_supported;
+    const oauthHeaders = serverConfig.oauth_headers ?? {};

     if (tokens?.access_token) {
       try {
-        await MCPOAuthHandler.revokeOAuthToken(serverName, tokens.access_token, 'access', {
-          serverUrl: serverConfig.url,
-          clientId: clientInfo.client_id,
-          clientSecret: clientInfo.client_secret ?? '',
-          revocationEndpoint,
-          revocationEndpointAuthMethodsSupported,
-        });
+        await MCPOAuthHandler.revokeOAuthToken(
+          serverName,
+          tokens.access_token,
+          'access',
+          {
+            serverUrl: serverConfig.url,
+            clientId: clientInfo.client_id,
+            clientSecret: clientInfo.client_secret ?? '',
+            revocationEndpoint,
+            revocationEndpointAuthMethodsSupported,
+          },
+          oauthHeaders,
+        );
       } catch (error) {
         logger.error(`Error revoking OAuth access token for ${serverName}:`, error);
       }
@@ -344,13 +351,19 @@ const maybeUninstallOAuthMCP = async (userId, pluginKey, appConfig) => {

     if (tokens?.refresh_token) {
       try {
-        await MCPOAuthHandler.revokeOAuthToken(serverName, tokens.refresh_token, 'refresh', {
-          serverUrl: serverConfig.url,
-          clientId: clientInfo.client_id,
-          clientSecret: clientInfo.client_secret ?? '',
-          revocationEndpoint,
-          revocationEndpointAuthMethodsSupported,
-        });
+        await MCPOAuthHandler.revokeOAuthToken(
+          serverName,
+          tokens.refresh_token,
+          'refresh',
+          {
+            serverUrl: serverConfig.url,
+            clientId: clientInfo.client_id,
+            clientSecret: clientInfo.client_secret ?? '',
+            revocationEndpoint,
+            revocationEndpointAuthMethodsSupported,
+          },
+          oauthHeaders,
+        );
       } catch (error) {
         logger.error(`Error revoking OAuth refresh token for ${serverName}:`, error);
       }
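Both revocation calls above now take a final argument carrying the per-server `oauth_headers` pulled from the raw MCP server config. A minimal sketch of where that value comes from, with a hypothetical config entry (only `url` and `oauth_headers` are read by this code; the header name and values are made up for illustration):

// Hypothetical raw server config, e.g. what getRawConfig(serverName) could return.
const serverConfig = {
  url: 'https://mcp.example.com',
  oauth_headers: {
    'X-Tenant-Id': 'contoso', // custom header to forward with OAuth requests
  },
};
const oauthHeaders = serverConfig.oauth_headers ?? {}; // falls back to {} when unset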
@@ -211,16 +211,13 @@ class AgentClient extends BaseClient {
    * @returns {Promise<Array<Partial<MongoFile>>>}
    */
   async addImageURLs(message, attachments) {
-    const { files, text, image_urls } = await encodeAndFormat(
+    const { files, image_urls } = await encodeAndFormat(
       this.options.req,
       attachments,
       this.options.agent.provider,
       VisionModes.agents,
     );
     message.image_urls = image_urls.length ? image_urls : undefined;
-    if (text && text.length) {
-      message.ocr = text;
-    }
     return files;
   }

@@ -248,19 +245,18 @@ class AgentClient extends BaseClient {

     if (this.options.attachments) {
       const attachments = await this.options.attachments;
+      const latestMessage = orderedMessages[orderedMessages.length - 1];

       if (this.message_file_map) {
-        this.message_file_map[orderedMessages[orderedMessages.length - 1].messageId] = attachments;
+        this.message_file_map[latestMessage.messageId] = attachments;
       } else {
         this.message_file_map = {
-          [orderedMessages[orderedMessages.length - 1].messageId]: attachments,
+          [latestMessage.messageId]: attachments,
         };
       }

-      const files = await this.processAttachments(
-        orderedMessages[orderedMessages.length - 1],
-        attachments,
-      );
+      await this.addFileContextToMessage(latestMessage, attachments);
+      const files = await this.processAttachments(latestMessage, attachments);

       this.options.attachments = files;
     }
@@ -280,21 +276,21 @@ class AgentClient extends BaseClient {
         assistantName: this.options?.modelLabel,
       });

-      if (message.ocr && i !== orderedMessages.length - 1) {
+      if (message.fileContext && i !== orderedMessages.length - 1) {
         if (typeof formattedMessage.content === 'string') {
-          formattedMessage.content = message.ocr + '\n' + formattedMessage.content;
+          formattedMessage.content = message.fileContext + '\n' + formattedMessage.content;
         } else {
           const textPart = formattedMessage.content.find((part) => part.type === 'text');
           textPart
-            ? (textPart.text = message.ocr + '\n' + textPart.text)
-            : formattedMessage.content.unshift({ type: 'text', text: message.ocr });
+            ? (textPart.text = message.fileContext + '\n' + textPart.text)
+            : formattedMessage.content.unshift({ type: 'text', text: message.fileContext });
         }
-      } else if (message.ocr && i === orderedMessages.length - 1) {
-        systemContent = [systemContent, message.ocr].join('\n');
+      } else if (message.fileContext && i === orderedMessages.length - 1) {
+        systemContent = [systemContent, message.fileContext].join('\n');
       }

       const needsTokenCount =
-        (this.contextStrategy && !orderedMessages[i].tokenCount) || message.ocr;
+        (this.contextStrategy && !orderedMessages[i].tokenCount) || message.fileContext;

       /* If tokens were never counted, or, is a Vision request and the message has files, count again */
       if (needsTokenCount || (this.isVisionModel && (message.image_urls || message.files))) {
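The two branches above exist because `formattedMessage.content` can be either a plain string or an array of typed parts. A small sketch of the array case with hypothetical values, showing how `message.fileContext` is prepended to an existing text part (or unshifted as a new one when none exists):

// Hypothetical message shapes, for illustration only.
const message = { fileContext: '# "notes.md"\nAttached document text' };
const formattedMessage = {
  content: [
    { type: 'text', text: 'User question' },
    { type: 'image_url', image_url: { url: 'data:image/png;base64,...' } },
  ],
};

const textPart = formattedMessage.content.find((part) => part.type === 'text');
textPart
  ? (textPart.text = message.fileContext + '\n' + textPart.text)
  : formattedMessage.content.unshift({ type: 'text', text: message.fileContext });
// textPart.text === '# "notes.md"\nAttached document text\nUser question'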
@@ -127,8 +127,13 @@ describe('MCP Routes', () => {
       }),
     };

+    const mockMcpManager = {
+      getRawConfig: jest.fn().mockReturnValue({}),
+    };
+
     getLogStores.mockReturnValue({});
     require('~/config').getFlowStateManager.mockReturnValue(mockFlowManager);
+    require('~/config').getMCPManager.mockReturnValue(mockMcpManager);

     MCPOAuthHandler.initiateOAuthFlow.mockResolvedValue({
       authorizationUrl: 'https://oauth.example.com/auth',
@@ -146,6 +151,7 @@ describe('MCP Routes', () => {
       'test-server',
       'https://test-server.com',
       'test-user-id',
+      {},
       { clientId: 'test-client-id' },
     );
   });
@@ -314,6 +320,7 @@ describe('MCP Routes', () => {
     };
     const mockMcpManager = {
       getUserConnection: jest.fn().mockResolvedValue(mockUserConnection),
+      getRawConfig: jest.fn().mockReturnValue({}),
     };
     require('~/config').getMCPManager.mockReturnValue(mockMcpManager);

@@ -336,6 +343,7 @@ describe('MCP Routes', () => {
       'test-flow-id',
       'test-auth-code',
       mockFlowManager,
+      {},
     );
     expect(MCPTokenStorage.storeTokens).toHaveBeenCalledWith(
       expect.objectContaining({
@@ -392,6 +400,11 @@ describe('MCP Routes', () => {
     getLogStores.mockReturnValue({});
     require('~/config').getFlowStateManager.mockReturnValue(mockFlowManager);

+    const mockMcpManager = {
+      getRawConfig: jest.fn().mockReturnValue({}),
+    };
+    require('~/config').getMCPManager.mockReturnValue(mockMcpManager);
+
     const response = await request(app).get('/api/mcp/test-server/oauth/callback').query({
       code: 'test-auth-code',
       state: 'test-flow-id',
@@ -427,6 +440,7 @@ describe('MCP Routes', () => {

     const mockMcpManager = {
       getUserConnection: jest.fn().mockRejectedValue(new Error('Reconnection failed')),
+      getRawConfig: jest.fn().mockReturnValue({}),
     };
     require('~/config').getMCPManager.mockReturnValue(mockMcpManager);

@@ -1234,6 +1248,7 @@ describe('MCP Routes', () => {
       getUserConnection: jest.fn().mockResolvedValue({
         fetchTools: jest.fn().mockResolvedValue([]),
       }),
+      getRawConfig: jest.fn().mockReturnValue({}),
     };
     require('~/config').getMCPManager.mockReturnValue(mockMcpManager);

@@ -1281,6 +1296,7 @@ describe('MCP Routes', () => {
         .fn()
         .mockResolvedValue([{ name: 'test-tool', description: 'Test tool' }]),
       }),
+      getRawConfig: jest.fn().mockReturnValue({}),
     };
     require('~/config').getMCPManager.mockReturnValue(mockMcpManager);

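Each of these test setups now stubs `getRawConfig` because the OAuth routes look up the server's raw config (via the `getOAuthHeaders` helper added further down) before initiating, completing, or reconnecting a flow. A minimal standalone sketch of the required mock shape — the `oauth_headers` value here is hypothetical; the tests above return an empty object:

// getMCPManager() must expose getRawConfig so getOAuthHeaders(serverName)
// can read oauth_headers without throwing.
const mockMcpManager = {
  getRawConfig: jest.fn().mockReturnValue({ oauth_headers: { 'X-Custom': 'value' } }),
};
require('~/config').getMCPManager.mockReturnValue(mockMcpManager);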
@@ -115,6 +115,9 @@ router.get('/', async function (req, res) {
     sharePointPickerGraphScope: process.env.SHAREPOINT_PICKER_GRAPH_SCOPE,
     sharePointPickerSharePointScope: process.env.SHAREPOINT_PICKER_SHAREPOINT_SCOPE,
     openidReuseTokens,
+    conversationImportMaxFileSize: process.env.CONVERSATION_IMPORT_MAX_FILE_SIZE_BYTES
+      ? parseInt(process.env.CONVERSATION_IMPORT_MAX_FILE_SIZE_BYTES, 10)
+      : 0,
   };

   const minPasswordLength = parseInt(process.env.MIN_PASSWORD_LENGTH, 10);
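Because the startup config now advertises `conversationImportMaxFileSize` (0 when the env var is unset), a client can validate an import file before uploading it. A hedged sketch, assuming `startupConfig` is the JSON object built above and `file` is the file selected for import:

// Illustrative client-side guard; treats 0 as "no explicit limit configured".
function canImportFile(file, startupConfig) {
  const limit = startupConfig.conversationImportMaxFileSize;
  return limit === 0 || file.size <= limit;
}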
@@ -65,6 +65,7 @@ router.get('/:serverName/oauth/initiate', requireJwtAuth, async (req, res) => {
       serverName,
       serverUrl,
       userId,
+      getOAuthHeaders(serverName),
       oauthConfig,
     );

@@ -132,7 +133,12 @@ router.get('/:serverName/oauth/callback', async (req, res) => {
     });

     logger.debug('[MCP OAuth] Completing OAuth flow');
-    const tokens = await MCPOAuthHandler.completeOAuthFlow(flowId, code, flowManager);
+    const tokens = await MCPOAuthHandler.completeOAuthFlow(
+      flowId,
+      code,
+      flowManager,
+      getOAuthHeaders(serverName),
+    );
     logger.info('[MCP OAuth] OAuth flow completed, tokens received in callback route');

     /** Persist tokens immediately so reconnection uses fresh credentials */
@@ -538,4 +544,10 @@ router.get('/:serverName/auth-values', requireJwtAuth, async (req, res) => {
   }
 });

+function getOAuthHeaders(serverName) {
+  const mcpManager = getMCPManager();
+  const serverConfig = mcpManager.getRawConfig(serverName);
+  return serverConfig?.oauth_headers ?? {};
+}
+
 module.exports = router;
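The routes above only resolve the configured headers and forward them; applying them is up to `MCPOAuthHandler`. As a purely illustrative sketch (not the handler's actual implementation), custom per-server headers would typically be merged into a token-endpoint request like this:

// Illustrative pattern only; function and parameter names are hypothetical.
async function exchangeAuthorizationCode(tokenEndpoint, params, oauthHeaders = {}) {
  const response = await fetch(tokenEndpoint, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/x-www-form-urlencoded',
      ...oauthHeaders, // e.g. a tenant or API-gateway header from oauth_headers
    },
    body: new URLSearchParams(params),
  });
  return response.json();
}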
@@ -99,7 +99,8 @@ router.get('/link/:conversationId', requireJwtAuth, async (req, res) => {

 router.post('/:conversationId', requireJwtAuth, async (req, res) => {
   try {
-    const created = await createSharedLink(req.user.id, req.params.conversationId);
+    const { targetMessageId } = req.body;
+    const created = await createSharedLink(req.user.id, req.params.conversationId, targetMessageId);
     if (created) {
       res.status(200).json(created);
     } else {
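With this change a client can pin a shared link to a specific message by sending `targetMessageId` in the request body; omitting it keeps the previous behavior. A hedged usage sketch — the `/api/share` mount path and the variables are assumptions for illustration:

// Hypothetical client call against the route above.
await fetch(`/api/share/${conversationId}`, {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ targetMessageId }), // optional
});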
@@ -85,7 +85,9 @@ async function loadConfigModels(req) {
     }

     if (Array.isArray(models.default)) {
-      modelsConfig[name] = models.default;
+      modelsConfig[name] = models.default.map((model) =>
+        typeof model === 'string' ? model : model.name,
+      );
     }
   }

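The added `map` normalizes `models.default`, which may now contain plain model names or objects carrying a `name` field. A small example of the effect (the extra `context` property is hypothetical):

const models = { default: ['llama3-70b', { name: 'mixtral-8x7b', context: 32768 }] };
const normalized = models.default.map((model) => (typeof model === 'string' ? model : model.name));
// normalized -> ['llama3-70b', 'mixtral-8x7b']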
@@ -254,8 +254,8 @@ describe('loadConfigModels', () => {
     // For groq and ollama, since the apiKey is "user_provided", models should not be fetched
     // Depending on your implementation's behavior regarding "default" models without fetching,
     // you may need to adjust the following assertions:
-    expect(result.groq).toBe(exampleConfig.endpoints.custom[2].models.default);
-    expect(result.ollama).toBe(exampleConfig.endpoints.custom[3].models.default);
+    expect(result.groq).toEqual(exampleConfig.endpoints.custom[2].models.default);
+    expect(result.ollama).toEqual(exampleConfig.endpoints.custom[3].models.default);

     // Verifying fetchModels was not called for groq and ollama
     expect(fetchModels).not.toHaveBeenCalledWith(
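The switch from `toBe` to `toEqual` follows from the change above: `loadConfigModels` now returns a new array produced by `map`, so it can no longer be the very same reference as the config's `models.default`, only deeply equal to it when every entry is already a string. A short illustration with made-up values:

const defaults = ['llama3-70b', 'mixtral-8x7b'];
// map always builds a new array, so reference equality fails while deep equality holds:
expect(defaults.map((m) => m)).not.toBe(defaults);
expect(defaults.map((m) => m)).toEqual(defaults);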
@@ -1,16 +1,14 @@
 const axios = require('axios');
+const { logAxiosError } = require('@librechat/api');
 const { logger } = require('@librechat/data-schemas');
-const { logAxiosError, processTextWithTokenLimit } = require('@librechat/api');
 const {
   FileSources,
   VisionModes,
   ImageDetail,
   ContentTypes,
   EModelEndpoint,
-  mergeFileConfig,
 } = require('librechat-data-provider');
 const { getStrategyFunctions } = require('~/server/services/Files/strategies');
-const countTokens = require('~/server/utils/countTokens');

 /**
  * Converts a readable stream to a base64 encoded string.
@@ -88,15 +86,14 @@ const blobStorageSources = new Set([FileSources.azure_blob, FileSources.s3]);
  * @param {Array<MongoFile>} files - The array of files to encode and format.
  * @param {EModelEndpoint} [endpoint] - Optional: The endpoint for the image.
  * @param {string} [mode] - Optional: The endpoint mode for the image.
- * @returns {Promise<{ text: string; files: MongoFile[]; image_urls: MessageContentImageUrl[] }>} - A promise that resolves to the result object containing the encoded images and file details.
+ * @returns {Promise<{ files: MongoFile[]; image_urls: MessageContentImageUrl[] }>} - A promise that resolves to the result object containing the encoded images and file details.
  */
 async function encodeAndFormat(req, files, endpoint, mode) {
   const promises = [];
   /** @type {Record<FileSources, Pick<ReturnType<typeof getStrategyFunctions>, 'prepareImagePayload' | 'getDownloadStream'>>} */
   const encodingMethods = {};
-  /** @type {{ text: string; files: MongoFile[]; image_urls: MessageContentImageUrl[] }} */
+  /** @type {{ files: MongoFile[]; image_urls: MessageContentImageUrl[] }} */
   const result = {
-    text: '',
     files: [],
     image_urls: [],
   };
@@ -105,29 +102,9 @@ async function encodeAndFormat(req, files, endpoint, mode) {
     return result;
   }

-  const fileTokenLimit =
-    req.body?.fileTokenLimit ?? mergeFileConfig(req.config?.fileConfig).fileTokenLimit;
-
   for (let file of files) {
     /** @type {FileSources} */
     const source = file.source ?? FileSources.local;
-    if (source === FileSources.text && file.text) {
-      let fileText = file.text;
-
-      const { text: limitedText, wasTruncated } = await processTextWithTokenLimit({
-        text: fileText,
-        tokenLimit: fileTokenLimit,
-        tokenCountFn: (text) => countTokens(text),
-      });
-
-      if (wasTruncated) {
-        logger.debug(
-          `[encodeAndFormat] Text content truncated for file: ${file.filename} due to token limits`,
-        );
-      }
-
-      result.text += `${!result.text ? 'Attached document(s):\n```md' : '\n\n---\n\n'}# "${file.filename}"\n${limitedText}\n`;
-    }

     if (!file.height) {
       promises.push([file, null]);
@@ -165,10 +142,6 @@ async function encodeAndFormat(req, files, endpoint, mode) {
     promises.push(preparePayload(req, file));
   }

-  if (result.text) {
-    result.text += '\n```';
-  }
-
   const detail = req.body.imageDetail ?? ImageDetail.auto;

   /** @type {Array<[MongoFile, string]>} */
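With the `text` field dropped from the result, any caller that still destructures it from `encodeAndFormat` will get `undefined`; per the AgentClient hunks earlier, attached-document text now appears to flow through `addFileContextToMessage` and `message.fileContext` instead. For reference, the updated call shape (variable names here are illustrative):

// encodeAndFormat now resolves to { files, image_urls } only.
const { files, image_urls } = await encodeAndFormat(
  req,
  attachments,
  provider, // an EModelEndpoint value
  VisionModes.agents,
);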
@@ -508,7 +508,10 @@ const processAgentFileUpload = async ({ req, res, metadata }) => {
   const { file } = req;
   const appConfig = req.config;
   const { agent_id, tool_resource, file_id, temp_file_id = null } = metadata;
-  if (agent_id && !tool_resource) {
+
+  let messageAttachment = !!metadata.message_file;
+
+  if (agent_id && !tool_resource && !messageAttachment) {
     throw new Error('No tool resource provided for agent file upload');
   }

@@ -516,7 +519,6 @@ const processAgentFileUpload = async ({ req, res, metadata }) => {
     throw new Error('Image uploads are not supported for file search tool resources');
   }

-  let messageAttachment = !!metadata.message_file;
   if (!messageAttachment && !agent_id) {
     throw new Error('No agent ID provided for agent file upload');
   }
@@ -10,6 +10,15 @@ const importConversations = async (job) => {
   const { filepath, requestUserId } = job;
   try {
     logger.debug(`user: ${requestUserId} | Importing conversation(s) from file...`);
+
+    /* error if file is too large */
+    const fileInfo = await fs.stat(filepath);
+    if (fileInfo.size > process.env.CONVERSATION_IMPORT_MAX_FILE_SIZE_BYTES) {
+      throw new Error(
+        `File size is ${fileInfo.size} bytes. It exceeds the maximum limit of ${process.env.CONVERSATION_IMPORT_MAX_FILE_SIZE_BYTES} bytes.`,
+      );
+    }
+
     const fileData = await fs.readFile(filepath, 'utf8');
     const jsonData = JSON.parse(fileData);
     const importer = getImporter(jsonData);
@@ -17,6 +26,7 @@ const importConversations = async (job) => {
     logger.debug(`user: ${requestUserId} | Finished importing conversations`);
   } catch (error) {
     logger.error(`user: ${requestUserId} | Failed to import conversation: `, error);
+    throw error; // throw error all the way up so request does not return success
   } finally {
     try {
       await fs.unlink(filepath);
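One detail worth noting about the new guard: `process.env` values are strings, so the size check relies on JavaScript's numeric coercion, and it is simply false when the variable is unset (`undefined` coerces to `NaN`). A sketch of the same check with the conversion made explicit, as it would sit inside the `try` block above:

const maxBytes = Number(process.env.CONVERSATION_IMPORT_MAX_FILE_SIZE_BYTES);
const fileInfo = await fs.stat(filepath);
if (fileInfo.size > maxBytes) {
  // When the env var is unset, maxBytes is NaN and this branch never runs,
  // matching the behavior of the raw comparison above.
  throw new Error(`File size is ${fileInfo.size} bytes. It exceeds the maximum limit of ${maxBytes} bytes.`);
}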