Mirror of https://github.com/danny-avila/LibreChat.git (synced 2026-01-07 19:18:52 +01:00)
Merge branch 'main' into feature/entra-id-azure-integration
commit 631f4b3703
151 changed files with 3677 additions and 1242 deletions
@@ -3,6 +3,7 @@ const fetch = require('node-fetch');
const { logger } = require('@librechat/data-schemas');
const {
  getBalanceConfig,
  extractFileContext,
  encodeAndFormatAudios,
  encodeAndFormatVideos,
  encodeAndFormatDocuments,
@@ -10,6 +11,7 @@ const {
const {
  Constants,
  ErrorTypes,
  FileSources,
  ContentTypes,
  excludedKeys,
  EModelEndpoint,
@@ -21,6 +23,7 @@ const { getMessages, saveMessage, updateMessage, saveConvo, getConvo } = require
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
const { checkBalance } = require('~/models/balanceMethods');
const { truncateToolCallOutputs } = require('./prompts');
const countTokens = require('~/server/utils/countTokens');
const { getFiles } = require('~/models/File');
const TextStream = require('./TextStream');

@@ -1245,27 +1248,62 @@ class BaseClient {
    return audioResult.files;
  }

  /**
   * Extracts text context from attachments and sets it on the message.
   * This handles text that was already extracted from files (OCR, transcriptions, document text, etc.)
   * @param {TMessage} message - The message to add context to
   * @param {MongoFile[]} attachments - Array of file attachments
   * @returns {Promise<void>}
   */
  async addFileContextToMessage(message, attachments) {
    const fileContext = await extractFileContext({
      attachments,
      req: this.options?.req,
      tokenCountFn: (text) => countTokens(text),
    });

    if (fileContext) {
      message.fileContext = fileContext;
    }
  }

  async processAttachments(message, attachments) {
    const categorizedAttachments = {
      images: [],
      documents: [],
      videos: [],
      audios: [],
      documents: [],
    };

    const allFiles = [];

    for (const file of attachments) {
      /** @type {FileSources} */
      const source = file.source ?? FileSources.local;
      if (source === FileSources.text) {
        allFiles.push(file);
        continue;
      }
      if (file.embedded === true || file.metadata?.fileIdentifier != null) {
        allFiles.push(file);
        continue;
      }

      if (file.type.startsWith('image/')) {
        categorizedAttachments.images.push(file);
      } else if (file.type === 'application/pdf') {
        categorizedAttachments.documents.push(file);
        allFiles.push(file);
      } else if (file.type.startsWith('video/')) {
        categorizedAttachments.videos.push(file);
        allFiles.push(file);
      } else if (file.type.startsWith('audio/')) {
        categorizedAttachments.audios.push(file);
        allFiles.push(file);
      }
    }

    const [imageFiles, documentFiles, videoFiles, audioFiles] = await Promise.all([
    const [imageFiles] = await Promise.all([
      categorizedAttachments.images.length > 0
        ? this.addImageURLs(message, categorizedAttachments.images)
        : Promise.resolve([]),
@@ -1280,7 +1318,8 @@ class BaseClient {
        : Promise.resolve([]),
    ]);

    const allFiles = [...imageFiles, ...documentFiles, ...videoFiles, ...audioFiles];
    allFiles.push(...imageFiles);

    const seenFileIds = new Set();
    const uniqueFiles = [];

@@ -1345,6 +1384,7 @@ class BaseClient {
      {},
    );

    await this.addFileContextToMessage(message, files);
    await this.processAttachments(message, files);

    this.message_file_map[message.messageId] = files;
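For orientation, here is a rough sketch (not code from this commit) of how the two new BaseClient steps are meant to be called in sequence; the client instance, message, and attachment objects below are hypothetical stand-ins.

// 1) Pre-extracted text (OCR, transcriptions, document text) becomes message.fileContext.
// 2) processAttachments then categorizes the remaining files by MIME type; only images
//    are routed through addImageURLs, everything else is passed through as file metadata.
const message = { messageId: 'msg-1', text: 'Summarize the attached PDF' };
const attachments = [
  { file_id: 'a', type: 'application/pdf', source: 'text', text: 'extracted pdf text…' },
  { file_id: 'b', type: 'image/png', height: 512, width: 512 },
];

await client.addFileContextToMessage(message, attachments); // sets message.fileContext (if any text was extracted)
const files = await client.processAttachments(message, attachments); // returns the de-duplicated file list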
@@ -3,6 +3,7 @@ const { EModelEndpoint, ArtifactModes } = require('librechat-data-provider');
const { generateShadcnPrompt } = require('~/app/clients/prompts/shadcn-docs/generate');
const { components } = require('~/app/clients/prompts/shadcn-docs/components');

/** @deprecated */
// eslint-disable-next-line no-unused-vars
const artifactsPromptV1 = dedent`The assistant can create and reference artifacts during conversations.

@@ -115,6 +116,7 @@ Here are some examples of correct usage of artifacts:
</assistant_response>
</example>
</examples>`;

const artifactsPrompt = dedent`The assistant can create and reference artifacts during conversations.

Artifacts are for substantial, self-contained content that users might modify or reuse, displayed in a separate UI window for clarity.
@@ -165,6 +167,10 @@ Artifacts are for substantial, self-contained content that users might modify or
- SVG: "image/svg+xml"
  - The user interface will render the Scalable Vector Graphics (SVG) image within the artifact tags.
  - The assistant should specify the viewbox of the SVG rather than defining a width/height
- Markdown: "text/markdown" or "text/md"
  - The user interface will render Markdown content placed within the artifact tags.
  - Supports standard Markdown syntax including headers, lists, links, images, code blocks, tables, and more.
  - Both "text/markdown" and "text/md" are accepted as valid MIME types for Markdown content.
- Mermaid Diagrams: "application/vnd.mermaid"
  - The user interface will render Mermaid diagrams placed within the artifact tags.
- React Components: "application/vnd.react"
@@ -366,6 +372,10 @@ Artifacts are for substantial, self-contained content that users might modify or
- SVG: "image/svg+xml"
  - The user interface will render the Scalable Vector Graphics (SVG) image within the artifact tags.
  - The assistant should specify the viewbox of the SVG rather than defining a width/height
- Markdown: "text/markdown" or "text/md"
  - The user interface will render Markdown content placed within the artifact tags.
  - Supports standard Markdown syntax including headers, lists, links, images, code blocks, tables, and more.
  - Both "text/markdown" and "text/md" are accepted as valid MIME types for Markdown content.
- Mermaid Diagrams: "application/vnd.mermaid"
  - The user interface will render Mermaid diagrams placed within the artifact tags.
- React Components: "application/vnd.react"
@@ -125,7 +125,7 @@ const tokenValues = Object.assign(
  'gemini-2.0': { prompt: 0, completion: 0 }, // https://ai.google.dev/pricing
  'gemini-2.5-pro': { prompt: 1.25, completion: 10 },
  'gemini-2.5-flash': { prompt: 0.3, completion: 2.5 },
  'gemini-2.5-flash-lite': { prompt: 0.075, completion: 0.4 },
  'gemini-2.5-flash-lite': { prompt: 0.1, completion: 0.4 },
  'gemini-2.5': { prompt: 0, completion: 0 }, // Free for a period of time
  'gemini-1.5-flash-8b': { prompt: 0.075, completion: 0.3 },
  'gemini-1.5-flash': { prompt: 0.15, completion: 0.6 },
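These rate entries appear to be USD per one million tokens (which matches the linked Gemini pricing page), so this hunk bumps the gemini-2.5-flash-lite prompt rate from 0.075 to 0.1. A rough cost sketch under that assumption; estimateCost is a hypothetical helper, not part of this commit.

// Assumes the rates above are USD per 1M tokens.
const rates = { 'gemini-2.5-flash-lite': { prompt: 0.1, completion: 0.4 } };

function estimateCost(model, promptTokens, completionTokens) {
  const { prompt, completion } = rates[model];
  return (promptTokens * prompt + completionTokens * completion) / 1e6;
}

console.log(estimateCost('gemini-2.5-flash-lite', 12000, 800)); // ≈ 0.00152 USD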
@@ -93,7 +93,7 @@
    "multer": "^2.0.2",
    "nanoid": "^3.3.7",
    "node-fetch": "^2.7.0",
    "nodemailer": "^6.9.15",
    "nodemailer": "^7.0.9",
    "ollama": "^0.5.0",
    "openai": "^5.10.1",
    "openid-client": "^6.5.0",
@@ -327,16 +327,23 @@ const maybeUninstallOAuthMCP = async (userId, pluginKey, appConfig) => {
  const revocationEndpointAuthMethodsSupported =
    serverConfig.oauth?.revocation_endpoint_auth_methods_supported ??
    clientMetadata.revocation_endpoint_auth_methods_supported;
  const oauthHeaders = serverConfig.oauth_headers ?? {};

  if (tokens?.access_token) {
    try {
      await MCPOAuthHandler.revokeOAuthToken(serverName, tokens.access_token, 'access', {
        serverUrl: serverConfig.url,
        clientId: clientInfo.client_id,
        clientSecret: clientInfo.client_secret ?? '',
        revocationEndpoint,
        revocationEndpointAuthMethodsSupported,
      });
      await MCPOAuthHandler.revokeOAuthToken(
        serverName,
        tokens.access_token,
        'access',
        {
          serverUrl: serverConfig.url,
          clientId: clientInfo.client_id,
          clientSecret: clientInfo.client_secret ?? '',
          revocationEndpoint,
          revocationEndpointAuthMethodsSupported,
        },
        oauthHeaders,
      );
    } catch (error) {
      logger.error(`Error revoking OAuth access token for ${serverName}:`, error);
    }
@@ -344,13 +351,19 @@ const maybeUninstallOAuthMCP = async (userId, pluginKey, appConfig) => {

  if (tokens?.refresh_token) {
    try {
      await MCPOAuthHandler.revokeOAuthToken(serverName, tokens.refresh_token, 'refresh', {
        serverUrl: serverConfig.url,
        clientId: clientInfo.client_id,
        clientSecret: clientInfo.client_secret ?? '',
        revocationEndpoint,
        revocationEndpointAuthMethodsSupported,
      });
      await MCPOAuthHandler.revokeOAuthToken(
        serverName,
        tokens.refresh_token,
        'refresh',
        {
          serverUrl: serverConfig.url,
          clientId: clientInfo.client_id,
          clientSecret: clientInfo.client_secret ?? '',
          revocationEndpoint,
          revocationEndpointAuthMethodsSupported,
        },
        oauthHeaders,
      );
    } catch (error) {
      logger.error(`Error revoking OAuth refresh token for ${serverName}:`, error);
    }
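The change here threads the per-server oauth_headers from the MCP server config into token revocation as a trailing argument, so tenant-specific headers reach the revocation request as well. A small sketch of the shape being consumed; the config values are illustrative, not taken from the repo.

// Illustrative server config (as it might appear after librechat.yaml is parsed).
const serverConfig = {
  url: 'https://mcp.example.com',
  oauth_headers: { 'X-Tenant-Id': 'contoso' },
  oauth: { revocation_endpoint_auth_methods_supported: ['client_secret_basic'] },
};

// Falls back to an empty object when no headers are configured, matching the diff above.
const oauthHeaders = serverConfig.oauth_headers ?? {};
// await MCPOAuthHandler.revokeOAuthToken(serverName, token, 'access', options, oauthHeaders);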
@@ -211,16 +211,13 @@ class AgentClient extends BaseClient {
   * @returns {Promise<Array<Partial<MongoFile>>>}
   */
  async addImageURLs(message, attachments) {
    const { files, text, image_urls } = await encodeAndFormat(
    const { files, image_urls } = await encodeAndFormat(
      this.options.req,
      attachments,
      this.options.agent.provider,
      VisionModes.agents,
    );
    message.image_urls = image_urls.length ? image_urls : undefined;
    if (text && text.length) {
      message.ocr = text;
    }
    return files;
  }

@@ -248,19 +245,18 @@ class AgentClient extends BaseClient {

    if (this.options.attachments) {
      const attachments = await this.options.attachments;
      const latestMessage = orderedMessages[orderedMessages.length - 1];

      if (this.message_file_map) {
        this.message_file_map[orderedMessages[orderedMessages.length - 1].messageId] = attachments;
        this.message_file_map[latestMessage.messageId] = attachments;
      } else {
        this.message_file_map = {
          [orderedMessages[orderedMessages.length - 1].messageId]: attachments,
          [latestMessage.messageId]: attachments,
        };
      }

      const files = await this.processAttachments(
        orderedMessages[orderedMessages.length - 1],
        attachments,
      );
      await this.addFileContextToMessage(latestMessage, attachments);
      const files = await this.processAttachments(latestMessage, attachments);

      this.options.attachments = files;
    }
@@ -280,21 +276,21 @@ class AgentClient extends BaseClient {
        assistantName: this.options?.modelLabel,
      });

      if (message.ocr && i !== orderedMessages.length - 1) {
      if (message.fileContext && i !== orderedMessages.length - 1) {
        if (typeof formattedMessage.content === 'string') {
          formattedMessage.content = message.ocr + '\n' + formattedMessage.content;
          formattedMessage.content = message.fileContext + '\n' + formattedMessage.content;
        } else {
          const textPart = formattedMessage.content.find((part) => part.type === 'text');
          textPart
            ? (textPart.text = message.ocr + '\n' + textPart.text)
            : formattedMessage.content.unshift({ type: 'text', text: message.ocr });
            ? (textPart.text = message.fileContext + '\n' + textPart.text)
            : formattedMessage.content.unshift({ type: 'text', text: message.fileContext });
        }
      } else if (message.ocr && i === orderedMessages.length - 1) {
        systemContent = [systemContent, message.ocr].join('\n');
      } else if (message.fileContext && i === orderedMessages.length - 1) {
        systemContent = [systemContent, message.fileContext].join('\n');
      }

      const needsTokenCount =
        (this.contextStrategy && !orderedMessages[i].tokenCount) || message.ocr;
        (this.contextStrategy && !orderedMessages[i].tokenCount) || message.fileContext;

      /* If tokens were never counted, or, is a Vision request and the message has files, count again */
      if (needsTokenCount || (this.isVisionModel && (message.image_urls || message.files))) {
@@ -127,8 +127,13 @@ describe('MCP Routes', () => {
      }),
    };

    const mockMcpManager = {
      getRawConfig: jest.fn().mockReturnValue({}),
    };

    getLogStores.mockReturnValue({});
    require('~/config').getFlowStateManager.mockReturnValue(mockFlowManager);
    require('~/config').getMCPManager.mockReturnValue(mockMcpManager);

    MCPOAuthHandler.initiateOAuthFlow.mockResolvedValue({
      authorizationUrl: 'https://oauth.example.com/auth',
@@ -146,6 +151,7 @@ describe('MCP Routes', () => {
        'test-server',
        'https://test-server.com',
        'test-user-id',
        {},
        { clientId: 'test-client-id' },
      );
    });
@@ -314,6 +320,7 @@ describe('MCP Routes', () => {
      };
      const mockMcpManager = {
        getUserConnection: jest.fn().mockResolvedValue(mockUserConnection),
        getRawConfig: jest.fn().mockReturnValue({}),
      };
      require('~/config').getMCPManager.mockReturnValue(mockMcpManager);

@@ -336,6 +343,7 @@ describe('MCP Routes', () => {
        'test-flow-id',
        'test-auth-code',
        mockFlowManager,
        {},
      );
      expect(MCPTokenStorage.storeTokens).toHaveBeenCalledWith(
        expect.objectContaining({
@@ -392,6 +400,11 @@ describe('MCP Routes', () => {
      getLogStores.mockReturnValue({});
      require('~/config').getFlowStateManager.mockReturnValue(mockFlowManager);

      const mockMcpManager = {
        getRawConfig: jest.fn().mockReturnValue({}),
      };
      require('~/config').getMCPManager.mockReturnValue(mockMcpManager);

      const response = await request(app).get('/api/mcp/test-server/oauth/callback').query({
        code: 'test-auth-code',
        state: 'test-flow-id',
@@ -427,6 +440,7 @@ describe('MCP Routes', () => {

      const mockMcpManager = {
        getUserConnection: jest.fn().mockRejectedValue(new Error('Reconnection failed')),
        getRawConfig: jest.fn().mockReturnValue({}),
      };
      require('~/config').getMCPManager.mockReturnValue(mockMcpManager);

@@ -1234,6 +1248,7 @@ describe('MCP Routes', () => {
        getUserConnection: jest.fn().mockResolvedValue({
          fetchTools: jest.fn().mockResolvedValue([]),
        }),
        getRawConfig: jest.fn().mockReturnValue({}),
      };
      require('~/config').getMCPManager.mockReturnValue(mockMcpManager);

@@ -1281,6 +1296,7 @@ describe('MCP Routes', () => {
          .fn()
          .mockResolvedValue([{ name: 'test-tool', description: 'Test tool' }]),
        }),
        getRawConfig: jest.fn().mockReturnValue({}),
      };
      require('~/config').getMCPManager.mockReturnValue(mockMcpManager);

@@ -115,6 +115,9 @@ router.get('/', async function (req, res) {
    sharePointPickerGraphScope: process.env.SHAREPOINT_PICKER_GRAPH_SCOPE,
    sharePointPickerSharePointScope: process.env.SHAREPOINT_PICKER_SHAREPOINT_SCOPE,
    openidReuseTokens,
    conversationImportMaxFileSize: process.env.CONVERSATION_IMPORT_MAX_FILE_SIZE_BYTES
      ? parseInt(process.env.CONVERSATION_IMPORT_MAX_FILE_SIZE_BYTES, 10)
      : 0,
  };

  const minPasswordLength = parseInt(process.env.MIN_PASSWORD_LENGTH, 10);
@@ -65,6 +65,7 @@ router.get('/:serverName/oauth/initiate', requireJwtAuth, async (req, res) => {
      serverName,
      serverUrl,
      userId,
      getOAuthHeaders(serverName),
      oauthConfig,
    );

@@ -132,7 +133,12 @@ router.get('/:serverName/oauth/callback', async (req, res) => {
    });

    logger.debug('[MCP OAuth] Completing OAuth flow');
    const tokens = await MCPOAuthHandler.completeOAuthFlow(flowId, code, flowManager);
    const tokens = await MCPOAuthHandler.completeOAuthFlow(
      flowId,
      code,
      flowManager,
      getOAuthHeaders(serverName),
    );
    logger.info('[MCP OAuth] OAuth flow completed, tokens received in callback route');

    /** Persist tokens immediately so reconnection uses fresh credentials */
@@ -538,4 +544,10 @@ router.get('/:serverName/auth-values', requireJwtAuth, async (req, res) => {
  }
});

function getOAuthHeaders(serverName) {
  const mcpManager = getMCPManager();
  const serverConfig = mcpManager.getRawConfig(serverName);
  return serverConfig?.oauth_headers ?? {};
}

module.exports = router;
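A behavior sketch for the getOAuthHeaders helper defined just above; the inline config object stands in for whatever getMCPManager().getRawConfig(serverName) returns for a configured server.

const mcpManager = {
  getRawConfig: (name) => ({ jira: { oauth_headers: { 'X-Org': 'acme' } } })[name],
};

function getOAuthHeaders(serverName) {
  const serverConfig = mcpManager.getRawConfig(serverName);
  return serverConfig?.oauth_headers ?? {};
}

console.log(getOAuthHeaders('jira'));    // { 'X-Org': 'acme' }
console.log(getOAuthHeaders('unknown')); // {} (safe fallback when nothing is configured)

Because both the initiate and callback routes call it, the same custom headers are sent during OAuth flow initiation and completion.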
@@ -99,7 +99,8 @@ router.get('/link/:conversationId', requireJwtAuth, async (req, res) => {

router.post('/:conversationId', requireJwtAuth, async (req, res) => {
  try {
    const created = await createSharedLink(req.user.id, req.params.conversationId);
    const { targetMessageId } = req.body;
    const created = await createSharedLink(req.user.id, req.params.conversationId, targetMessageId);
    if (created) {
      res.status(200).json(created);
    } else {
@@ -85,7 +85,9 @@ async function loadConfigModels(req) {
    }

    if (Array.isArray(models.default)) {
      modelsConfig[name] = models.default;
      modelsConfig[name] = models.default.map((model) =>
        typeof model === 'string' ? model : model.name,
      );
    }
  }

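With this change, entries in models.default may be either plain strings or objects carrying a name field, and the map normalizes both to strings. A minimal sketch (the config shape is inferred from the mapping above):

const models = {
  default: ['llama3', { name: 'mistral-small', context: 32768 }],
};

const normalized = Array.isArray(models.default)
  ? models.default.map((model) => (typeof model === 'string' ? model : model.name))
  : [];

console.log(normalized); // ['llama3', 'mistral-small']

This also appears to be why the spec in the next hunk switches from toBe to toEqual: the mapped result is a new array that is equal by value, not by reference.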
@@ -254,8 +254,8 @@ describe('loadConfigModels', () => {
    // For groq and ollama, since the apiKey is "user_provided", models should not be fetched
    // Depending on your implementation's behavior regarding "default" models without fetching,
    // you may need to adjust the following assertions:
    expect(result.groq).toBe(exampleConfig.endpoints.custom[2].models.default);
    expect(result.ollama).toBe(exampleConfig.endpoints.custom[3].models.default);
    expect(result.groq).toEqual(exampleConfig.endpoints.custom[2].models.default);
    expect(result.ollama).toEqual(exampleConfig.endpoints.custom[3].models.default);

    // Verifying fetchModels was not called for groq and ollama
    expect(fetchModels).not.toHaveBeenCalledWith(
@@ -1,16 +1,14 @@
const axios = require('axios');
const { logAxiosError } = require('@librechat/api');
const { logger } = require('@librechat/data-schemas');
const { logAxiosError, processTextWithTokenLimit } = require('@librechat/api');
const {
  FileSources,
  VisionModes,
  ImageDetail,
  ContentTypes,
  EModelEndpoint,
  mergeFileConfig,
} = require('librechat-data-provider');
const { getStrategyFunctions } = require('~/server/services/Files/strategies');
const countTokens = require('~/server/utils/countTokens');

/**
 * Converts a readable stream to a base64 encoded string.
@@ -88,15 +86,14 @@ const blobStorageSources = new Set([FileSources.azure_blob, FileSources.s3]);
 * @param {Array<MongoFile>} files - The array of files to encode and format.
 * @param {EModelEndpoint} [endpoint] - Optional: The endpoint for the image.
 * @param {string} [mode] - Optional: The endpoint mode for the image.
 * @returns {Promise<{ text: string; files: MongoFile[]; image_urls: MessageContentImageUrl[] }>} - A promise that resolves to the result object containing the encoded images and file details.
 * @returns {Promise<{ files: MongoFile[]; image_urls: MessageContentImageUrl[] }>} - A promise that resolves to the result object containing the encoded images and file details.
 */
async function encodeAndFormat(req, files, endpoint, mode) {
  const promises = [];
  /** @type {Record<FileSources, Pick<ReturnType<typeof getStrategyFunctions>, 'prepareImagePayload' | 'getDownloadStream'>>} */
  const encodingMethods = {};
  /** @type {{ text: string; files: MongoFile[]; image_urls: MessageContentImageUrl[] }} */
  /** @type {{ files: MongoFile[]; image_urls: MessageContentImageUrl[] }} */
  const result = {
    text: '',
    files: [],
    image_urls: [],
  };
@@ -105,29 +102,9 @@ async function encodeAndFormat(req, files, endpoint, mode) {
    return result;
  }

  const fileTokenLimit =
    req.body?.fileTokenLimit ?? mergeFileConfig(req.config?.fileConfig).fileTokenLimit;

  for (let file of files) {
    /** @type {FileSources} */
    const source = file.source ?? FileSources.local;
    if (source === FileSources.text && file.text) {
      let fileText = file.text;

      const { text: limitedText, wasTruncated } = await processTextWithTokenLimit({
        text: fileText,
        tokenLimit: fileTokenLimit,
        tokenCountFn: (text) => countTokens(text),
      });

      if (wasTruncated) {
        logger.debug(
          `[encodeAndFormat] Text content truncated for file: ${file.filename} due to token limits`,
        );
      }

      result.text += `${!result.text ? 'Attached document(s):\n```md' : '\n\n---\n\n'}# "${file.filename}"\n${limitedText}\n`;
    }

    if (!file.height) {
      promises.push([file, null]);
@@ -165,10 +142,6 @@ async function encodeAndFormat(req, files, endpoint, mode) {
    promises.push(preparePayload(req, file));
  }

  if (result.text) {
    result.text += '\n```';
  }

  const detail = req.body.imageDetail ?? ImageDetail.auto;

  /** @type {Array<[MongoFile, string]>} */
@@ -508,7 +508,10 @@ const processAgentFileUpload = async ({ req, res, metadata }) => {
  const { file } = req;
  const appConfig = req.config;
  const { agent_id, tool_resource, file_id, temp_file_id = null } = metadata;
  if (agent_id && !tool_resource) {

  let messageAttachment = !!metadata.message_file;

  if (agent_id && !tool_resource && !messageAttachment) {
    throw new Error('No tool resource provided for agent file upload');
  }

@@ -516,7 +519,6 @@ const processAgentFileUpload = async ({ req, res, metadata }) => {
    throw new Error('Image uploads are not supported for file search tool resources');
  }

  let messageAttachment = !!metadata.message_file;
  if (!messageAttachment && !agent_id) {
    throw new Error('No agent ID provided for agent file upload');
  }

@@ -10,6 +10,15 @@ const importConversations = async (job) => {
  const { filepath, requestUserId } = job;
  try {
    logger.debug(`user: ${requestUserId} | Importing conversation(s) from file...`);

    /* error if file is too large */
    const fileInfo = await fs.stat(filepath);
    if (fileInfo.size > process.env.CONVERSATION_IMPORT_MAX_FILE_SIZE_BYTES) {
      throw new Error(
        `File size is ${fileInfo.size} bytes. It exceeds the maximum limit of ${process.env.CONVERSATION_IMPORT_MAX_FILE_SIZE_BYTES} bytes.`,
      );
    }

    const fileData = await fs.readFile(filepath, 'utf8');
    const jsonData = JSON.parse(fileData);
    const importer = getImporter(jsonData);
@@ -17,6 +26,7 @@ const importConversations = async (job) => {
    logger.debug(`user: ${requestUserId} | Finished importing conversations`);
  } catch (error) {
    logger.error(`user: ${requestUserId} | Failed to import conversation: `, error);
    throw error; // throw error all the way up so request does not return success
  } finally {
    try {
      await fs.unlink(filepath);
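Note on the size guard above: fileInfo.size (a number) is compared against the raw environment string, which JavaScript coerces for the > operator, and the comparison is simply false when the variable is unset, so no limit is enforced in that case. A quick behavior sketch:

const size = 5_000_000;

console.log(size > '1048576');      // true  (the env string is coerced to a number)
console.log(size > undefined);      // false (variable unset, so the guard never triggers)
console.log(size > 'not-a-number'); // false (NaN comparisons are always false)

This matches the /config route earlier in the diff, which exposes the limit as parseInt(...) or 0 when unset.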
@@ -1,4 +1,5 @@
const undici = require('undici');
const { get } = require('lodash');
const fetch = require('node-fetch');
const passport = require('passport');
const client = require('openid-client');
@@ -329,6 +330,12 @@ async function setupOpenId() {
        : 'OPENID_GENERATE_NONCE=false - Standard flow without explicit nonce or metadata',
    });

    // Set of env variables that specify how to set if a user is an admin
    // If not set, all users will be treated as regular users
    const adminRole = process.env.OPENID_ADMIN_ROLE;
    const adminRoleParameterPath = process.env.OPENID_ADMIN_ROLE_PARAMETER_PATH;
    const adminRoleTokenKind = process.env.OPENID_ADMIN_ROLE_TOKEN_KIND;

    const openidLogin = new CustomOpenIDStrategy(
      {
        config: openidConfig,
@@ -386,20 +393,19 @@ async function setupOpenId() {
          } else if (requiredRoleTokenKind === 'id') {
            decodedToken = jwtDecode(tokenset.id_token);
          }
          const pathParts = requiredRoleParameterPath.split('.');
          let found = true;
          let roles = pathParts.reduce((o, key) => {
            if (o === null || o === undefined || !(key in o)) {
              found = false;
              return [];
            }
            return o[key];
          }, decodedToken);

          if (!found) {
          let roles = get(decodedToken, requiredRoleParameterPath);
          if (!roles || (!Array.isArray(roles) && typeof roles !== 'string')) {
            logger.error(
              `[openidStrategy] Key '${requiredRoleParameterPath}' not found in ${requiredRoleTokenKind} token!`,
              `[openidStrategy] Key '${requiredRoleParameterPath}' not found or invalid type in ${requiredRoleTokenKind} token!`,
            );
            const rolesList =
              requiredRoles.length === 1
                ? `"${requiredRoles[0]}"`
                : `one of: ${requiredRoles.map((r) => `"${r}"`).join(', ')}`;
            return done(null, false, {
              message: `You must have ${rolesList} role to log in.`,
            });
          }

          if (!requiredRoles.some((role) => roles.includes(role))) {
@@ -447,6 +453,50 @@ async function setupOpenId() {
          }
        }

        if (adminRole && adminRoleParameterPath && adminRoleTokenKind) {
          let adminRoleObject;
          switch (adminRoleTokenKind) {
            case 'access':
              adminRoleObject = jwtDecode(tokenset.access_token);
              break;
            case 'id':
              adminRoleObject = jwtDecode(tokenset.id_token);
              break;
            case 'userinfo':
              adminRoleObject = userinfo;
              break;
            default:
              logger.error(
                `[openidStrategy] Invalid admin role token kind: ${adminRoleTokenKind}. Must be one of 'access', 'id', or 'userinfo'.`,
              );
              return done(new Error('Invalid admin role token kind'));
          }

          const adminRoles = get(adminRoleObject, adminRoleParameterPath);

          // Accept 3 types of values for the object extracted from adminRoleParameterPath:
          // 1. A boolean value indicating if the user is an admin
          // 2. A string with a single role name
          // 3. An array of role names

          if (
            adminRoles &&
            (adminRoles === true ||
              adminRoles === adminRole ||
              (Array.isArray(adminRoles) && adminRoles.includes(adminRole)))
          ) {
            user.role = 'ADMIN';
            logger.info(
              `[openidStrategy] User ${username} is an admin based on role: ${adminRole}`,
            );
          } else if (user.role === 'ADMIN') {
            user.role = 'USER';
            logger.info(
              `[openidStrategy] User ${username} demoted from admin - role no longer present in token`,
            );
          }
        }

        if (!!userinfo && userinfo.picture && !user.avatar?.includes('manual=true')) {
          /** @type {string | undefined} */
          const imageUrl = userinfo.picture;
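Both the required-role and admin-role lookups now go through lodash get, so a dotted OPENID_*_PARAMETER_PATH can reach nested claims, and the admin check accepts a boolean, a single string, or an array. A sketch of the lookup the strategy performs; the env names come from the diff, the claim payload is made up.

const { get } = require('lodash');

process.env.OPENID_ADMIN_ROLE = 'admin';
process.env.OPENID_ADMIN_ROLE_PARAMETER_PATH = 'realm_access.roles';
process.env.OPENID_ADMIN_ROLE_TOKEN_KIND = 'access';

// Hypothetical decoded access token.
const decodedAccessToken = { realm_access: { roles: ['admin', 'user'] } };
const adminRoles = get(decodedAccessToken, process.env.OPENID_ADMIN_ROLE_PARAMETER_PATH);

const isAdmin =
  adminRoles === true ||
  adminRoles === process.env.OPENID_ADMIN_ROLE ||
  (Array.isArray(adminRoles) && adminRoles.includes(process.env.OPENID_ADMIN_ROLE));

console.log(isAdmin); // true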
@@ -125,6 +125,9 @@ describe('setupOpenId', () => {
    process.env.OPENID_REQUIRED_ROLE = 'requiredRole';
    process.env.OPENID_REQUIRED_ROLE_PARAMETER_PATH = 'roles';
    process.env.OPENID_REQUIRED_ROLE_TOKEN_KIND = 'id';
    process.env.OPENID_ADMIN_ROLE = 'admin';
    process.env.OPENID_ADMIN_ROLE_PARAMETER_PATH = 'permissions';
    process.env.OPENID_ADMIN_ROLE_TOKEN_KIND = 'id';
    delete process.env.OPENID_USERNAME_CLAIM;
    delete process.env.OPENID_NAME_CLAIM;
    delete process.env.PROXY;
@@ -133,6 +136,7 @@ describe('setupOpenId', () => {
    // Default jwtDecode mock returns a token that includes the required role.
    jwtDecode.mockReturnValue({
      roles: ['requiredRole'],
      permissions: ['admin'],
    });

    // By default, assume that no user is found, so createUser will be called
@@ -441,4 +445,475 @@ describe('setupOpenId', () => {
    expect(callOptions.usePKCE).toBe(false);
    expect(callOptions.params?.code_challenge_method).toBeUndefined();
  });

  it('should set role to "ADMIN" if OPENID_ADMIN_ROLE is set and user has that role', async () => {
    // Act
    const { user } = await validate(tokenset);

    // Assert – verify that the user role is set to "ADMIN"
    expect(user.role).toBe('ADMIN');
  });

  it('should not set user role if OPENID_ADMIN_ROLE is set but the user does not have that role', async () => {
    // Arrange – simulate a token without the admin permission
    jwtDecode.mockReturnValue({
      roles: ['requiredRole'],
      permissions: ['not-admin'],
    });

    // Act
    const { user } = await validate(tokenset);

    // Assert – verify that the user role is not defined
    expect(user.role).toBeUndefined();
  });

  it('should demote existing admin user when admin role is removed from token', async () => {
    // Arrange – simulate an existing user who is currently an admin
    const existingAdminUser = {
      _id: 'existingAdminId',
      provider: 'openid',
      email: tokenset.claims().email,
      openidId: tokenset.claims().sub,
      username: 'adminuser',
      name: 'Admin User',
      role: 'ADMIN',
    };

    findUser.mockImplementation(async (query) => {
      if (query.openidId === tokenset.claims().sub || query.email === tokenset.claims().email) {
        return existingAdminUser;
      }
      return null;
    });

    // Token without admin permission
    jwtDecode.mockReturnValue({
      roles: ['requiredRole'],
      permissions: ['not-admin'],
    });

    const { logger } = require('@librechat/data-schemas');

    // Act
    const { user } = await validate(tokenset);

    // Assert – verify that the user was demoted
    expect(user.role).toBe('USER');
    expect(updateUser).toHaveBeenCalledWith(
      existingAdminUser._id,
      expect.objectContaining({
        role: 'USER',
      }),
    );
    expect(logger.info).toHaveBeenCalledWith(
      expect.stringContaining('demoted from admin - role no longer present in token'),
    );
  });

  it('should NOT demote admin user when admin role env vars are not configured', async () => {
    // Arrange – remove admin role env vars
    delete process.env.OPENID_ADMIN_ROLE;
    delete process.env.OPENID_ADMIN_ROLE_PARAMETER_PATH;
    delete process.env.OPENID_ADMIN_ROLE_TOKEN_KIND;

    await setupOpenId();
    verifyCallback = require('openid-client/passport').__getVerifyCallback();

    // Simulate an existing admin user
    const existingAdminUser = {
      _id: 'existingAdminId',
      provider: 'openid',
      email: tokenset.claims().email,
      openidId: tokenset.claims().sub,
      username: 'adminuser',
      name: 'Admin User',
      role: 'ADMIN',
    };

    findUser.mockImplementation(async (query) => {
      if (query.openidId === tokenset.claims().sub || query.email === tokenset.claims().email) {
        return existingAdminUser;
      }
      return null;
    });

    jwtDecode.mockReturnValue({
      roles: ['requiredRole'],
    });

    // Act
    const { user } = await validate(tokenset);

    // Assert – verify that the admin user was NOT demoted
    expect(user.role).toBe('ADMIN');
    expect(updateUser).toHaveBeenCalledWith(
      existingAdminUser._id,
      expect.objectContaining({
        role: 'ADMIN',
      }),
    );
  });

  describe('lodash get - nested path extraction', () => {
    it('should extract roles from deeply nested token path', async () => {
      process.env.OPENID_REQUIRED_ROLE = 'app-user';
      process.env.OPENID_REQUIRED_ROLE_PARAMETER_PATH = 'resource_access.my-client.roles';

      jwtDecode.mockReturnValue({
        resource_access: {
          'my-client': {
            roles: ['app-user', 'viewer'],
          },
        },
      });

      await setupOpenId();
      verifyCallback = require('openid-client/passport').__getVerifyCallback();

      const { user } = await validate(tokenset);

      expect(user).toBeTruthy();
      expect(user.email).toBe(tokenset.claims().email);
    });

    it('should extract roles from three-level nested path', async () => {
      process.env.OPENID_REQUIRED_ROLE = 'editor';
      process.env.OPENID_REQUIRED_ROLE_PARAMETER_PATH = 'data.access.permissions.roles';

      jwtDecode.mockReturnValue({
        data: {
          access: {
            permissions: {
              roles: ['editor', 'reader'],
            },
          },
        },
      });

      await setupOpenId();
      verifyCallback = require('openid-client/passport').__getVerifyCallback();

      const { user } = await validate(tokenset);

      expect(user).toBeTruthy();
    });

    it('should log error and reject login when required role path does not exist in token', async () => {
      const { logger } = require('@librechat/data-schemas');
      process.env.OPENID_REQUIRED_ROLE = 'app-user';
      process.env.OPENID_REQUIRED_ROLE_PARAMETER_PATH = 'resource_access.nonexistent.roles';

      jwtDecode.mockReturnValue({
        resource_access: {
          'my-client': {
            roles: ['app-user'],
          },
        },
      });

      await setupOpenId();
      verifyCallback = require('openid-client/passport').__getVerifyCallback();

      const { user, details } = await validate(tokenset);

      expect(logger.error).toHaveBeenCalledWith(
        expect.stringContaining(
          "Key 'resource_access.nonexistent.roles' not found or invalid type in id token!",
        ),
      );
      expect(user).toBe(false);
      expect(details.message).toContain('role to log in');
    });

    it('should handle missing intermediate nested path gracefully', async () => {
      const { logger } = require('@librechat/data-schemas');
      process.env.OPENID_REQUIRED_ROLE = 'user';
      process.env.OPENID_REQUIRED_ROLE_PARAMETER_PATH = 'org.team.roles';

      jwtDecode.mockReturnValue({
        org: {
          other: 'value',
        },
      });

      await setupOpenId();
      verifyCallback = require('openid-client/passport').__getVerifyCallback();

      const { user } = await validate(tokenset);

      expect(logger.error).toHaveBeenCalledWith(
        expect.stringContaining("Key 'org.team.roles' not found or invalid type in id token!"),
      );
      expect(user).toBe(false);
    });

    it('should extract admin role from nested path in access token', async () => {
      process.env.OPENID_ADMIN_ROLE = 'admin';
      process.env.OPENID_ADMIN_ROLE_PARAMETER_PATH = 'realm_access.roles';
      process.env.OPENID_ADMIN_ROLE_TOKEN_KIND = 'access';

      jwtDecode.mockImplementation((token) => {
        if (token === 'fake_access_token') {
          return {
            realm_access: {
              roles: ['admin', 'user'],
            },
          };
        }
        return {
          roles: ['requiredRole'],
        };
      });

      await setupOpenId();
      verifyCallback = require('openid-client/passport').__getVerifyCallback();

      const { user } = await validate(tokenset);

      expect(user.role).toBe('ADMIN');
    });

    it('should extract admin role from nested path in userinfo', async () => {
      process.env.OPENID_ADMIN_ROLE = 'admin';
      process.env.OPENID_ADMIN_ROLE_PARAMETER_PATH = 'organization.permissions';
      process.env.OPENID_ADMIN_ROLE_TOKEN_KIND = 'userinfo';

      const userinfoWithNestedGroups = {
        ...tokenset.claims(),
        organization: {
          permissions: ['admin', 'write'],
        },
      };

      require('openid-client').fetchUserInfo.mockResolvedValue({
        organization: {
          permissions: ['admin', 'write'],
        },
      });

      jwtDecode.mockReturnValue({
        roles: ['requiredRole'],
      });

      await setupOpenId();
      verifyCallback = require('openid-client/passport').__getVerifyCallback();

      const { user } = await validate({
        ...tokenset,
        claims: () => userinfoWithNestedGroups,
      });

      expect(user.role).toBe('ADMIN');
    });

    it('should handle boolean admin role value', async () => {
      process.env.OPENID_ADMIN_ROLE = 'admin';
      process.env.OPENID_ADMIN_ROLE_PARAMETER_PATH = 'is_admin';

      jwtDecode.mockReturnValue({
        roles: ['requiredRole'],
        is_admin: true,
      });

      await setupOpenId();
      verifyCallback = require('openid-client/passport').__getVerifyCallback();

      const { user } = await validate(tokenset);

      expect(user.role).toBe('ADMIN');
    });

    it('should handle string admin role value matching exactly', async () => {
      process.env.OPENID_ADMIN_ROLE = 'super-admin';
      process.env.OPENID_ADMIN_ROLE_PARAMETER_PATH = 'role';

      jwtDecode.mockReturnValue({
        roles: ['requiredRole'],
        role: 'super-admin',
      });

      await setupOpenId();
      verifyCallback = require('openid-client/passport').__getVerifyCallback();

      const { user } = await validate(tokenset);

      expect(user.role).toBe('ADMIN');
    });

    it('should not set admin role when string value does not match', async () => {
      process.env.OPENID_ADMIN_ROLE = 'super-admin';
      process.env.OPENID_ADMIN_ROLE_PARAMETER_PATH = 'role';

      jwtDecode.mockReturnValue({
        roles: ['requiredRole'],
        role: 'regular-user',
      });

      await setupOpenId();
      verifyCallback = require('openid-client/passport').__getVerifyCallback();

      const { user } = await validate(tokenset);

      expect(user.role).toBeUndefined();
    });

    it('should handle array admin role value', async () => {
      process.env.OPENID_ADMIN_ROLE = 'site-admin';
      process.env.OPENID_ADMIN_ROLE_PARAMETER_PATH = 'app_roles';

      jwtDecode.mockReturnValue({
        roles: ['requiredRole'],
        app_roles: ['user', 'site-admin', 'moderator'],
      });

      await setupOpenId();
      verifyCallback = require('openid-client/passport').__getVerifyCallback();

      const { user } = await validate(tokenset);

      expect(user.role).toBe('ADMIN');
    });

    it('should not set admin when role is not in array', async () => {
      process.env.OPENID_ADMIN_ROLE = 'site-admin';
      process.env.OPENID_ADMIN_ROLE_PARAMETER_PATH = 'app_roles';

      jwtDecode.mockReturnValue({
        roles: ['requiredRole'],
        app_roles: ['user', 'moderator'],
      });

      await setupOpenId();
      verifyCallback = require('openid-client/passport').__getVerifyCallback();

      const { user } = await validate(tokenset);

      expect(user.role).toBeUndefined();
    });

    it('should handle nested path with special characters in keys', async () => {
      process.env.OPENID_REQUIRED_ROLE = 'app-user';
      process.env.OPENID_REQUIRED_ROLE_PARAMETER_PATH = 'resource_access.my-app-123.roles';

      jwtDecode.mockReturnValue({
        resource_access: {
          'my-app-123': {
            roles: ['app-user'],
          },
        },
      });

      await setupOpenId();
      verifyCallback = require('openid-client/passport').__getVerifyCallback();

      const { user } = await validate(tokenset);

      expect(user).toBeTruthy();
    });

    it('should handle empty object at nested path', async () => {
      const { logger } = require('@librechat/data-schemas');
      process.env.OPENID_REQUIRED_ROLE = 'user';
      process.env.OPENID_REQUIRED_ROLE_PARAMETER_PATH = 'access.roles';

      jwtDecode.mockReturnValue({
        access: {},
      });

      await setupOpenId();
      verifyCallback = require('openid-client/passport').__getVerifyCallback();

      const { user } = await validate(tokenset);

      expect(logger.error).toHaveBeenCalledWith(
        expect.stringContaining("Key 'access.roles' not found or invalid type in id token!"),
      );
      expect(user).toBe(false);
    });

    it('should handle null value at intermediate path', async () => {
      const { logger } = require('@librechat/data-schemas');
      process.env.OPENID_REQUIRED_ROLE = 'user';
      process.env.OPENID_REQUIRED_ROLE_PARAMETER_PATH = 'data.roles';

      jwtDecode.mockReturnValue({
        data: null,
      });

      await setupOpenId();
      verifyCallback = require('openid-client/passport').__getVerifyCallback();

      const { user } = await validate(tokenset);

      expect(logger.error).toHaveBeenCalledWith(
        expect.stringContaining("Key 'data.roles' not found or invalid type in id token!"),
      );
      expect(user).toBe(false);
    });

    it('should reject login with invalid admin role token kind', async () => {
      process.env.OPENID_ADMIN_ROLE = 'admin';
      process.env.OPENID_ADMIN_ROLE_PARAMETER_PATH = 'roles';
      process.env.OPENID_ADMIN_ROLE_TOKEN_KIND = 'invalid';

      const { logger } = require('@librechat/data-schemas');

      jwtDecode.mockReturnValue({
        roles: ['requiredRole', 'admin'],
      });

      await setupOpenId();
      verifyCallback = require('openid-client/passport').__getVerifyCallback();

      await expect(validate(tokenset)).rejects.toThrow('Invalid admin role token kind');

      expect(logger.error).toHaveBeenCalledWith(
        expect.stringContaining(
          "Invalid admin role token kind: invalid. Must be one of 'access', 'id', or 'userinfo'",
        ),
      );
    });

    it('should reject login when roles path returns invalid type (object)', async () => {
      const { logger } = require('@librechat/data-schemas');
      process.env.OPENID_REQUIRED_ROLE = 'app-user';
      process.env.OPENID_REQUIRED_ROLE_PARAMETER_PATH = 'roles';

      jwtDecode.mockReturnValue({
        roles: { admin: true, user: false },
      });

      await setupOpenId();
      verifyCallback = require('openid-client/passport').__getVerifyCallback();

      const { user, details } = await validate(tokenset);

      expect(logger.error).toHaveBeenCalledWith(
        expect.stringContaining("Key 'roles' not found or invalid type in id token!"),
      );
      expect(user).toBe(false);
      expect(details.message).toContain('role to log in');
    });

    it('should reject login when roles path returns invalid type (number)', async () => {
      const { logger } = require('@librechat/data-schemas');
      process.env.OPENID_REQUIRED_ROLE = 'user';
      process.env.OPENID_REQUIRED_ROLE_PARAMETER_PATH = 'roleCount';

      jwtDecode.mockReturnValue({
        roleCount: 5,
      });

      await setupOpenId();
      verifyCallback = require('openid-client/passport').__getVerifyCallback();

      const { user } = await validate(tokenset);

      expect(logger.error).toHaveBeenCalledWith(
        expect.stringContaining("Key 'roleCount' not found or invalid type in id token!"),
      );
      expect(user).toBe(false);
    });
  });
});