mirror of
https://github.com/danny-avila/LibreChat.git
synced 2026-04-07 00:15:23 +02:00
Merge branch 'main' into aron/data-retention-upstream
This commit is contained in:
commit
7c0768c2de
63 changed files with 3107 additions and 409 deletions
1
.gitignore
vendored
1
.gitignore
vendored
|
|
@ -137,6 +137,7 @@ helm/**/.values.yaml
|
|||
|
||||
# AI Assistants
|
||||
/.claude/
|
||||
/.codex/
|
||||
/.cursor/
|
||||
/.copilot/
|
||||
/.aider/
|
||||
|
|
|
|||
|
|
@ -118,7 +118,7 @@
|
|||
},
|
||||
"devDependencies": {
|
||||
"jest": "^30.2.0",
|
||||
"mongodb-memory-server": "^10.1.4",
|
||||
"mongodb-memory-server": "^11.0.1",
|
||||
"nodemon": "^3.0.3",
|
||||
"supertest": "^7.1.0"
|
||||
}
|
||||
|
|
|
|||
|
|
@ -311,6 +311,87 @@ describe('MCP Routes', () => {
|
|||
expect(response.headers.location).toBe(`${basePath}/oauth/error?error=access_denied`);
|
||||
});
|
||||
|
||||
describe('OAuth error callback failFlow', () => {
|
||||
it('should fail the flow when OAuth error is received with valid CSRF cookie', async () => {
|
||||
const flowId = 'test-user-id:test-server';
|
||||
const mockFlowManager = {
|
||||
failFlow: jest.fn().mockResolvedValue(true),
|
||||
};
|
||||
|
||||
getLogStores.mockReturnValueOnce({});
|
||||
require('~/config').getFlowStateManager.mockReturnValueOnce(mockFlowManager);
|
||||
MCPOAuthHandler.resolveStateToFlowId.mockResolvedValueOnce(flowId);
|
||||
|
||||
const csrfToken = generateTestCsrfToken(flowId);
|
||||
const response = await request(app)
|
||||
.get('/api/mcp/test-server/oauth/callback')
|
||||
.set('Cookie', [`oauth_csrf=${csrfToken}`])
|
||||
.query({
|
||||
error: 'invalid_client',
|
||||
state: flowId,
|
||||
});
|
||||
const basePath = getBasePath();
|
||||
|
||||
expect(response.status).toBe(302);
|
||||
expect(response.headers.location).toBe(`${basePath}/oauth/error?error=invalid_client`);
|
||||
expect(mockFlowManager.failFlow).toHaveBeenCalledWith(
|
||||
flowId,
|
||||
'mcp_oauth',
|
||||
'invalid_client',
|
||||
);
|
||||
});
|
||||
|
||||
it('should fail the flow when OAuth error is received with valid session cookie', async () => {
|
||||
const flowId = 'test-user-id:test-server';
|
||||
const mockFlowManager = {
|
||||
failFlow: jest.fn().mockResolvedValue(true),
|
||||
};
|
||||
|
||||
getLogStores.mockReturnValueOnce({});
|
||||
require('~/config').getFlowStateManager.mockReturnValueOnce(mockFlowManager);
|
||||
MCPOAuthHandler.resolveStateToFlowId.mockResolvedValueOnce(flowId);
|
||||
|
||||
const sessionToken = generateTestCsrfToken('test-user-id');
|
||||
const response = await request(app)
|
||||
.get('/api/mcp/test-server/oauth/callback')
|
||||
.set('Cookie', [`oauth_session=${sessionToken}`])
|
||||
.query({
|
||||
error: 'invalid_client',
|
||||
state: flowId,
|
||||
});
|
||||
const basePath = getBasePath();
|
||||
|
||||
expect(response.status).toBe(302);
|
||||
expect(response.headers.location).toBe(`${basePath}/oauth/error?error=invalid_client`);
|
||||
expect(mockFlowManager.failFlow).toHaveBeenCalledWith(
|
||||
flowId,
|
||||
'mcp_oauth',
|
||||
'invalid_client',
|
||||
);
|
||||
});
|
||||
|
||||
it('should NOT fail the flow when OAuth error is received without cookies (DoS prevention)', async () => {
|
||||
const flowId = 'test-user-id:test-server';
|
||||
const mockFlowManager = {
|
||||
failFlow: jest.fn(),
|
||||
};
|
||||
|
||||
getLogStores.mockReturnValueOnce({});
|
||||
require('~/config').getFlowStateManager.mockReturnValueOnce(mockFlowManager);
|
||||
MCPOAuthHandler.resolveStateToFlowId.mockResolvedValueOnce(flowId);
|
||||
|
||||
const response = await request(app).get('/api/mcp/test-server/oauth/callback').query({
|
||||
error: 'invalid_client',
|
||||
state: flowId,
|
||||
});
|
||||
const basePath = getBasePath();
|
||||
|
||||
expect(response.status).toBe(302);
|
||||
expect(response.headers.location).toBe(`${basePath}/oauth/error?error=invalid_client`);
|
||||
expect(mockFlowManager.failFlow).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
it('should redirect to error page when code is missing', async () => {
|
||||
const response = await request(app).get('/api/mcp/test-server/oauth/callback').query({
|
||||
state: 'test-user-id:test-server',
|
||||
|
|
|
|||
155
api/server/routes/__tests__/roles.spec.js
Normal file
155
api/server/routes/__tests__/roles.spec.js
Normal file
|
|
@ -0,0 +1,155 @@
|
|||
const express = require('express');
|
||||
const request = require('supertest');
|
||||
const { SystemRoles, roleDefaults } = require('librechat-data-provider');
|
||||
|
||||
const mockGetRoleByName = jest.fn();
|
||||
const mockHasCapability = jest.fn();
|
||||
|
||||
jest.mock('~/server/middleware', () => ({
|
||||
requireJwtAuth: (_req, _res, next) => next(),
|
||||
}));
|
||||
|
||||
jest.mock('~/server/middleware/roles/capabilities', () => ({
|
||||
hasCapability: (...args) => mockHasCapability(...args),
|
||||
requireCapability: () => (_req, _res, next) => next(),
|
||||
}));
|
||||
|
||||
jest.mock('~/models', () => ({
|
||||
getRoleByName: (...args) => mockGetRoleByName(...args),
|
||||
updateRoleByName: jest.fn(),
|
||||
}));
|
||||
|
||||
const rolesRouter = require('../roles');
|
||||
|
||||
function createApp(user) {
|
||||
const app = express();
|
||||
app.use(express.json());
|
||||
app.use((req, _res, next) => {
|
||||
req.user = user;
|
||||
next();
|
||||
});
|
||||
app.use('/api/roles', rolesRouter);
|
||||
return app;
|
||||
}
|
||||
|
||||
const staffRole = {
|
||||
name: 'STAFF',
|
||||
permissions: {
|
||||
PROMPTS: { USE: true, CREATE: false },
|
||||
},
|
||||
};
|
||||
|
||||
const userRole = roleDefaults[SystemRoles.USER];
|
||||
const adminRole = roleDefaults[SystemRoles.ADMIN];
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
mockHasCapability.mockResolvedValue(false);
|
||||
mockGetRoleByName.mockResolvedValue(null);
|
||||
});
|
||||
|
||||
describe('GET /api/roles/:roleName — isOwnRole authorization', () => {
|
||||
it('allows a custom role user to fetch their own role', async () => {
|
||||
mockGetRoleByName.mockResolvedValue(staffRole);
|
||||
const app = createApp({ id: 'u1', role: 'STAFF' });
|
||||
|
||||
const res = await request(app).get('/api/roles/STAFF');
|
||||
|
||||
expect(res.status).toBe(200);
|
||||
expect(res.body.name).toBe('STAFF');
|
||||
expect(mockGetRoleByName).toHaveBeenCalledWith('STAFF', '-_id -__v');
|
||||
});
|
||||
|
||||
it('returns 403 when a custom role user requests a different custom role', async () => {
|
||||
const app = createApp({ id: 'u1', role: 'STAFF' });
|
||||
|
||||
const res = await request(app).get('/api/roles/MANAGER');
|
||||
|
||||
expect(res.status).toBe(403);
|
||||
expect(mockGetRoleByName).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('returns 403 when a custom role user requests ADMIN', async () => {
|
||||
const app = createApp({ id: 'u1', role: 'STAFF' });
|
||||
|
||||
const res = await request(app).get('/api/roles/ADMIN');
|
||||
|
||||
expect(res.status).toBe(403);
|
||||
expect(mockGetRoleByName).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('allows USER to fetch the USER role (roleDefaults key)', async () => {
|
||||
mockGetRoleByName.mockResolvedValue(userRole);
|
||||
const app = createApp({ id: 'u1', role: SystemRoles.USER });
|
||||
|
||||
const res = await request(app).get(`/api/roles/${SystemRoles.USER}`);
|
||||
|
||||
expect(res.status).toBe(200);
|
||||
});
|
||||
|
||||
it('returns 403 when USER requests the ADMIN role', async () => {
|
||||
const app = createApp({ id: 'u1', role: SystemRoles.USER });
|
||||
|
||||
const res = await request(app).get(`/api/roles/${SystemRoles.ADMIN}`);
|
||||
|
||||
expect(res.status).toBe(403);
|
||||
});
|
||||
|
||||
it('allows ADMIN user to fetch their own ADMIN role via isOwnRole', async () => {
|
||||
mockHasCapability.mockResolvedValue(false);
|
||||
mockGetRoleByName.mockResolvedValue(adminRole);
|
||||
const app = createApp({ id: 'u1', role: SystemRoles.ADMIN });
|
||||
|
||||
const res = await request(app).get(`/api/roles/${SystemRoles.ADMIN}`);
|
||||
|
||||
expect(res.status).toBe(200);
|
||||
});
|
||||
|
||||
it('allows any user with READ_ROLES capability to fetch any role', async () => {
|
||||
mockHasCapability.mockResolvedValue(true);
|
||||
mockGetRoleByName.mockResolvedValue(staffRole);
|
||||
const app = createApp({ id: 'u1', role: SystemRoles.USER });
|
||||
|
||||
const res = await request(app).get('/api/roles/STAFF');
|
||||
|
||||
expect(res.status).toBe(200);
|
||||
expect(res.body.name).toBe('STAFF');
|
||||
});
|
||||
|
||||
it('returns 404 when the requested role does not exist', async () => {
|
||||
mockGetRoleByName.mockResolvedValue(null);
|
||||
const app = createApp({ id: 'u1', role: 'GHOST' });
|
||||
|
||||
const res = await request(app).get('/api/roles/GHOST');
|
||||
|
||||
expect(res.status).toBe(404);
|
||||
});
|
||||
|
||||
it('returns 500 when getRoleByName throws', async () => {
|
||||
mockGetRoleByName.mockRejectedValue(new Error('db error'));
|
||||
const app = createApp({ id: 'u1', role: SystemRoles.USER });
|
||||
|
||||
const res = await request(app).get(`/api/roles/${SystemRoles.USER}`);
|
||||
|
||||
expect(res.status).toBe(500);
|
||||
});
|
||||
|
||||
it('returns 403 for prototype property names like constructor (no prototype pollution)', async () => {
|
||||
const app = createApp({ id: 'u1', role: 'STAFF' });
|
||||
|
||||
const res = await request(app).get('/api/roles/constructor');
|
||||
|
||||
expect(res.status).toBe(403);
|
||||
expect(mockGetRoleByName).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('treats hasCapability failure as no capability (does not 500)', async () => {
|
||||
mockHasCapability.mockRejectedValue(new Error('capability check failed'));
|
||||
const app = createApp({ id: 'u1', role: 'STAFF' });
|
||||
mockGetRoleByName.mockResolvedValue(staffRole);
|
||||
|
||||
const res = await request(app).get('/api/roles/STAFF');
|
||||
|
||||
expect(res.status).toBe(200);
|
||||
});
|
||||
});
|
||||
|
|
@ -3,7 +3,12 @@ const passport = require('passport');
|
|||
const crypto = require('node:crypto');
|
||||
const { CacheKeys } = require('librechat-data-provider');
|
||||
const { logger, SystemCapabilities } = require('@librechat/data-schemas');
|
||||
const { getAdminPanelUrl, exchangeAdminCode, createSetBalanceConfig } = require('@librechat/api');
|
||||
const {
|
||||
getAdminPanelUrl,
|
||||
exchangeAdminCode,
|
||||
createSetBalanceConfig,
|
||||
storeAndStripChallenge,
|
||||
} = require('@librechat/api');
|
||||
const { loginController } = require('~/server/controllers/auth/LoginController');
|
||||
const { requireCapability } = require('~/server/middleware/roles/capabilities');
|
||||
const { createOAuthHandler } = require('~/server/controllers/auth/oauth');
|
||||
|
|
@ -73,11 +78,6 @@ router.get('/oauth/openid/check', (req, res) => {
|
|||
res.status(200).json({ message: 'OpenID check successful' });
|
||||
});
|
||||
|
||||
/** PKCE challenge cache TTL: 5 minutes (enough for user to authenticate with IdP) */
|
||||
const PKCE_CHALLENGE_TTL = 5 * 60 * 1000;
|
||||
/** Regex pattern for valid PKCE challenges: 64 hex characters (SHA-256 hex digest) */
|
||||
const PKCE_CHALLENGE_PATTERN = /^[a-f0-9]{64}$/;
|
||||
|
||||
/**
|
||||
* Generates a random hex state string for OAuth flows.
|
||||
* @returns {string} A 32-byte random hex string.
|
||||
|
|
@ -86,27 +86,6 @@ function generateState() {
|
|||
return crypto.randomBytes(32).toString('hex');
|
||||
}
|
||||
|
||||
/**
|
||||
* Stores a PKCE challenge in cache keyed by state.
|
||||
* @param {string} state - The OAuth state value.
|
||||
* @param {string | undefined} codeChallenge - The PKCE code_challenge from query params.
|
||||
* @param {string} provider - Provider name for logging.
|
||||
* @returns {Promise<boolean>} True if stored successfully or no challenge provided.
|
||||
*/
|
||||
async function storePkceChallenge(state, codeChallenge, provider) {
|
||||
if (typeof codeChallenge !== 'string' || !PKCE_CHALLENGE_PATTERN.test(codeChallenge)) {
|
||||
return true;
|
||||
}
|
||||
try {
|
||||
const cache = getLogStores(CacheKeys.ADMIN_OAUTH_EXCHANGE);
|
||||
await cache.set(`pkce:${state}`, codeChallenge, PKCE_CHALLENGE_TTL);
|
||||
return true;
|
||||
} catch (err) {
|
||||
logger.error(`[admin/oauth/${provider}] Failed to store PKCE challenge:`, err);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Middleware to retrieve PKCE challenge from cache using the OAuth state.
|
||||
* Reads state from req.oauthState (set by a preceding middleware).
|
||||
|
|
@ -148,7 +127,8 @@ function retrievePkceChallenge(provider) {
|
|||
|
||||
router.get('/oauth/openid', async (req, res, next) => {
|
||||
const state = generateState();
|
||||
const stored = await storePkceChallenge(state, req.query.code_challenge, 'openid');
|
||||
const cache = getLogStores(CacheKeys.ADMIN_OAUTH_EXCHANGE);
|
||||
const stored = await storeAndStripChallenge(cache, req, state, 'openid');
|
||||
if (!stored) {
|
||||
return res.redirect(
|
||||
`${getAdminPanelUrl()}/auth/openid/callback?error=pkce_store_failed&error_description=Failed+to+store+PKCE+challenge`,
|
||||
|
|
@ -185,7 +165,8 @@ router.get(
|
|||
|
||||
router.get('/oauth/saml', async (req, res, next) => {
|
||||
const state = generateState();
|
||||
const stored = await storePkceChallenge(state, req.query.code_challenge, 'saml');
|
||||
const cache = getLogStores(CacheKeys.ADMIN_OAUTH_EXCHANGE);
|
||||
const stored = await storeAndStripChallenge(cache, req, state, 'saml');
|
||||
if (!stored) {
|
||||
return res.redirect(
|
||||
`${getAdminPanelUrl()}/auth/saml/callback?error=pkce_store_failed&error_description=Failed+to+store+PKCE+challenge`,
|
||||
|
|
@ -222,7 +203,8 @@ router.post(
|
|||
|
||||
router.get('/oauth/google', async (req, res, next) => {
|
||||
const state = generateState();
|
||||
const stored = await storePkceChallenge(state, req.query.code_challenge, 'google');
|
||||
const cache = getLogStores(CacheKeys.ADMIN_OAUTH_EXCHANGE);
|
||||
const stored = await storeAndStripChallenge(cache, req, state, 'google');
|
||||
if (!stored) {
|
||||
return res.redirect(
|
||||
`${getAdminPanelUrl()}/auth/google/callback?error=pkce_store_failed&error_description=Failed+to+store+PKCE+challenge`,
|
||||
|
|
@ -260,7 +242,8 @@ router.get(
|
|||
|
||||
router.get('/oauth/github', async (req, res, next) => {
|
||||
const state = generateState();
|
||||
const stored = await storePkceChallenge(state, req.query.code_challenge, 'github');
|
||||
const cache = getLogStores(CacheKeys.ADMIN_OAUTH_EXCHANGE);
|
||||
const stored = await storeAndStripChallenge(cache, req, state, 'github');
|
||||
if (!stored) {
|
||||
return res.redirect(
|
||||
`${getAdminPanelUrl()}/auth/github/callback?error=pkce_store_failed&error_description=Failed+to+store+PKCE+challenge`,
|
||||
|
|
@ -298,7 +281,8 @@ router.get(
|
|||
|
||||
router.get('/oauth/discord', async (req, res, next) => {
|
||||
const state = generateState();
|
||||
const stored = await storePkceChallenge(state, req.query.code_challenge, 'discord');
|
||||
const cache = getLogStores(CacheKeys.ADMIN_OAUTH_EXCHANGE);
|
||||
const stored = await storeAndStripChallenge(cache, req, state, 'discord');
|
||||
if (!stored) {
|
||||
return res.redirect(
|
||||
`${getAdminPanelUrl()}/auth/discord/callback?error=pkce_store_failed&error_description=Failed+to+store+PKCE+challenge`,
|
||||
|
|
@ -336,7 +320,8 @@ router.get(
|
|||
|
||||
router.get('/oauth/facebook', async (req, res, next) => {
|
||||
const state = generateState();
|
||||
const stored = await storePkceChallenge(state, req.query.code_challenge, 'facebook');
|
||||
const cache = getLogStores(CacheKeys.ADMIN_OAUTH_EXCHANGE);
|
||||
const stored = await storeAndStripChallenge(cache, req, state, 'facebook');
|
||||
if (!stored) {
|
||||
return res.redirect(
|
||||
`${getAdminPanelUrl()}/auth/facebook/callback?error=pkce_store_failed&error_description=Failed+to+store+PKCE+challenge`,
|
||||
|
|
@ -374,7 +359,8 @@ router.get(
|
|||
|
||||
router.get('/oauth/apple', async (req, res, next) => {
|
||||
const state = generateState();
|
||||
const stored = await storePkceChallenge(state, req.query.code_challenge, 'apple');
|
||||
const cache = getLogStores(CacheKeys.ADMIN_OAUTH_EXCHANGE);
|
||||
const stored = await storeAndStripChallenge(cache, req, state, 'apple');
|
||||
if (!stored) {
|
||||
return res.redirect(
|
||||
`${getAdminPanelUrl()}/auth/apple/callback?error=pkce_store_failed&error_description=Failed+to+store+PKCE+challenge`,
|
||||
|
|
|
|||
|
|
@ -149,6 +149,29 @@ router.get('/:serverName/oauth/callback', async (req, res) => {
|
|||
|
||||
if (oauthError) {
|
||||
logger.error('[MCP OAuth] OAuth error received', { error: oauthError });
|
||||
// Gate failFlow behind callback validation to prevent DoS via leaked state
|
||||
if (state && typeof state === 'string') {
|
||||
try {
|
||||
const flowsCache = getLogStores(CacheKeys.FLOWS);
|
||||
const flowManager = getFlowStateManager(flowsCache);
|
||||
const flowId = await MCPOAuthHandler.resolveStateToFlowId(state, flowManager);
|
||||
if (flowId) {
|
||||
const flowParts = flowId.split(':');
|
||||
const [flowUserId] = flowParts;
|
||||
const hasCsrf = validateOAuthCsrf(req, res, flowId, OAUTH_CSRF_COOKIE_PATH);
|
||||
const hasSession = !hasCsrf && validateOAuthSession(req, flowUserId);
|
||||
if (hasCsrf || hasSession) {
|
||||
await flowManager.failFlow(flowId, 'mcp_oauth', String(oauthError));
|
||||
logger.debug('[MCP OAuth] Marked flow as FAILED with OAuth error', {
|
||||
flowId,
|
||||
error: oauthError,
|
||||
});
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
logger.debug('[MCP OAuth] Could not mark flow as failed', err);
|
||||
}
|
||||
}
|
||||
return res.redirect(
|
||||
`${basePath}/oauth/error?error=${encodeURIComponent(String(oauthError))}`,
|
||||
);
|
||||
|
|
|
|||
|
|
@ -71,9 +71,7 @@ const createPermissionUpdateHandler = (permissionKey) => {
|
|||
const config = permissionConfigs[permissionKey];
|
||||
|
||||
return async (req, res) => {
|
||||
const { roleName: _r } = req.params;
|
||||
// TODO: TEMP, use a better parsing for roleName
|
||||
const roleName = _r.toUpperCase();
|
||||
const { roleName } = req.params;
|
||||
const updates = req.body;
|
||||
|
||||
try {
|
||||
|
|
@ -110,9 +108,7 @@ const createPermissionUpdateHandler = (permissionKey) => {
|
|||
* Get a specific role by name
|
||||
*/
|
||||
router.get('/:roleName', async (req, res) => {
|
||||
const { roleName: _r } = req.params;
|
||||
// TODO: TEMP, use a better parsing for roleName
|
||||
const roleName = _r.toUpperCase();
|
||||
const { roleName } = req.params;
|
||||
|
||||
try {
|
||||
let hasReadRoles = false;
|
||||
|
|
@ -121,7 +117,9 @@ router.get('/:roleName', async (req, res) => {
|
|||
} catch (err) {
|
||||
logger.warn(`[GET /roles/:roleName] capability check failed: ${err.message}`);
|
||||
}
|
||||
if (!hasReadRoles && (roleName === SystemRoles.ADMIN || !roleDefaults[roleName])) {
|
||||
const isOwnRole = req.user?.role === roleName;
|
||||
const isDefaultRole = Object.hasOwn(roleDefaults, roleName);
|
||||
if (!hasReadRoles && !isOwnRole && (roleName === SystemRoles.ADMIN || !isDefaultRole)) {
|
||||
return res.status(403).send({ message: 'Unauthorized' });
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -23,7 +23,7 @@ const {
|
|||
getFlowStateManager,
|
||||
getMCPManager,
|
||||
} = require('~/config');
|
||||
const { findToken, createToken, updateToken } = require('~/models');
|
||||
const { findToken, createToken, updateToken, deleteTokens } = require('~/models');
|
||||
const { getGraphApiToken } = require('./GraphTokenService');
|
||||
const { reinitMCPServer } = require('./Tools/mcp');
|
||||
const { getAppConfig } = require('./Config');
|
||||
|
|
@ -644,6 +644,7 @@ function createToolInstance({
|
|||
findToken,
|
||||
createToken,
|
||||
updateToken,
|
||||
deleteTokens,
|
||||
},
|
||||
oauthStart,
|
||||
oauthEnd,
|
||||
|
|
|
|||
|
|
@ -81,7 +81,7 @@
|
|||
"lodash": "^4.17.23",
|
||||
"lucide-react": "^0.394.0",
|
||||
"match-sorter": "^8.1.0",
|
||||
"mermaid": "^11.13.0",
|
||||
"mermaid": "^11.14.0",
|
||||
"micromark-extension-llm-math": "^3.1.0",
|
||||
"qrcode.react": "^4.2.0",
|
||||
"rc-input-number": "^7.4.2",
|
||||
|
|
|
|||
|
|
@ -47,7 +47,6 @@ export default function Message(props: TMessageProps) {
|
|||
</div>
|
||||
</MessageContainer>
|
||||
<MultiMessage
|
||||
key={messageId}
|
||||
messageId={messageId}
|
||||
conversation={conversation}
|
||||
messagesTree={children ?? []}
|
||||
|
|
|
|||
|
|
@ -4,7 +4,7 @@ import type { Assistant, Agent } from 'librechat-data-provider';
|
|||
import type { TMessageIcon } from '~/common';
|
||||
import ConvoIconURL from '~/components/Endpoints/ConvoIconURL';
|
||||
import { useGetEndpointsQuery } from '~/data-provider';
|
||||
import { getIconEndpoint, logger } from '~/utils';
|
||||
import { getIconEndpoint } from '~/utils';
|
||||
import Icon from '~/components/Endpoints/Icon';
|
||||
|
||||
type MessageIconProps = {
|
||||
|
|
@ -19,38 +19,27 @@ type MessageIconProps = {
|
|||
* this component renders display properties only, not identity-derived content.
|
||||
*/
|
||||
export function arePropsEqual(prev: MessageIconProps, next: MessageIconProps): boolean {
|
||||
if (prev.iconData?.endpoint !== next.iconData?.endpoint) {
|
||||
return false;
|
||||
}
|
||||
if (prev.iconData?.model !== next.iconData?.model) {
|
||||
return false;
|
||||
}
|
||||
if (prev.iconData?.iconURL !== next.iconData?.iconURL) {
|
||||
return false;
|
||||
}
|
||||
if (prev.iconData?.modelLabel !== next.iconData?.modelLabel) {
|
||||
return false;
|
||||
}
|
||||
if (prev.iconData?.isCreatedByUser !== next.iconData?.isCreatedByUser) {
|
||||
return false;
|
||||
}
|
||||
if (prev.agent?.name !== next.agent?.name) {
|
||||
return false;
|
||||
}
|
||||
if (prev.agent?.avatar?.filepath !== next.agent?.avatar?.filepath) {
|
||||
return false;
|
||||
}
|
||||
if (prev.assistant?.name !== next.assistant?.name) {
|
||||
return false;
|
||||
}
|
||||
if (prev.assistant?.metadata?.avatar !== next.assistant?.metadata?.avatar) {
|
||||
return false;
|
||||
const checks: [unknown, unknown][] = [
|
||||
[prev.iconData?.endpoint, next.iconData?.endpoint],
|
||||
[prev.iconData?.model, next.iconData?.model],
|
||||
[prev.iconData?.iconURL, next.iconData?.iconURL],
|
||||
[prev.iconData?.modelLabel, next.iconData?.modelLabel],
|
||||
[prev.iconData?.isCreatedByUser, next.iconData?.isCreatedByUser],
|
||||
[prev.agent?.name, next.agent?.name],
|
||||
[prev.agent?.avatar?.filepath, next.agent?.avatar?.filepath],
|
||||
[prev.assistant?.name, next.assistant?.name],
|
||||
[prev.assistant?.metadata?.avatar, next.assistant?.metadata?.avatar],
|
||||
];
|
||||
|
||||
for (const [prevVal, nextVal] of checks) {
|
||||
if (prevVal !== nextVal) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
const MessageIcon = memo(({ iconData, assistant, agent }: MessageIconProps) => {
|
||||
logger.log('icon_data', iconData, assistant, agent);
|
||||
const { data: endpointsConfig } = useGetEndpointsQuery();
|
||||
|
||||
const agentName = agent?.name ?? '';
|
||||
|
|
|
|||
|
|
@ -179,7 +179,6 @@ export default function Message(props: TMessageProps) {
|
|||
</div>
|
||||
</div>
|
||||
<MultiMessage
|
||||
key={messageId}
|
||||
messageId={messageId}
|
||||
conversation={conversation}
|
||||
messagesTree={children ?? []}
|
||||
|
|
|
|||
|
|
@ -62,7 +62,6 @@ function MessagesViewContent({
|
|||
<>
|
||||
<div ref={screenshotTargetRef}>
|
||||
<MultiMessage
|
||||
key={conversationId}
|
||||
messagesTree={_messagesTree}
|
||||
messageId={conversationId ?? null}
|
||||
setCurrentEditId={setCurrentEditId}
|
||||
|
|
|
|||
|
|
@ -39,47 +39,40 @@ export default function MultiMessage({
|
|||
return null;
|
||||
}
|
||||
|
||||
const message = messagesTree[messagesTree.length - siblingIdx - 1] as TMessage | undefined;
|
||||
const currentSiblingIdx = messagesTree.length - siblingIdx - 1;
|
||||
const message = messagesTree[currentSiblingIdx] as TMessage | undefined;
|
||||
|
||||
if (!message) {
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* No explicit key — React uses positional reconciliation since MultiMessage
|
||||
* always renders exactly one child at this position.
|
||||
*
|
||||
* Both messageId and parentMessageId change during the SSE lifecycle
|
||||
* (client UUID → createdHandler ID → server ID), so neither can serve as a
|
||||
* stable key. Using either caused React to unmount/remount the entire subtree
|
||||
* on each SSE event, destroying memoized state and causing visible flickering.
|
||||
*
|
||||
* Without a key, React reuses the component instance and updates props in place.
|
||||
* The memo comparators on ContentRender/MessageRender handle field-level diffing,
|
||||
* and sibling switches work correctly because the message prop changes entirely.
|
||||
*/
|
||||
const sharedProps = {
|
||||
message,
|
||||
currentEditId,
|
||||
setCurrentEditId,
|
||||
siblingIdx: currentSiblingIdx,
|
||||
siblingCount: messagesTree.length,
|
||||
setSiblingIdx: setSiblingIdxRev,
|
||||
};
|
||||
|
||||
if (isAssistantsEndpoint(message.endpoint) && message.content) {
|
||||
return (
|
||||
<MessageParts
|
||||
key={message.messageId}
|
||||
message={message}
|
||||
currentEditId={currentEditId}
|
||||
setCurrentEditId={setCurrentEditId}
|
||||
siblingIdx={messagesTree.length - siblingIdx - 1}
|
||||
siblingCount={messagesTree.length}
|
||||
setSiblingIdx={setSiblingIdxRev}
|
||||
/>
|
||||
);
|
||||
return <MessageParts {...sharedProps} />;
|
||||
} else if (message.content) {
|
||||
return (
|
||||
<MessageContent
|
||||
key={message.messageId}
|
||||
message={message}
|
||||
currentEditId={currentEditId}
|
||||
setCurrentEditId={setCurrentEditId}
|
||||
siblingIdx={messagesTree.length - siblingIdx - 1}
|
||||
siblingCount={messagesTree.length}
|
||||
setSiblingIdx={setSiblingIdxRev}
|
||||
/>
|
||||
);
|
||||
return <MessageContent {...sharedProps} />;
|
||||
}
|
||||
|
||||
return (
|
||||
<Message
|
||||
key={message.messageId}
|
||||
message={message}
|
||||
currentEditId={currentEditId}
|
||||
setCurrentEditId={setCurrentEditId}
|
||||
siblingIdx={messagesTree.length - siblingIdx - 1}
|
||||
siblingCount={messagesTree.length}
|
||||
setSiblingIdx={setSiblingIdxRev}
|
||||
/>
|
||||
);
|
||||
return <Message {...sharedProps} />;
|
||||
}
|
||||
|
|
|
|||
|
|
@ -0,0 +1,169 @@
|
|||
import React from 'react';
|
||||
import { render } from '@testing-library/react';
|
||||
import { EModelEndpoint } from 'librechat-data-provider';
|
||||
import type { Agent } from 'librechat-data-provider';
|
||||
import type { TMessageIcon } from '~/common';
|
||||
|
||||
jest.mock('librechat-data-provider', () => ({
|
||||
...jest.requireActual('librechat-data-provider'),
|
||||
getEndpointField: jest.fn(() => ''),
|
||||
}));
|
||||
jest.mock('~/data-provider', () => ({
|
||||
useGetEndpointsQuery: jest.fn(() => ({ data: {} })),
|
||||
}));
|
||||
jest.mock('~/utils', () => ({
|
||||
getIconEndpoint: jest.fn(() => 'agents'),
|
||||
}));
|
||||
|
||||
const iconRenderCount = { current: 0 };
|
||||
|
||||
jest.mock('~/components/Endpoints/ConvoIconURL', () => {
|
||||
const ConvoIconURL = (props: Record<string, unknown>) => {
|
||||
iconRenderCount.current += 1;
|
||||
return <div data-testid="convo-icon-url" data-icon-url={props.iconURL as string} />;
|
||||
};
|
||||
ConvoIconURL.displayName = 'ConvoIconURL';
|
||||
return { __esModule: true, default: ConvoIconURL };
|
||||
});
|
||||
jest.mock('~/components/Endpoints/Icon', () => {
|
||||
const Icon = (props: Record<string, unknown>) => {
|
||||
iconRenderCount.current += 1;
|
||||
return <div data-testid="icon" data-icon-url={props.iconURL as string} />;
|
||||
};
|
||||
Icon.displayName = 'Icon';
|
||||
return { __esModule: true, default: Icon };
|
||||
});
|
||||
|
||||
import MessageIcon from '../MessageIcon';
|
||||
|
||||
const makeAgent = (overrides?: Partial<Agent>): Agent =>
|
||||
({
|
||||
id: 'agent_123',
|
||||
name: 'GitHub Agent',
|
||||
avatar: { filepath: '/images/agent-avatar.png' },
|
||||
...overrides,
|
||||
}) as Agent;
|
||||
|
||||
const baseIconData: TMessageIcon = {
|
||||
endpoint: EModelEndpoint.agents,
|
||||
model: 'agent_123',
|
||||
iconURL: undefined,
|
||||
modelLabel: 'GitHub Agent',
|
||||
isCreatedByUser: false,
|
||||
};
|
||||
|
||||
describe('MessageIcon render cycles', () => {
|
||||
beforeEach(() => {
|
||||
iconRenderCount.current = 0;
|
||||
});
|
||||
|
||||
it('renders once on initial mount', () => {
|
||||
render(<MessageIcon iconData={baseIconData} agent={makeAgent()} />);
|
||||
expect(iconRenderCount.current).toBe(1);
|
||||
});
|
||||
|
||||
it('does not re-render when parent re-renders with same field values but new object references', () => {
|
||||
const agent = makeAgent();
|
||||
const { rerender } = render(<MessageIcon iconData={baseIconData} agent={agent} />);
|
||||
iconRenderCount.current = 0;
|
||||
|
||||
rerender(<MessageIcon iconData={{ ...baseIconData }} agent={makeAgent()} />);
|
||||
|
||||
expect(iconRenderCount.current).toBe(0);
|
||||
});
|
||||
|
||||
it('does not re-render when agent object reference changes but name and avatar are the same', () => {
|
||||
const agent1 = makeAgent();
|
||||
const { rerender } = render(<MessageIcon iconData={baseIconData} agent={agent1} />);
|
||||
iconRenderCount.current = 0;
|
||||
|
||||
const agent2 = makeAgent({ id: 'agent_456' });
|
||||
rerender(<MessageIcon iconData={baseIconData} agent={agent2} />);
|
||||
|
||||
expect(iconRenderCount.current).toBe(0);
|
||||
});
|
||||
|
||||
it('re-renders when agent avatar filepath changes', () => {
|
||||
const agent1 = makeAgent();
|
||||
const { rerender } = render(<MessageIcon iconData={baseIconData} agent={agent1} />);
|
||||
iconRenderCount.current = 0;
|
||||
|
||||
const agent2 = makeAgent({ avatar: { filepath: '/images/new-avatar.png' } });
|
||||
rerender(<MessageIcon iconData={baseIconData} agent={agent2} />);
|
||||
|
||||
expect(iconRenderCount.current).toBe(1);
|
||||
});
|
||||
|
||||
it('re-renders when agent goes from undefined to defined (name changes from undefined to string)', () => {
|
||||
const { rerender } = render(<MessageIcon iconData={baseIconData} agent={undefined} />);
|
||||
iconRenderCount.current = 0;
|
||||
|
||||
rerender(<MessageIcon iconData={baseIconData} agent={makeAgent()} />);
|
||||
|
||||
expect(iconRenderCount.current).toBe(1);
|
||||
});
|
||||
|
||||
describe('simulates message lifecycle', () => {
|
||||
it('renders exactly twice during new message + streaming start: initial render + modelLabel update', () => {
|
||||
const initialIconData: TMessageIcon = {
|
||||
endpoint: EModelEndpoint.agents,
|
||||
model: 'agent_123',
|
||||
iconURL: undefined,
|
||||
modelLabel: '',
|
||||
isCreatedByUser: false,
|
||||
};
|
||||
const agent = makeAgent();
|
||||
|
||||
const { rerender } = render(<MessageIcon iconData={initialIconData} agent={agent} />);
|
||||
|
||||
const streamingIconData: TMessageIcon = {
|
||||
...initialIconData,
|
||||
modelLabel: 'GitHub Agent',
|
||||
};
|
||||
|
||||
rerender(<MessageIcon iconData={streamingIconData} agent={agent} />);
|
||||
|
||||
expect(iconRenderCount.current).toBe(2);
|
||||
});
|
||||
|
||||
it('does NOT re-render on subsequent streaming chunks (content changes, isSubmitting stays true)', () => {
|
||||
const iconData: TMessageIcon = {
|
||||
endpoint: EModelEndpoint.agents,
|
||||
model: 'agent_123',
|
||||
iconURL: undefined,
|
||||
modelLabel: 'GitHub Agent',
|
||||
isCreatedByUser: false,
|
||||
};
|
||||
const agent = makeAgent();
|
||||
|
||||
const { rerender } = render(<MessageIcon iconData={iconData} agent={agent} />);
|
||||
iconRenderCount.current = 0;
|
||||
|
||||
for (let i = 0; i < 5; i++) {
|
||||
rerender(<MessageIcon iconData={{ ...iconData }} agent={makeAgent()} />);
|
||||
}
|
||||
|
||||
expect(iconRenderCount.current).toBe(0);
|
||||
});
|
||||
|
||||
it('does NOT re-render when agentsMap context updates with same agent data', () => {
|
||||
const iconData: TMessageIcon = {
|
||||
endpoint: EModelEndpoint.agents,
|
||||
model: 'agent_123',
|
||||
iconURL: undefined,
|
||||
modelLabel: 'GitHub Agent',
|
||||
isCreatedByUser: false,
|
||||
};
|
||||
|
||||
const agent1 = makeAgent();
|
||||
const { rerender } = render(<MessageIcon iconData={iconData} agent={agent1} />);
|
||||
iconRenderCount.current = 0;
|
||||
|
||||
const agent2 = makeAgent();
|
||||
expect(agent1).not.toBe(agent2);
|
||||
rerender(<MessageIcon iconData={iconData} agent={agent2} />);
|
||||
|
||||
expect(iconRenderCount.current).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
@ -48,7 +48,6 @@ export default function MessageContent(props: TMessageProps) {
|
|||
</div>
|
||||
</MessageContainer>
|
||||
<MultiMessage
|
||||
key={messageId}
|
||||
messageId={messageId}
|
||||
conversation={conversation}
|
||||
messagesTree={children ?? []}
|
||||
|
|
|
|||
|
|
@ -1,8 +1,8 @@
|
|||
import { useMemo, useEffect, useState } from 'react';
|
||||
import { useEffect, useState } from 'react';
|
||||
import * as Ariakit from '@ariakit/react';
|
||||
import { ShieldEllipsis } from 'lucide-react';
|
||||
import { useForm, Controller } from 'react-hook-form';
|
||||
import { Permissions, SystemRoles, roleDefaults, PermissionTypes } from 'librechat-data-provider';
|
||||
import { Permissions, SystemRoles, PermissionTypes } from 'librechat-data-provider';
|
||||
import {
|
||||
Button,
|
||||
Switch,
|
||||
|
|
@ -15,7 +15,7 @@ import {
|
|||
} from '@librechat/client';
|
||||
import type { Control, UseFormSetValue, UseFormGetValues } from 'react-hook-form';
|
||||
import { useUpdatePeoplePickerPermissionsMutation } from '~/data-provider';
|
||||
import { useLocalize, useAuthContext } from '~/hooks';
|
||||
import { useLocalize, useAuthContext, useRoleSelector } from '~/hooks';
|
||||
|
||||
type FormValues = {
|
||||
[Permissions.VIEW_USERS]: boolean;
|
||||
|
|
@ -70,7 +70,7 @@ const LabelController: React.FC<LabelControllerProps> = ({
|
|||
const PeoplePickerAdminSettings = () => {
|
||||
const localize = useLocalize();
|
||||
const { showToast } = useToastContext();
|
||||
const { user, roles } = useAuthContext();
|
||||
const { user } = useAuthContext();
|
||||
const { mutate, isLoading } = useUpdatePeoplePickerPermissionsMutation({
|
||||
onSuccess: () => {
|
||||
showToast({ status: 'success', message: localize('com_ui_saved') });
|
||||
|
|
@ -81,15 +81,14 @@ const PeoplePickerAdminSettings = () => {
|
|||
});
|
||||
|
||||
const [isRoleMenuOpen, setIsRoleMenuOpen] = useState(false);
|
||||
const [selectedRole, setSelectedRole] = useState<SystemRoles>(SystemRoles.USER);
|
||||
|
||||
const defaultValues = useMemo(() => {
|
||||
const rolePerms = roles?.[selectedRole]?.permissions;
|
||||
if (rolePerms) {
|
||||
return rolePerms[PermissionTypes.PEOPLE_PICKER];
|
||||
}
|
||||
return roleDefaults[selectedRole].permissions[PermissionTypes.PEOPLE_PICKER];
|
||||
}, [roles, selectedRole]);
|
||||
const {
|
||||
selectedRole,
|
||||
isSelectedCustomRole,
|
||||
isCustomRoleLoading,
|
||||
isCustomRoleError,
|
||||
defaultValues,
|
||||
roleDropdownItems,
|
||||
} = useRoleSelector(PermissionTypes.PEOPLE_PICKER);
|
||||
|
||||
const {
|
||||
reset,
|
||||
|
|
@ -100,17 +99,15 @@ const PeoplePickerAdminSettings = () => {
|
|||
formState: { isSubmitting },
|
||||
} = useForm<FormValues>({
|
||||
mode: 'onChange',
|
||||
defaultValues,
|
||||
defaultValues: defaultValues as FormValues,
|
||||
});
|
||||
|
||||
useEffect(() => {
|
||||
const value = roles?.[selectedRole]?.permissions?.[PermissionTypes.PEOPLE_PICKER];
|
||||
if (value) {
|
||||
reset(value);
|
||||
} else {
|
||||
reset(roleDefaults[selectedRole].permissions[PermissionTypes.PEOPLE_PICKER]);
|
||||
if (isSelectedCustomRole && (isCustomRoleLoading || isCustomRoleError)) {
|
||||
return;
|
||||
}
|
||||
}, [roles, selectedRole, reset]);
|
||||
reset(defaultValues as FormValues);
|
||||
}, [isSelectedCustomRole, isCustomRoleLoading, isCustomRoleError, defaultValues, reset]);
|
||||
|
||||
if (user?.role !== SystemRoles.ADMIN) {
|
||||
return null;
|
||||
|
|
@ -138,21 +135,6 @@ const PeoplePickerAdminSettings = () => {
|
|||
mutate({ roleName: selectedRole, updates: data });
|
||||
};
|
||||
|
||||
const roleDropdownItems = [
|
||||
{
|
||||
label: SystemRoles.USER,
|
||||
onClick: () => {
|
||||
setSelectedRole(SystemRoles.USER);
|
||||
},
|
||||
},
|
||||
{
|
||||
label: SystemRoles.ADMIN,
|
||||
onClick: () => {
|
||||
setSelectedRole(SystemRoles.ADMIN);
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
return (
|
||||
<OGDialog>
|
||||
<OGDialogTrigger asChild>
|
||||
|
|
@ -179,7 +161,7 @@ const PeoplePickerAdminSettings = () => {
|
|||
isOpen={isRoleMenuOpen}
|
||||
setIsOpen={setIsRoleMenuOpen}
|
||||
trigger={
|
||||
<Ariakit.MenuButton className="inline-flex w-1/4 items-center justify-center rounded-lg border border-border-light bg-transparent px-2 py-1 text-text-primary transition-all ease-in-out hover:bg-surface-tertiary">
|
||||
<Ariakit.MenuButton className="inline-flex min-w-[6rem] items-center justify-center rounded-lg border border-border-light bg-transparent px-2 py-1 text-text-primary transition-all ease-in-out hover:bg-surface-tertiary">
|
||||
{selectedRole}
|
||||
</Ariakit.MenuButton>
|
||||
}
|
||||
|
|
@ -207,7 +189,11 @@ const PeoplePickerAdminSettings = () => {
|
|||
<button
|
||||
type="button"
|
||||
onClick={handleSubmit(onSubmit)}
|
||||
disabled={isSubmitting || isLoading}
|
||||
disabled={
|
||||
isSubmitting ||
|
||||
isLoading ||
|
||||
(isSelectedCustomRole && (isCustomRoleLoading || isCustomRoleError))
|
||||
}
|
||||
className="btn rounded bg-green-500 font-bold text-white transition-all hover:bg-green-600"
|
||||
>
|
||||
{localize('com_ui_save')}
|
||||
|
|
|
|||
|
|
@ -120,7 +120,7 @@ const ConversationsSection = memo(() => {
|
|||
<BookmarkNav tags={tags} setTags={setTags} />
|
||||
</Suspense>
|
||||
)}
|
||||
<SearchBar isSmallScreen={isSmallScreen} />
|
||||
{search.enabled && <SearchBar isSmallScreen={isSmallScreen} />}
|
||||
</div>
|
||||
{isSmallScreen && (
|
||||
<div
|
||||
|
|
|
|||
|
|
@ -1,8 +1,8 @@
|
|||
import { useMemo, useEffect, useState } from 'react';
|
||||
import { useEffect, useState } from 'react';
|
||||
import * as Ariakit from '@ariakit/react';
|
||||
import { ShieldEllipsis } from 'lucide-react';
|
||||
import { useForm, Controller } from 'react-hook-form';
|
||||
import { Permissions, SystemRoles, roleDefaults, PermissionTypes } from 'librechat-data-provider';
|
||||
import { Permissions, SystemRoles } from 'librechat-data-provider';
|
||||
import {
|
||||
OGDialog,
|
||||
OGDialogTitle,
|
||||
|
|
@ -13,8 +13,9 @@ import {
|
|||
DropdownPopup,
|
||||
} from '@librechat/client';
|
||||
import type { Control, UseFormSetValue, UseFormGetValues } from 'react-hook-form';
|
||||
import type { PermissionTypes } from 'librechat-data-provider';
|
||||
import type { TranslationKeys } from '~/hooks/useLocalize';
|
||||
import { useLocalize, useAuthContext } from '~/hooks';
|
||||
import { useLocalize, useAuthContext, useRoleSelector } from '~/hooks';
|
||||
|
||||
type FormValues = Record<Permissions, boolean>;
|
||||
|
||||
|
|
@ -34,7 +35,7 @@ export interface AdminSettingsDialogProps {
|
|||
menuId: string;
|
||||
/** Mutation function and loading state from the permission update hook */
|
||||
mutation: {
|
||||
mutate: (data: { roleName: SystemRoles; updates: Record<Permissions, boolean> }) => void;
|
||||
mutate: (data: { roleName: string; updates: Record<Permissions, boolean> }) => void;
|
||||
isLoading: boolean;
|
||||
};
|
||||
/** Whether to show the admin access warning when ADMIN role and USE permission is displayed (default: true) */
|
||||
|
|
@ -108,18 +109,18 @@ const AdminSettingsDialog: React.FC<AdminSettingsDialogProps> = ({
|
|||
extraContent,
|
||||
}) => {
|
||||
const localize = useLocalize();
|
||||
const { user, roles } = useAuthContext();
|
||||
const { user } = useAuthContext();
|
||||
const { mutate, isLoading } = mutation;
|
||||
|
||||
const [isRoleMenuOpen, setIsRoleMenuOpen] = useState(false);
|
||||
const [selectedRole, setSelectedRole] = useState<SystemRoles>(SystemRoles.USER);
|
||||
|
||||
const defaultValues = useMemo(() => {
|
||||
if (roles?.[selectedRole]?.permissions) {
|
||||
return roles[selectedRole]?.permissions[permissionType];
|
||||
}
|
||||
return roleDefaults[selectedRole].permissions[permissionType];
|
||||
}, [roles, selectedRole, permissionType]);
|
||||
const {
|
||||
selectedRole,
|
||||
isSelectedCustomRole,
|
||||
isCustomRoleLoading,
|
||||
isCustomRoleError,
|
||||
defaultValues,
|
||||
roleDropdownItems,
|
||||
} = useRoleSelector(permissionType);
|
||||
|
||||
const {
|
||||
reset,
|
||||
|
|
@ -134,12 +135,11 @@ const AdminSettingsDialog: React.FC<AdminSettingsDialogProps> = ({
|
|||
});
|
||||
|
||||
useEffect(() => {
|
||||
if (roles?.[selectedRole]?.permissions?.[permissionType]) {
|
||||
reset(roles[selectedRole]?.permissions[permissionType]);
|
||||
} else {
|
||||
reset(roleDefaults[selectedRole].permissions[permissionType]);
|
||||
if (isSelectedCustomRole && (isCustomRoleLoading || isCustomRoleError)) {
|
||||
return;
|
||||
}
|
||||
}, [roles, selectedRole, reset, permissionType]);
|
||||
reset(defaultValues);
|
||||
}, [isSelectedCustomRole, isCustomRoleLoading, isCustomRoleError, defaultValues, reset]);
|
||||
|
||||
if (user?.role !== SystemRoles.ADMIN) {
|
||||
return null;
|
||||
|
|
@ -149,21 +149,6 @@ const AdminSettingsDialog: React.FC<AdminSettingsDialogProps> = ({
|
|||
mutate({ roleName: selectedRole, updates: data });
|
||||
};
|
||||
|
||||
const roleDropdownItems = [
|
||||
{
|
||||
label: SystemRoles.USER,
|
||||
onClick: () => {
|
||||
setSelectedRole(SystemRoles.USER);
|
||||
},
|
||||
},
|
||||
{
|
||||
label: SystemRoles.ADMIN,
|
||||
onClick: () => {
|
||||
setSelectedRole(SystemRoles.ADMIN);
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const defaultTrigger = (
|
||||
<Button
|
||||
size="sm"
|
||||
|
|
@ -199,7 +184,7 @@ const AdminSettingsDialog: React.FC<AdminSettingsDialogProps> = ({
|
|||
isOpen={isRoleMenuOpen}
|
||||
setIsOpen={setIsRoleMenuOpen}
|
||||
trigger={
|
||||
<Ariakit.MenuButton className="inline-flex w-1/4 items-center justify-center rounded-lg border border-border-light bg-transparent px-2 py-1 text-text-primary transition-all ease-in-out hover:bg-surface-tertiary">
|
||||
<Ariakit.MenuButton className="inline-flex min-w-[6rem] items-center justify-center rounded-lg border border-border-light bg-transparent px-2 py-1 text-text-primary transition-all ease-in-out hover:bg-surface-tertiary">
|
||||
{selectedRole}
|
||||
</Ariakit.MenuButton>
|
||||
}
|
||||
|
|
@ -257,7 +242,11 @@ const AdminSettingsDialog: React.FC<AdminSettingsDialogProps> = ({
|
|||
<Button
|
||||
type="submit"
|
||||
variant="submit"
|
||||
disabled={isSubmitting || isLoading}
|
||||
disabled={
|
||||
isSubmitting ||
|
||||
isLoading ||
|
||||
(isSelectedCustomRole && (isCustomRoleLoading || isCustomRoleError))
|
||||
}
|
||||
aria-label={localize('com_ui_save')}
|
||||
>
|
||||
{localize('com_ui_save')}
|
||||
|
|
|
|||
|
|
@ -29,6 +29,18 @@ export const useGetRole = (
|
|||
});
|
||||
};
|
||||
|
||||
export const useListRoles = (
|
||||
config?: UseQueryOptions<t.ListRolesResponse>,
|
||||
): QueryObserverResult<t.ListRolesResponse> => {
|
||||
return useQuery<t.ListRolesResponse>([QueryKeys.rolesList], () => dataService.listRoles(), {
|
||||
refetchOnWindowFocus: false,
|
||||
refetchOnReconnect: false,
|
||||
refetchOnMount: false,
|
||||
retry: false,
|
||||
...config,
|
||||
});
|
||||
};
|
||||
|
||||
export const useUpdatePromptPermissionsMutation = (
|
||||
options?: t.UpdatePromptPermOptions,
|
||||
): UseMutationResult<
|
||||
|
|
|
|||
|
|
@ -14,6 +14,7 @@ import {
|
|||
apiBaseUrl,
|
||||
SystemRoles,
|
||||
setTokenHeader,
|
||||
isSystemRoleName,
|
||||
buildLoginRedirectUrl,
|
||||
} from 'librechat-data-provider';
|
||||
import type * as t from 'librechat-data-provider';
|
||||
|
|
@ -47,12 +48,18 @@ const AuthContextProvider = ({
|
|||
const [isAuthenticated, setIsAuthenticated] = useState<boolean>(false);
|
||||
const setQueriesEnabled = useSetRecoilState<boolean>(store.queriesEnabled);
|
||||
|
||||
const userRoleName = user?.role ?? '';
|
||||
const isCustomRole = isAuthenticated && !!user?.role && !isSystemRoleName(user.role);
|
||||
|
||||
const { data: userRole = null } = useGetRole(SystemRoles.USER, {
|
||||
enabled: !!(isAuthenticated && (user?.role ?? '')),
|
||||
});
|
||||
const { data: adminRole = null } = useGetRole(SystemRoles.ADMIN, {
|
||||
enabled: !!(isAuthenticated && user?.role === SystemRoles.ADMIN),
|
||||
});
|
||||
const { data: customRole = null } = useGetRole(isCustomRole ? userRoleName : '_', {
|
||||
enabled: isCustomRole,
|
||||
});
|
||||
|
||||
const navigate = useNavigate();
|
||||
|
||||
|
|
@ -267,11 +274,22 @@ const AuthContextProvider = ({
|
|||
roles: {
|
||||
[SystemRoles.USER]: userRole,
|
||||
[SystemRoles.ADMIN]: adminRole,
|
||||
...(isCustomRole && customRole ? { [userRoleName]: customRole } : {}),
|
||||
},
|
||||
isAuthenticated,
|
||||
}),
|
||||
|
||||
[user, error, isAuthenticated, token, userRole, adminRole],
|
||||
[
|
||||
user,
|
||||
error,
|
||||
isAuthenticated,
|
||||
token,
|
||||
userRole,
|
||||
adminRole,
|
||||
isCustomRole,
|
||||
userRoleName,
|
||||
customRole,
|
||||
],
|
||||
);
|
||||
|
||||
return <AuthContext.Provider value={memoedValue}>{children}</AuthContext.Provider>;
|
||||
|
|
|
|||
110
client/src/hooks/Input/useAutoSave.spec.ts
Normal file
110
client/src/hooks/Input/useAutoSave.spec.ts
Normal file
|
|
@ -0,0 +1,110 @@
|
|||
jest.mock('recoil', () => ({
|
||||
...jest.requireActual('recoil'),
|
||||
useRecoilValue: jest.fn(),
|
||||
}));
|
||||
|
||||
jest.mock('~/store', () => ({
|
||||
saveDrafts: { key: 'saveDrafts', default: true },
|
||||
}));
|
||||
|
||||
jest.mock('~/Providers', () => ({
|
||||
useChatFormContext: jest.fn(),
|
||||
}));
|
||||
|
||||
jest.mock('~/data-provider', () => ({
|
||||
useGetFiles: jest.fn(),
|
||||
}));
|
||||
|
||||
jest.mock('~/utils', () => ({
|
||||
...jest.requireActual('~/utils'),
|
||||
getDraft: jest.fn(),
|
||||
setDraft: jest.fn(),
|
||||
clearDraft: jest.fn(),
|
||||
clearAllDrafts: jest.fn(),
|
||||
}));
|
||||
|
||||
import React from 'react';
|
||||
import { renderHook, act } from '@testing-library/react';
|
||||
import { useRecoilValue } from 'recoil';
|
||||
import { useChatFormContext } from '~/Providers';
|
||||
import { useGetFiles } from '~/data-provider';
|
||||
import { getDraft, setDraft } from '~/utils';
|
||||
import store from '~/store';
|
||||
import { useAutoSave } from '~/hooks';
|
||||
|
||||
const mockSetValue = jest.fn();
|
||||
const mockGetDraft = getDraft as jest.Mock;
|
||||
const mockSetDraft = setDraft as jest.Mock;
|
||||
|
||||
const makeTextAreaRef = (value = '') =>
|
||||
({
|
||||
current: { value, addEventListener: jest.fn(), removeEventListener: jest.fn() },
|
||||
}) as unknown as React.RefObject<HTMLTextAreaElement>;
|
||||
|
||||
beforeEach(() => {
|
||||
(useRecoilValue as jest.Mock).mockImplementation((atom) => {
|
||||
if (atom === store.saveDrafts) return true;
|
||||
return undefined;
|
||||
});
|
||||
(useChatFormContext as jest.Mock).mockReturnValue({ setValue: mockSetValue });
|
||||
(useGetFiles as jest.Mock).mockReturnValue({ data: [] });
|
||||
mockGetDraft.mockReturnValue('');
|
||||
});
|
||||
|
||||
describe('useAutoSave — conversation switching', () => {
|
||||
it('clears the textarea when switching to a conversation with no draft', () => {
|
||||
const { rerender } = renderHook(
|
||||
({ conversationId }: { conversationId: string }) =>
|
||||
useAutoSave({
|
||||
conversationId,
|
||||
textAreaRef: makeTextAreaRef(),
|
||||
files: new Map(),
|
||||
setFiles: jest.fn(),
|
||||
}),
|
||||
{ initialProps: { conversationId: 'convo-1' } },
|
||||
);
|
||||
|
||||
act(() => {
|
||||
rerender({ conversationId: 'convo-2' });
|
||||
});
|
||||
|
||||
expect(mockSetValue).toHaveBeenLastCalledWith('text', '');
|
||||
});
|
||||
|
||||
it('restores the saved draft when switching to a conversation with one', () => {
|
||||
mockGetDraft.mockImplementation((id: string) => (id === 'convo-2' ? 'Hello, world!' : ''));
|
||||
|
||||
const { rerender } = renderHook(
|
||||
({ conversationId }: { conversationId: string }) =>
|
||||
useAutoSave({
|
||||
conversationId,
|
||||
textAreaRef: makeTextAreaRef(),
|
||||
files: new Map(),
|
||||
setFiles: jest.fn(),
|
||||
}),
|
||||
{ initialProps: { conversationId: 'convo-1' } },
|
||||
);
|
||||
|
||||
act(() => {
|
||||
rerender({ conversationId: 'convo-2' });
|
||||
});
|
||||
|
||||
expect(mockSetValue).toHaveBeenLastCalledWith('text', 'Hello, world!');
|
||||
});
|
||||
|
||||
it('saves the current textarea content before switching away', () => {
|
||||
const textAreaRef = makeTextAreaRef('draft in progress');
|
||||
|
||||
const { rerender } = renderHook(
|
||||
({ conversationId }: { conversationId: string }) =>
|
||||
useAutoSave({ conversationId, textAreaRef, files: new Map(), setFiles: jest.fn() }),
|
||||
{ initialProps: { conversationId: 'convo-1' } },
|
||||
);
|
||||
|
||||
act(() => {
|
||||
rerender({ conversationId: 'convo-2' });
|
||||
});
|
||||
|
||||
expect(mockSetDraft).toHaveBeenCalledWith({ id: 'convo-1', value: 'draft in progress' });
|
||||
});
|
||||
});
|
||||
|
|
@ -73,11 +73,7 @@ export const useAutoSave = ({
|
|||
|
||||
const restoreText = useCallback(
|
||||
(id: string) => {
|
||||
const savedDraft = getDraft(id);
|
||||
if (!savedDraft) {
|
||||
return;
|
||||
}
|
||||
setValue('text', savedDraft);
|
||||
setValue('text', getDraft(id) ?? '');
|
||||
},
|
||||
[setValue],
|
||||
);
|
||||
|
|
|
|||
|
|
@ -64,13 +64,20 @@ jest.mock('~/data-provider', () => ({
|
|||
error: null,
|
||||
})),
|
||||
useGetRole: jest.fn(() => ({ data: null })),
|
||||
useListRoles: jest.fn(() => ({ data: undefined })),
|
||||
}));
|
||||
|
||||
const authConfig: TAuthConfig = { loginRedirect: '/login', test: true };
|
||||
|
||||
function TestConsumer() {
|
||||
const ctx = useAuthContext();
|
||||
return <div data-testid="consumer" data-authenticated={ctx.isAuthenticated} />;
|
||||
return (
|
||||
<div
|
||||
data-testid="consumer"
|
||||
data-authenticated={ctx.isAuthenticated}
|
||||
data-roles={JSON.stringify(ctx.roles ?? {})}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
function renderProvider() {
|
||||
|
|
@ -445,3 +452,130 @@ describe('AuthContextProvider — logout error handling', () => {
|
|||
jest.useRealTimers();
|
||||
});
|
||||
});
|
||||
|
||||
describe('AuthContextProvider — custom role detection and fetching', () => {
|
||||
const mockUseGetRole = jest.requireMock('~/data-provider').useGetRole;
|
||||
const staffPermissions = {
|
||||
name: 'STAFF',
|
||||
permissions: { PROMPTS: { USE: true, CREATE: false } },
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
sessionStorage.clear();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
sessionStorage.clear();
|
||||
window.history.replaceState({}, '', '/');
|
||||
});
|
||||
|
||||
it('calls useGetRole with the custom role name and enabled: true for custom role users', () => {
|
||||
jest.useFakeTimers();
|
||||
|
||||
renderProviderLive();
|
||||
|
||||
const [, refreshOptions] = mockRefreshMutate.mock.calls[0] as [
|
||||
unknown,
|
||||
{ onSuccess: (data: unknown) => void },
|
||||
];
|
||||
|
||||
act(() => {
|
||||
refreshOptions.onSuccess({ user: { id: '1', role: 'STAFF' }, token: 'tok' });
|
||||
});
|
||||
act(() => {
|
||||
jest.advanceTimersByTime(100);
|
||||
});
|
||||
|
||||
const staffCalls = mockUseGetRole.mock.calls.filter(([name]: [string]) => name === 'STAFF');
|
||||
expect(staffCalls.length).toBeGreaterThan(0);
|
||||
const lastStaffCall = staffCalls[staffCalls.length - 1];
|
||||
expect(lastStaffCall[1]).toEqual(expect.objectContaining({ enabled: true }));
|
||||
|
||||
jest.useRealTimers();
|
||||
});
|
||||
|
||||
it('calls useGetRole with enabled: false for USER role users', () => {
|
||||
jest.useFakeTimers();
|
||||
|
||||
renderProviderLive();
|
||||
|
||||
const [, refreshOptions] = mockRefreshMutate.mock.calls[0] as [
|
||||
unknown,
|
||||
{ onSuccess: (data: unknown) => void },
|
||||
];
|
||||
|
||||
act(() => {
|
||||
refreshOptions.onSuccess({ user: { id: '1', role: 'USER' }, token: 'tok' });
|
||||
});
|
||||
act(() => {
|
||||
jest.advanceTimersByTime(100);
|
||||
});
|
||||
|
||||
const sentinelCalls = mockUseGetRole.mock.calls.filter(([name]: [string]) => name === '_');
|
||||
expect(sentinelCalls.length).toBeGreaterThan(0);
|
||||
for (const call of sentinelCalls) {
|
||||
expect(call[1]).toEqual(expect.objectContaining({ enabled: false }));
|
||||
}
|
||||
|
||||
jest.useRealTimers();
|
||||
});
|
||||
|
||||
it('calls useGetRole with enabled: false for ADMIN role users', () => {
|
||||
jest.useFakeTimers();
|
||||
|
||||
renderProviderLive();
|
||||
|
||||
const [, refreshOptions] = mockRefreshMutate.mock.calls[0] as [
|
||||
unknown,
|
||||
{ onSuccess: (data: unknown) => void },
|
||||
];
|
||||
|
||||
act(() => {
|
||||
refreshOptions.onSuccess({ user: { id: '1', role: 'ADMIN' }, token: 'tok' });
|
||||
});
|
||||
act(() => {
|
||||
jest.advanceTimersByTime(100);
|
||||
});
|
||||
|
||||
const sentinelCalls = mockUseGetRole.mock.calls.filter(([name]: [string]) => name === '_');
|
||||
expect(sentinelCalls.length).toBeGreaterThan(0);
|
||||
for (const call of sentinelCalls) {
|
||||
expect(call[1]).toEqual(expect.objectContaining({ enabled: false }));
|
||||
}
|
||||
|
||||
jest.useRealTimers();
|
||||
});
|
||||
|
||||
it('includes custom role data in the roles context map when loaded', () => {
|
||||
jest.useFakeTimers();
|
||||
mockUseGetRole.mockImplementation((name: string, opts?: { enabled?: boolean }) => {
|
||||
if (name === 'STAFF' && opts?.enabled) {
|
||||
return { data: staffPermissions };
|
||||
}
|
||||
return { data: null };
|
||||
});
|
||||
|
||||
const { getByTestId } = renderProviderLive();
|
||||
|
||||
const [, refreshOptions] = mockRefreshMutate.mock.calls[0] as [
|
||||
unknown,
|
||||
{ onSuccess: (data: unknown) => void },
|
||||
];
|
||||
|
||||
act(() => {
|
||||
refreshOptions.onSuccess({ user: { id: '1', role: 'STAFF' }, token: 'tok' });
|
||||
});
|
||||
act(() => {
|
||||
jest.advanceTimersByTime(100);
|
||||
});
|
||||
|
||||
const rolesAttr = getByTestId('consumer').getAttribute('data-roles') ?? '{}';
|
||||
const roles = JSON.parse(rolesAttr);
|
||||
expect(roles).toHaveProperty('STAFF');
|
||||
expect(roles.STAFF).toEqual(staffPermissions);
|
||||
|
||||
mockUseGetRole.mockReturnValue({ data: null });
|
||||
jest.useRealTimers();
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -37,3 +37,4 @@ export { default as useTextToSpeech } from './Input/useTextToSpeech';
|
|||
export { default as useGenerationsByLatest } from './useGenerationsByLatest';
|
||||
export { default as useLocalizedConfig } from './useLocalizedConfig';
|
||||
export { default as useResourcePermissions } from './useResourcePermissions';
|
||||
export { useRoleSelector } from './useRoleSelector';
|
||||
|
|
|
|||
64
client/src/hooks/useRoleSelector.ts
Normal file
64
client/src/hooks/useRoleSelector.ts
Normal file
|
|
@ -0,0 +1,64 @@
|
|||
import { useMemo, useState, useCallback } from 'react';
|
||||
import { SystemRoles, roleDefaults, isSystemRoleName } from 'librechat-data-provider';
|
||||
import type { PermissionTypes, TRole } from 'librechat-data-provider';
|
||||
import { useGetRole, useListRoles } from '~/data-provider';
|
||||
import { useAuthContext } from './AuthContext';
|
||||
|
||||
export function useRoleSelector(permissionType: PermissionTypes) {
|
||||
const { user, roles } = useAuthContext();
|
||||
const [selectedRole, setSelectedRole] = useState<string>(SystemRoles.USER);
|
||||
|
||||
const { data: roleList } = useListRoles({
|
||||
enabled: user?.role === SystemRoles.ADMIN,
|
||||
});
|
||||
|
||||
const isSelectedCustomRole = !isSystemRoleName(selectedRole);
|
||||
|
||||
const {
|
||||
data: customRoleData = null,
|
||||
isLoading: isCustomRoleLoading,
|
||||
isError: isCustomRoleError,
|
||||
} = useGetRole(isSelectedCustomRole ? selectedRole : '_', { enabled: isSelectedCustomRole });
|
||||
|
||||
const resolvePermissions = useCallback(
|
||||
(role: string, customData: TRole | null) => {
|
||||
const isCustom = !isSystemRoleName(role);
|
||||
if (isCustom && customData?.permissions?.[permissionType]) {
|
||||
return customData.permissions[permissionType];
|
||||
}
|
||||
if (!isCustom && roles?.[role]?.permissions?.[permissionType]) {
|
||||
return roles[role]?.permissions[permissionType];
|
||||
}
|
||||
const defaults = !isCustom
|
||||
? roleDefaults[role as SystemRoles]
|
||||
: roleDefaults[SystemRoles.USER];
|
||||
return defaults.permissions[permissionType];
|
||||
},
|
||||
[roles, permissionType],
|
||||
);
|
||||
|
||||
const defaultValues = useMemo(
|
||||
() => resolvePermissions(selectedRole, customRoleData),
|
||||
[resolvePermissions, selectedRole, customRoleData],
|
||||
);
|
||||
|
||||
const availableRoleNames = useMemo(() => {
|
||||
const names = roleList?.roles?.map((r) => r.name);
|
||||
return names?.length ? names : [SystemRoles.USER, SystemRoles.ADMIN];
|
||||
}, [roleList]);
|
||||
|
||||
const roleDropdownItems = useMemo(
|
||||
() => availableRoleNames.map((role) => ({ label: role, onClick: () => setSelectedRole(role) })),
|
||||
[availableRoleNames],
|
||||
);
|
||||
|
||||
return {
|
||||
selectedRole,
|
||||
setSelectedRole,
|
||||
isSelectedCustomRole,
|
||||
isCustomRoleLoading,
|
||||
isCustomRoleError,
|
||||
defaultValues,
|
||||
roleDropdownItems,
|
||||
};
|
||||
}
|
||||
|
|
@ -2,7 +2,7 @@
|
|||
# https://www.librechat.ai/docs/configuration/librechat_yaml
|
||||
|
||||
# Configuration version (required)
|
||||
version: 1.3.6
|
||||
version: 1.3.7
|
||||
|
||||
# Cache settings: Set to true to enable caching
|
||||
cache: true
|
||||
|
|
@ -80,10 +80,8 @@ interface:
|
|||
|
||||
By using the Website, you acknowledge that you have read these Terms of Service and agree to be bound by them.
|
||||
|
||||
endpointsMenu: true
|
||||
modelSelect: true
|
||||
parameters: true
|
||||
sidePanel: true
|
||||
presets: true
|
||||
prompts:
|
||||
use: true
|
||||
|
|
|
|||
350
package-lock.json
generated
350
package-lock.json
generated
|
|
@ -133,7 +133,7 @@
|
|||
},
|
||||
"devDependencies": {
|
||||
"jest": "^30.2.0",
|
||||
"mongodb-memory-server": "^10.1.4",
|
||||
"mongodb-memory-server": "^11.0.1",
|
||||
"nodemon": "^3.0.3",
|
||||
"supertest": "^7.1.0"
|
||||
}
|
||||
|
|
@ -447,7 +447,7 @@
|
|||
"lodash": "^4.17.23",
|
||||
"lucide-react": "^0.394.0",
|
||||
"match-sorter": "^8.1.0",
|
||||
"mermaid": "^11.13.0",
|
||||
"mermaid": "^11.14.0",
|
||||
"micromark-extension-llm-math": "^3.1.0",
|
||||
"qrcode.react": "^4.2.0",
|
||||
"rc-input-number": "^7.4.2",
|
||||
|
|
@ -6895,6 +6895,12 @@
|
|||
"lodash-es": "4.17.23"
|
||||
}
|
||||
},
|
||||
"node_modules/@chevrotain/cst-dts-gen/node_modules/lodash-es": {
|
||||
"version": "4.17.23",
|
||||
"resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.23.tgz",
|
||||
"integrity": "sha512-kVI48u3PZr38HdYz98UmfPnXl2DXrpdctLrFLCd3kOx1xUkOmpFPx7gCWWM5MPkL/fD8zb+Ph0QzjGFs4+hHWg==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@chevrotain/gast": {
|
||||
"version": "11.1.2",
|
||||
"resolved": "https://registry.npmjs.org/@chevrotain/gast/-/gast-11.1.2.tgz",
|
||||
|
|
@ -6905,6 +6911,12 @@
|
|||
"lodash-es": "4.17.23"
|
||||
}
|
||||
},
|
||||
"node_modules/@chevrotain/gast/node_modules/lodash-es": {
|
||||
"version": "4.17.23",
|
||||
"resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.23.tgz",
|
||||
"integrity": "sha512-kVI48u3PZr38HdYz98UmfPnXl2DXrpdctLrFLCd3kOx1xUkOmpFPx7gCWWM5MPkL/fD8zb+Ph0QzjGFs4+hHWg==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@chevrotain/regexp-to-ast": {
|
||||
"version": "11.1.2",
|
||||
"resolved": "https://registry.npmjs.org/@chevrotain/regexp-to-ast/-/regexp-to-ast-11.1.2.tgz",
|
||||
|
|
@ -11944,9 +11956,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@mermaid-js/parser": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/@mermaid-js/parser/-/parser-1.0.1.tgz",
|
||||
"integrity": "sha512-opmV19kN1JsK0T6HhhokHpcVkqKpF+x2pPDKKM2ThHtZAB5F4PROopk0amuVYK5qMrIA4erzpNm8gmPNJgMDxQ==",
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/@mermaid-js/parser/-/parser-1.1.0.tgz",
|
||||
"integrity": "sha512-gxK9ZX2+Fex5zu8LhRQoMeMPEHbc73UKZ0FQ54YrQtUxE1VVhMwzeNtKRPAu5aXks4FasbMe4xB4bWrmq6Jlxw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"langium": "^4.0.0"
|
||||
|
|
@ -21826,9 +21838,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@xmldom/xmldom": {
|
||||
"version": "0.8.10",
|
||||
"resolved": "https://registry.npmjs.org/@xmldom/xmldom/-/xmldom-0.8.10.tgz",
|
||||
"integrity": "sha512-2WALfTl4xo2SkGCYRt6rDTFfk9R1czmBvUQy12gK2KuRKIpWEhcbbzy8EZXtz/jkRqHX8bFEc6FC1HjX4TUWYw==",
|
||||
"version": "0.8.12",
|
||||
"resolved": "https://registry.npmjs.org/@xmldom/xmldom/-/xmldom-0.8.12.tgz",
|
||||
"integrity": "sha512-9k/gHF6n/pAi/9tqr3m3aqkuiNosYTurLLUtc7xQ9sxB/wm7WPygCv8GYa6mS0fLJEHhqMC1ATYhz++U/lRHqg==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=10.0.0"
|
||||
|
|
@ -22506,13 +22518,6 @@
|
|||
"node": ">= 0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/b4a": {
|
||||
"version": "1.6.7",
|
||||
"resolved": "https://registry.npmjs.org/b4a/-/b4a-1.6.7.tgz",
|
||||
"integrity": "sha512-OnAYlL5b7LEkALw87fUVafQw5rVR9RjwGd4KUwNQ6DrrNmaVaUCgLipfVlzrPQ4tWOR9P0IXGNOx50jYCCdSJg==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0"
|
||||
},
|
||||
"node_modules/babel-jest": {
|
||||
"version": "30.2.0",
|
||||
"resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-30.2.0.tgz",
|
||||
|
|
@ -22726,12 +22731,101 @@
|
|||
"integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="
|
||||
},
|
||||
"node_modules/bare-events": {
|
||||
"version": "2.6.0",
|
||||
"resolved": "https://registry.npmjs.org/bare-events/-/bare-events-2.6.0.tgz",
|
||||
"integrity": "sha512-EKZ5BTXYExaNqi3I3f9RtEsaI/xBSGjE0XZCZilPzFAV/goswFHuPd9jEZlPIZ/iNZJwDSao9qRiScySz7MbQg==",
|
||||
"version": "2.8.2",
|
||||
"resolved": "https://registry.npmjs.org/bare-events/-/bare-events-2.8.2.tgz",
|
||||
"integrity": "sha512-riJjyv1/mHLIPX4RwiK+oW9/4c3TEUeORHKefKAKnZ5kyslbN+HXowtbaVEqt4IMUB7OXlfixcs6gsFeo/jhiQ==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"optional": true
|
||||
"peerDependencies": {
|
||||
"bare-abort-controller": "*"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"bare-abort-controller": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/bare-fs": {
|
||||
"version": "4.6.0",
|
||||
"resolved": "https://registry.npmjs.org/bare-fs/-/bare-fs-4.6.0.tgz",
|
||||
"integrity": "sha512-2YkS7NuiJceSEbyEOdSNLE9tsGd+f4+f7C+Nik/MCk27SYdwIMPT/yRKvg++FZhQXgk0KWJKJyXX9RhVV0RGqA==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"bare-events": "^2.5.4",
|
||||
"bare-path": "^3.0.0",
|
||||
"bare-stream": "^2.6.4",
|
||||
"bare-url": "^2.2.2",
|
||||
"fast-fifo": "^1.3.2"
|
||||
},
|
||||
"engines": {
|
||||
"bare": ">=1.16.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"bare-buffer": "*"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"bare-buffer": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/bare-os": {
|
||||
"version": "3.8.7",
|
||||
"resolved": "https://registry.npmjs.org/bare-os/-/bare-os-3.8.7.tgz",
|
||||
"integrity": "sha512-G4Gr1UsGeEy2qtDTZwL7JFLo2wapUarz7iTMcYcMFdS89AIQuBoyjgXZz0Utv7uHs3xA9LckhVbeBi8lEQrC+w==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"engines": {
|
||||
"bare": ">=1.14.0"
|
||||
}
|
||||
},
|
||||
"node_modules/bare-path": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/bare-path/-/bare-path-3.0.0.tgz",
|
||||
"integrity": "sha512-tyfW2cQcB5NN8Saijrhqn0Zh7AnFNsnczRcuWODH0eYAXBsJ5gVxAUuNr7tsHSC6IZ77cA0SitzT+s47kot8Mw==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"bare-os": "^3.0.1"
|
||||
}
|
||||
},
|
||||
"node_modules/bare-stream": {
|
||||
"version": "2.12.0",
|
||||
"resolved": "https://registry.npmjs.org/bare-stream/-/bare-stream-2.12.0.tgz",
|
||||
"integrity": "sha512-w28i8lkBgREV3rPXGbgK+BO66q+ZpKqRWrZLiCdmmUlLPrQ45CzkvRhN+7lnv00Gpi2zy5naRxnUFAxCECDm9g==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"streamx": "^2.25.0",
|
||||
"teex": "^1.0.1"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"bare-abort-controller": "*",
|
||||
"bare-buffer": "*",
|
||||
"bare-events": "*"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"bare-abort-controller": {
|
||||
"optional": true
|
||||
},
|
||||
"bare-buffer": {
|
||||
"optional": true
|
||||
},
|
||||
"bare-events": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/bare-url": {
|
||||
"version": "2.4.0",
|
||||
"resolved": "https://registry.npmjs.org/bare-url/-/bare-url-2.4.0.tgz",
|
||||
"integrity": "sha512-NSTU5WN+fy/L0DDenfE8SXQna4voXuW0FHM7wH8i3/q9khUSchfPbPezO4zSFMnDGIf9YE+mt/RWhZgNRKRIXA==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"bare-path": "^3.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/base64-js": {
|
||||
"version": "1.5.1",
|
||||
|
|
@ -23455,6 +23549,12 @@
|
|||
"chevrotain": "^11.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/chevrotain/node_modules/lodash-es": {
|
||||
"version": "4.17.23",
|
||||
"resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.23.tgz",
|
||||
"integrity": "sha512-kVI48u3PZr38HdYz98UmfPnXl2DXrpdctLrFLCd3kOx1xUkOmpFPx7gCWWM5MPkL/fD8zb+Ph0QzjGFs4+hHWg==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/chokidar": {
|
||||
"version": "3.5.3",
|
||||
"resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz",
|
||||
|
|
@ -26711,6 +26811,16 @@
|
|||
"node": ">=0.8.x"
|
||||
}
|
||||
},
|
||||
"node_modules/events-universal": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/events-universal/-/events-universal-1.0.1.tgz",
|
||||
"integrity": "sha512-LUd5euvbMLpwOF8m6ivPCbhQeSiYVNb8Vs0fQ8QjXo0JTkEHpz8pxdQf0gStltaPpw0Cca8b39KxvK9cfKRiAw==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"bare-events": "^2.7.0"
|
||||
}
|
||||
},
|
||||
"node_modules/eventsource": {
|
||||
"version": "3.0.2",
|
||||
"resolved": "https://registry.npmjs.org/eventsource/-/eventsource-3.0.2.tgz",
|
||||
|
|
@ -32016,15 +32126,15 @@
|
|||
}
|
||||
},
|
||||
"node_modules/lodash": {
|
||||
"version": "4.17.23",
|
||||
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.23.tgz",
|
||||
"integrity": "sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w==",
|
||||
"version": "4.18.1",
|
||||
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.18.1.tgz",
|
||||
"integrity": "sha512-dMInicTPVE8d1e5otfwmmjlxkZoUpiVLwyeTdUsi/Caj/gfzzblBcCE5sRHV/AsjuCmxWrte2TNGSYuCeCq+0Q==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/lodash-es": {
|
||||
"version": "4.17.23",
|
||||
"resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.23.tgz",
|
||||
"integrity": "sha512-kVI48u3PZr38HdYz98UmfPnXl2DXrpdctLrFLCd3kOx1xUkOmpFPx7gCWWM5MPkL/fD8zb+Ph0QzjGFs4+hHWg==",
|
||||
"version": "4.18.1",
|
||||
"resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.18.1.tgz",
|
||||
"integrity": "sha512-J8xewKD/Gk22OZbhpOVSwcs60zhd95ESDwezOFuA3/099925PdHJ7OFHNTGtajL3AlZkykD32HykiMo+BIBI8A==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/lodash.camelcase": {
|
||||
|
|
@ -33243,14 +33353,14 @@
|
|||
}
|
||||
},
|
||||
"node_modules/mermaid": {
|
||||
"version": "11.13.0",
|
||||
"resolved": "https://registry.npmjs.org/mermaid/-/mermaid-11.13.0.tgz",
|
||||
"integrity": "sha512-fEnci+Immw6lKMFI8sqzjlATTyjLkRa6axrEgLV2yHTfv8r+h1wjFbV6xeRtd4rUV1cS4EpR9rwp3Rci7TRWDw==",
|
||||
"version": "11.14.0",
|
||||
"resolved": "https://registry.npmjs.org/mermaid/-/mermaid-11.14.0.tgz",
|
||||
"integrity": "sha512-GSGloRsBs+JINmmhl0JDwjpuezCsHB4WGI4NASHxL3fHo3o/BRXTxhDLKnln8/Q0lRFRyDdEjmk1/d5Sn1Xz8g==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@braintree/sanitize-url": "^7.1.1",
|
||||
"@iconify/utils": "^3.0.2",
|
||||
"@mermaid-js/parser": "^1.0.1",
|
||||
"@mermaid-js/parser": "^1.1.0",
|
||||
"@types/d3": "^7.4.3",
|
||||
"@upsetjs/venn.js": "^2.0.0",
|
||||
"cytoscape": "^3.33.1",
|
||||
|
|
@ -34129,42 +34239,123 @@
|
|||
}
|
||||
},
|
||||
"node_modules/mongodb-memory-server": {
|
||||
"version": "10.1.4",
|
||||
"resolved": "https://registry.npmjs.org/mongodb-memory-server/-/mongodb-memory-server-10.1.4.tgz",
|
||||
"integrity": "sha512-+oKQ/kc3CX+816oPFRtaF0CN4vNcGKNjpOQe4bHo/21A3pMD+lC7Xz1EX5HP7siCX4iCpVchDMmCOFXVQSGkUg==",
|
||||
"version": "11.0.1",
|
||||
"resolved": "https://registry.npmjs.org/mongodb-memory-server/-/mongodb-memory-server-11.0.1.tgz",
|
||||
"integrity": "sha512-nUlKovSJZBh7q5hPsewFRam9H66D08Ne18nyknkNalfXMPtK1Og3kOcuqQhcX88x/pghSZPIJHrLbxNFW3OWiw==",
|
||||
"dev": true,
|
||||
"hasInstallScript": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"mongodb-memory-server-core": "10.1.4",
|
||||
"tslib": "^2.7.0"
|
||||
"mongodb-memory-server-core": "11.0.1",
|
||||
"tslib": "^2.8.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=16.20.1"
|
||||
"node": ">=20.19.0"
|
||||
}
|
||||
},
|
||||
"node_modules/mongodb-memory-server-core": {
|
||||
"version": "10.1.4",
|
||||
"resolved": "https://registry.npmjs.org/mongodb-memory-server-core/-/mongodb-memory-server-core-10.1.4.tgz",
|
||||
"integrity": "sha512-o8fgY7ZalEd8pGps43fFPr/hkQu1L8i6HFEGbsTfA2zDOW0TopgpswaBCqDr0qD7ptibyPfB5DmC+UlIxbThzA==",
|
||||
"version": "11.0.1",
|
||||
"resolved": "https://registry.npmjs.org/mongodb-memory-server-core/-/mongodb-memory-server-core-11.0.1.tgz",
|
||||
"integrity": "sha512-IcIb2S9Xf7Lmz43Z1ZujMqNg7PU5Q7yn+4wOnu7l6pfeGPkEmlqzV1hIbroVx8s4vXhPB1oMGC1u8clW7aj3Xw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"async-mutex": "^0.5.0",
|
||||
"camelcase": "^6.3.0",
|
||||
"debug": "^4.3.7",
|
||||
"debug": "^4.4.3",
|
||||
"find-cache-dir": "^3.3.2",
|
||||
"follow-redirects": "^1.15.9",
|
||||
"https-proxy-agent": "^7.0.5",
|
||||
"mongodb": "^6.9.0",
|
||||
"follow-redirects": "^1.15.11",
|
||||
"https-proxy-agent": "^7.0.6",
|
||||
"mongodb": "^7.0.0",
|
||||
"new-find-package-json": "^2.0.0",
|
||||
"semver": "^7.6.3",
|
||||
"semver": "^7.7.3",
|
||||
"tar-stream": "^3.1.7",
|
||||
"tslib": "^2.7.0",
|
||||
"yauzl": "^3.1.3"
|
||||
"tslib": "^2.8.1",
|
||||
"yauzl": "^3.2.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=16.20.1"
|
||||
"node": ">=20.19.0"
|
||||
}
|
||||
},
|
||||
"node_modules/mongodb-memory-server-core/node_modules/@types/whatwg-url": {
|
||||
"version": "13.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@types/whatwg-url/-/whatwg-url-13.0.0.tgz",
|
||||
"integrity": "sha512-N8WXpbE6Wgri7KUSvrmQcqrMllKZ9uxkYWMt+mCSGwNc0Hsw9VQTW7ApqI4XNrx6/SaM2QQJCzMPDEXE058s+Q==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/webidl-conversions": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/mongodb-memory-server-core/node_modules/bson": {
|
||||
"version": "7.2.0",
|
||||
"resolved": "https://registry.npmjs.org/bson/-/bson-7.2.0.tgz",
|
||||
"integrity": "sha512-YCEo7KjMlbNlyHhz7zAZNDpIpQbd+wOEHJYezv0nMYTn4x31eIUM2yomNNubclAt63dObUzKHWsBLJ9QcZNSnQ==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"engines": {
|
||||
"node": ">=20.19.0"
|
||||
}
|
||||
},
|
||||
"node_modules/mongodb-memory-server-core/node_modules/mongodb": {
|
||||
"version": "7.1.1",
|
||||
"resolved": "https://registry.npmjs.org/mongodb/-/mongodb-7.1.1.tgz",
|
||||
"integrity": "sha512-067DXiMjcpYQl6bGjWQoTUEE9UoRViTtKFcoqX7z08I+iDZv/emH1g8XEFiO3qiDfXAheT5ozl1VffDTKhIW/w==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"@mongodb-js/saslprep": "^1.3.0",
|
||||
"bson": "^7.1.1",
|
||||
"mongodb-connection-string-url": "^7.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=20.19.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@aws-sdk/credential-providers": "^3.806.0",
|
||||
"@mongodb-js/zstd": "^7.0.0",
|
||||
"gcp-metadata": "^7.0.1",
|
||||
"kerberos": "^7.0.0",
|
||||
"mongodb-client-encryption": ">=7.0.0 <7.1.0",
|
||||
"snappy": "^7.3.2",
|
||||
"socks": "^2.8.6"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"@aws-sdk/credential-providers": {
|
||||
"optional": true
|
||||
},
|
||||
"@mongodb-js/zstd": {
|
||||
"optional": true
|
||||
},
|
||||
"gcp-metadata": {
|
||||
"optional": true
|
||||
},
|
||||
"kerberos": {
|
||||
"optional": true
|
||||
},
|
||||
"mongodb-client-encryption": {
|
||||
"optional": true
|
||||
},
|
||||
"snappy": {
|
||||
"optional": true
|
||||
},
|
||||
"socks": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/mongodb-memory-server-core/node_modules/mongodb-connection-string-url": {
|
||||
"version": "7.0.1",
|
||||
"resolved": "https://registry.npmjs.org/mongodb-connection-string-url/-/mongodb-connection-string-url-7.0.1.tgz",
|
||||
"integrity": "sha512-h0AZ9A7IDVwwHyMxmdMXKy+9oNlF0zFoahHiX3vQ8e3KFcSP3VmsmfvtRSuLPxmyv2vjIDxqty8smTgie/SNRQ==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"@types/whatwg-url": "^13.0.0",
|
||||
"whatwg-url": "^14.1.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=20.19.0"
|
||||
}
|
||||
},
|
||||
"node_modules/mongoose": {
|
||||
|
|
@ -40366,17 +40557,15 @@
|
|||
}
|
||||
},
|
||||
"node_modules/streamx": {
|
||||
"version": "2.22.1",
|
||||
"resolved": "https://registry.npmjs.org/streamx/-/streamx-2.22.1.tgz",
|
||||
"integrity": "sha512-znKXEBxfatz2GBNK02kRnCXjV+AA4kjZIUxeWSr3UGirZMJfTE9uiwKHobnbgxWyL/JWro8tTq+vOqAK1/qbSA==",
|
||||
"version": "2.25.0",
|
||||
"resolved": "https://registry.npmjs.org/streamx/-/streamx-2.25.0.tgz",
|
||||
"integrity": "sha512-0nQuG6jf1w+wddNEEXCF4nTg3LtufWINB5eFEN+5TNZW7KWJp6x87+JFL43vaAUPyCfH1wID+mNVyW6OHtFamg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"events-universal": "^1.0.0",
|
||||
"fast-fifo": "^1.3.2",
|
||||
"text-decoder": "^1.1.0"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"bare-events": "^2.2.0"
|
||||
}
|
||||
},
|
||||
"node_modules/strict-event-emitter": {
|
||||
|
|
@ -41142,17 +41331,43 @@
|
|||
"license": "MIT"
|
||||
},
|
||||
"node_modules/tar-stream": {
|
||||
"version": "3.1.7",
|
||||
"resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-3.1.7.tgz",
|
||||
"integrity": "sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ==",
|
||||
"version": "3.1.8",
|
||||
"resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-3.1.8.tgz",
|
||||
"integrity": "sha512-U6QpVRyCGHva435KoNWy9PRoi2IFYCgtEhq9nmrPPpbRacPs9IH4aJ3gbrFC8dPcXvdSZ4XXfXT5Fshbp2MtlQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"b4a": "^1.6.4",
|
||||
"bare-fs": "^4.5.5",
|
||||
"fast-fifo": "^1.2.0",
|
||||
"streamx": "^2.15.0"
|
||||
}
|
||||
},
|
||||
"node_modules/tar-stream/node_modules/b4a": {
|
||||
"version": "1.8.0",
|
||||
"resolved": "https://registry.npmjs.org/b4a/-/b4a-1.8.0.tgz",
|
||||
"integrity": "sha512-qRuSmNSkGQaHwNbM7J78Wwy+ghLEYF1zNrSeMxj4Kgw6y33O3mXcQ6Ie9fRvfU/YnxWkOchPXbaLb73TkIsfdg==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"peerDependencies": {
|
||||
"react-native-b4a": "*"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"react-native-b4a": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/teex": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/teex/-/teex-1.0.1.tgz",
|
||||
"integrity": "sha512-eYE6iEI62Ni1H8oIa7KlDU6uQBtqr4Eajni3wX7rpfXD8ysFx8z0+dri+KWEPWpBsxXfxu58x/0jvTVT1ekOSg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"streamx": "^2.12.5"
|
||||
}
|
||||
},
|
||||
"node_modules/temp-dir": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/temp-dir/-/temp-dir-2.0.0.tgz",
|
||||
|
|
@ -41254,15 +41469,30 @@
|
|||
}
|
||||
},
|
||||
"node_modules/text-decoder": {
|
||||
"version": "1.2.3",
|
||||
"resolved": "https://registry.npmjs.org/text-decoder/-/text-decoder-1.2.3.tgz",
|
||||
"integrity": "sha512-3/o9z3X0X0fTupwsYvR03pJ/DjWuqqrfwBgTQzdWDiQSm9KitAyz/9WqsT2JQW7KV2m+bC2ol/zqpW37NHxLaA==",
|
||||
"version": "1.2.7",
|
||||
"resolved": "https://registry.npmjs.org/text-decoder/-/text-decoder-1.2.7.tgz",
|
||||
"integrity": "sha512-vlLytXkeP4xvEq2otHeJfSQIRyWxo/oZGEbXrtEEF9Hnmrdly59sUbzZ/QgyWuLYHctCHxFF4tRQZNQ9k60ExQ==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"b4a": "^1.6.4"
|
||||
}
|
||||
},
|
||||
"node_modules/text-decoder/node_modules/b4a": {
|
||||
"version": "1.8.0",
|
||||
"resolved": "https://registry.npmjs.org/b4a/-/b4a-1.8.0.tgz",
|
||||
"integrity": "sha512-qRuSmNSkGQaHwNbM7J78Wwy+ghLEYF1zNrSeMxj4Kgw6y33O3mXcQ6Ie9fRvfU/YnxWkOchPXbaLb73TkIsfdg==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"peerDependencies": {
|
||||
"react-native-b4a": "*"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"react-native-b4a": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/text-hex": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/text-hex/-/text-hex-1.0.0.tgz",
|
||||
|
|
@ -45892,7 +46122,7 @@
|
|||
},
|
||||
"packages/data-provider": {
|
||||
"name": "librechat-data-provider",
|
||||
"version": "0.8.406",
|
||||
"version": "0.8.407",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"axios": "1.13.6",
|
||||
|
|
@ -45988,7 +46218,7 @@
|
|||
"@types/node": "^20.3.0",
|
||||
"jest": "^30.2.0",
|
||||
"jest-junit": "^16.0.0",
|
||||
"mongodb-memory-server": "^10.1.4",
|
||||
"mongodb-memory-server": "^11.0.1",
|
||||
"rimraf": "^6.1.3",
|
||||
"rollup": "^4.34.9",
|
||||
"rollup-plugin-peer-deps-external": "^2.2.4",
|
||||
|
|
|
|||
|
|
@ -54,7 +54,7 @@ function createHandlers(overrides = {}) {
|
|||
toggleConfigActive: jest.fn().mockResolvedValue({ _id: 'c1', isActive: false }),
|
||||
hasConfigCapability: jest.fn().mockResolvedValue(true),
|
||||
|
||||
getAppConfig: jest.fn().mockResolvedValue({ interface: { endpointsMenu: true } }),
|
||||
getAppConfig: jest.fn().mockResolvedValue({ interface: { modelSelect: true } }),
|
||||
...overrides,
|
||||
};
|
||||
const handlers = createAdminConfigHandlers(deps);
|
||||
|
|
@ -133,7 +133,7 @@ describe('createAdminConfigHandlers', () => {
|
|||
});
|
||||
const req = mockReq({
|
||||
params: { principalType: 'role', principalId: 'admin' },
|
||||
body: { overrides: { interface: { endpointsMenu: false } } },
|
||||
body: { overrides: { interface: { modelSelect: false } } },
|
||||
});
|
||||
const res = mockRes();
|
||||
|
||||
|
|
@ -148,7 +148,7 @@ describe('createAdminConfigHandlers', () => {
|
|||
});
|
||||
const req = mockReq({
|
||||
params: { principalType: 'role', principalId: 'admin' },
|
||||
body: { overrides: { interface: { endpointsMenu: false } } },
|
||||
body: { overrides: { interface: { modelSelect: false } } },
|
||||
});
|
||||
const res = mockRes();
|
||||
|
||||
|
|
@ -178,7 +178,7 @@ describe('createAdminConfigHandlers', () => {
|
|||
params: { principalType: 'role', principalId: 'admin' },
|
||||
body: {
|
||||
overrides: {
|
||||
interface: { endpointsMenu: false, prompts: false, agents: { use: false } },
|
||||
interface: { modelSelect: false, prompts: false, agents: { use: false } },
|
||||
},
|
||||
},
|
||||
});
|
||||
|
|
@ -188,7 +188,7 @@ describe('createAdminConfigHandlers', () => {
|
|||
|
||||
expect(res.statusCode).toBe(201);
|
||||
const savedOverrides = deps.upsertConfig.mock.calls[0][3];
|
||||
expect(savedOverrides.interface).toEqual({ endpointsMenu: false });
|
||||
expect(savedOverrides.interface).toEqual({ modelSelect: false });
|
||||
});
|
||||
|
||||
it('preserves UI sub-keys in composite permission fields like mcpServers', async () => {
|
||||
|
|
@ -263,17 +263,13 @@ describe('createAdminConfigHandlers', () => {
|
|||
const { handlers, deps } = createHandlers();
|
||||
const req = mockReq({
|
||||
params: { principalType: 'role', principalId: 'admin' },
|
||||
query: { fieldPath: 'interface.endpointsMenu' },
|
||||
query: { fieldPath: 'interface.modelSelect' },
|
||||
});
|
||||
const res = mockRes();
|
||||
|
||||
await handlers.deleteConfigField(req, res);
|
||||
|
||||
expect(deps.unsetConfigField).toHaveBeenCalledWith(
|
||||
'role',
|
||||
'admin',
|
||||
'interface.endpointsMenu',
|
||||
);
|
||||
expect(deps.unsetConfigField).toHaveBeenCalledWith('role', 'admin', 'interface.modelSelect');
|
||||
});
|
||||
|
||||
it('allows deleting mcpServers UI sub-key paths', async () => {
|
||||
|
|
@ -343,18 +339,14 @@ describe('createAdminConfigHandlers', () => {
|
|||
const { handlers, deps } = createHandlers();
|
||||
const req = mockReq({
|
||||
params: { principalType: 'role', principalId: 'admin' },
|
||||
query: { fieldPath: 'interface.endpointsMenu' },
|
||||
query: { fieldPath: 'interface.modelSelect' },
|
||||
});
|
||||
const res = mockRes();
|
||||
|
||||
await handlers.deleteConfigField(req, res);
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(deps.unsetConfigField).toHaveBeenCalledWith(
|
||||
'role',
|
||||
'admin',
|
||||
'interface.endpointsMenu',
|
||||
);
|
||||
expect(deps.unsetConfigField).toHaveBeenCalledWith('role', 'admin', 'interface.modelSelect');
|
||||
});
|
||||
|
||||
it('returns 400 when fieldPath query param is missing', async () => {
|
||||
|
|
@ -407,7 +399,7 @@ describe('createAdminConfigHandlers', () => {
|
|||
params: { principalType: 'role', principalId: 'admin' },
|
||||
body: {
|
||||
entries: [
|
||||
{ fieldPath: 'interface.endpointsMenu', value: false },
|
||||
{ fieldPath: 'interface.modelSelect', value: false },
|
||||
{ fieldPath: 'interface.prompts', value: false },
|
||||
],
|
||||
},
|
||||
|
|
@ -418,7 +410,7 @@ describe('createAdminConfigHandlers', () => {
|
|||
|
||||
expect(res.statusCode).toBe(200);
|
||||
const patchedFields = deps.patchConfigFields.mock.calls[0][3];
|
||||
expect(patchedFields['interface.endpointsMenu']).toBe(false);
|
||||
expect(patchedFields['interface.modelSelect']).toBe(false);
|
||||
expect(patchedFields['interface.prompts']).toBeUndefined();
|
||||
});
|
||||
|
||||
|
|
@ -632,21 +624,21 @@ describe('createAdminConfigHandlers', () => {
|
|||
name: 'upsertConfigOverrides',
|
||||
reqOverrides: {
|
||||
params: { principalType: 'role', principalId: 'admin' },
|
||||
body: { overrides: { interface: { endpointsMenu: false } } },
|
||||
body: { overrides: { interface: { modelSelect: false } } },
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'patchConfigField',
|
||||
reqOverrides: {
|
||||
params: { principalType: 'role', principalId: 'admin' },
|
||||
body: { entries: [{ fieldPath: 'interface.endpointsMenu', value: false }] },
|
||||
body: { entries: [{ fieldPath: 'interface.modelSelect', value: false }] },
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'deleteConfigField',
|
||||
reqOverrides: {
|
||||
params: { principalType: 'role', principalId: 'admin' },
|
||||
query: { fieldPath: 'interface.endpointsMenu' },
|
||||
query: { fieldPath: 'interface.modelSelect' },
|
||||
},
|
||||
},
|
||||
{
|
||||
|
|
@ -775,7 +767,7 @@ describe('createAdminConfigHandlers', () => {
|
|||
await handlers.getBaseConfig(req, res);
|
||||
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(res.body!.config).toEqual({ interface: { endpointsMenu: true } });
|
||||
expect(res.body!.config).toEqual({ interface: { modelSelect: true } });
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@ import { isValidFieldPath, getTopLevelSection } from './config';
|
|||
|
||||
describe('isValidFieldPath', () => {
|
||||
it('accepts simple dot paths', () => {
|
||||
expect(isValidFieldPath('interface.endpointsMenu')).toBe(true);
|
||||
expect(isValidFieldPath('interface.modelSelect')).toBe(true);
|
||||
expect(isValidFieldPath('registration.socialLogins')).toBe(true);
|
||||
expect(isValidFieldPath('a')).toBe(true);
|
||||
expect(isValidFieldPath('a.b.c.d')).toBe(true);
|
||||
|
|
@ -47,7 +47,7 @@ describe('isValidFieldPath', () => {
|
|||
|
||||
describe('getTopLevelSection', () => {
|
||||
it('returns first segment of a dot path', () => {
|
||||
expect(getTopLevelSection('interface.endpointsMenu')).toBe('interface');
|
||||
expect(getTopLevelSection('interface.modelSelect')).toBe('interface');
|
||||
expect(getTopLevelSection('registration.socialLogins.github')).toBe('registration');
|
||||
});
|
||||
|
||||
|
|
|
|||
|
|
@ -40,7 +40,7 @@ export function getTopLevelSection(fieldPath: string): string {
|
|||
* - `"interface.mcpServers.use"` → true (permission sub-key)
|
||||
* - `"interface.mcpServers.placeholder"` → false (UI-only sub-key)
|
||||
* - `"interface.peoplePicker.users"` → true (all peoplePicker sub-keys are permissions)
|
||||
* - `"interface.endpointsMenu"` → false (UI-only field)
|
||||
* - `"interface.modelSelect"` → false (UI-only field)
|
||||
*/
|
||||
function isInterfacePermissionPath(fieldPath: string): boolean {
|
||||
const parts = fieldPath.split('.');
|
||||
|
|
|
|||
|
|
@ -85,7 +85,7 @@ describe('AppService', () => {
|
|||
it('should correctly assign process.env and initialize app config based on custom config', async () => {
|
||||
const config: Partial<TCustomConfig> = {
|
||||
registration: { socialLogins: ['testLogin'] },
|
||||
fileStrategy: 'testStrategy' as FileSources,
|
||||
fileStrategy: FileSources.s3,
|
||||
balance: {
|
||||
enabled: true,
|
||||
},
|
||||
|
|
@ -93,22 +93,20 @@ describe('AppService', () => {
|
|||
|
||||
const result = await AppService({ config, systemTools: mockSystemTools });
|
||||
|
||||
expect(process.env.CDN_PROVIDER).toEqual('testStrategy');
|
||||
expect(process.env.CDN_PROVIDER).toEqual('s3');
|
||||
|
||||
expect(result).toEqual(
|
||||
expect.objectContaining({
|
||||
config: expect.objectContaining({
|
||||
fileStrategy: 'testStrategy',
|
||||
fileStrategy: 's3',
|
||||
}),
|
||||
registration: expect.objectContaining({
|
||||
socialLogins: ['testLogin'],
|
||||
}),
|
||||
fileStrategy: 'testStrategy',
|
||||
fileStrategy: 's3',
|
||||
interfaceConfig: expect.objectContaining({
|
||||
endpointsMenu: true,
|
||||
modelSelect: true,
|
||||
parameters: true,
|
||||
sidePanel: true,
|
||||
presets: true,
|
||||
}),
|
||||
mcpConfig: null,
|
||||
|
|
|
|||
|
|
@ -192,16 +192,13 @@ export function checkInterfaceConfig(appConfig: AppConfig) {
|
|||
if (i === 0) i++;
|
||||
}
|
||||
|
||||
// warn about config.modelSpecs.enforce if true and if any of these, endpointsMenu, modelSelect, presets, or parameters are enabled, that enforcing model specs can conflict with these options.
|
||||
// warn about config.modelSpecs.enforce if true and if any of these, modelSelect, presets, or parameters are enabled, that enforcing model specs can conflict with these options.
|
||||
if (
|
||||
appConfig?.modelSpecs?.enforce &&
|
||||
(interfaceConfig?.endpointsMenu ||
|
||||
interfaceConfig?.modelSelect ||
|
||||
interfaceConfig?.presets ||
|
||||
interfaceConfig?.parameters)
|
||||
(interfaceConfig?.modelSelect || interfaceConfig?.presets || interfaceConfig?.parameters)
|
||||
) {
|
||||
logger.warn(
|
||||
"Note: Enforcing model specs can conflict with the interface options: endpointsMenu, modelSelect, presets, and parameters. It's recommended to disable these options from the interface or disable enforcing model specs.",
|
||||
"Note: Enforcing model specs can conflict with the interface options: modelSelect, presets, and parameters. It's recommended to disable these options from the interface or disable enforcing model specs.",
|
||||
);
|
||||
if (i === 0) i++;
|
||||
}
|
||||
|
|
|
|||
|
|
@ -10,7 +10,7 @@ const createTestAppConfig = (overrides: Partial<AppConfig> = {}): AppConfig => {
|
|||
version: '1.0.0',
|
||||
cache: true,
|
||||
interface: {
|
||||
endpointsMenu: true,
|
||||
modelSelect: true,
|
||||
},
|
||||
registration: {
|
||||
socialLogins: [],
|
||||
|
|
|
|||
|
|
@ -34,7 +34,7 @@ function createMockCache(namespace = 'app_config') {
|
|||
|
||||
function createDeps(overrides = {}) {
|
||||
const cache = createMockCache();
|
||||
const baseConfig = { interfaceConfig: { endpointsMenu: true }, endpoints: ['openAI'] };
|
||||
const baseConfig = { interfaceConfig: { modelSelect: true }, endpoints: ['openAI'] };
|
||||
|
||||
return {
|
||||
loadBaseConfig: jest.fn().mockResolvedValue(baseConfig),
|
||||
|
|
@ -79,7 +79,7 @@ describe('createAppConfigService', () => {
|
|||
getApplicableConfigs: jest
|
||||
.fn()
|
||||
.mockResolvedValue([
|
||||
{ priority: 10, overrides: { interface: { endpointsMenu: false } }, isActive: true },
|
||||
{ priority: 10, overrides: { interface: { modelSelect: false } }, isActive: true },
|
||||
]),
|
||||
});
|
||||
const { getAppConfig } = createAppConfigService(deps);
|
||||
|
|
@ -125,7 +125,7 @@ describe('createAppConfigService', () => {
|
|||
getApplicableConfigs: jest
|
||||
.fn()
|
||||
.mockResolvedValue([
|
||||
{ priority: 10, overrides: { interface: { endpointsMenu: false } }, isActive: true },
|
||||
{ priority: 10, overrides: { interface: { modelSelect: false } }, isActive: true },
|
||||
]),
|
||||
});
|
||||
const { getAppConfig } = createAppConfigService(deps);
|
||||
|
|
@ -133,7 +133,7 @@ describe('createAppConfigService', () => {
|
|||
const config = await getAppConfig({ role: 'ADMIN' });
|
||||
|
||||
const merged = config as TestConfig;
|
||||
expect(merged.interfaceConfig?.endpointsMenu).toBe(false);
|
||||
expect(merged.interfaceConfig?.modelSelect).toBe(false);
|
||||
expect(merged.endpoints).toEqual(['openAI']);
|
||||
});
|
||||
|
||||
|
|
|
|||
211
packages/api/src/auth/adminPkce.spec.ts
Normal file
211
packages/api/src/auth/adminPkce.spec.ts
Normal file
|
|
@ -0,0 +1,211 @@
|
|||
import { Keyv } from 'keyv';
|
||||
|
||||
jest.mock(
|
||||
'@librechat/data-schemas',
|
||||
() => ({
|
||||
logger: {
|
||||
info: jest.fn(),
|
||||
warn: jest.fn(),
|
||||
error: jest.fn(),
|
||||
},
|
||||
}),
|
||||
{ virtual: true },
|
||||
);
|
||||
|
||||
import { stripCodeChallenge, storeAndStripChallenge } from './exchange';
|
||||
import type { PkceStrippableRequest } from './exchange';
|
||||
|
||||
function makeReq(overrides: Partial<PkceStrippableRequest> = {}): PkceStrippableRequest {
|
||||
return { query: {}, originalUrl: '', url: '', ...overrides };
|
||||
}
|
||||
|
||||
describe('stripCodeChallenge', () => {
|
||||
const challenge = 'a'.repeat(64);
|
||||
|
||||
it('removes code_challenge from req.query and both URL strings (sole param)', () => {
|
||||
const req = makeReq({
|
||||
query: { code_challenge: challenge },
|
||||
originalUrl: `/api/admin/oauth/openid?code_challenge=${challenge}`,
|
||||
url: `/oauth/openid?code_challenge=${challenge}`,
|
||||
});
|
||||
|
||||
stripCodeChallenge(req);
|
||||
|
||||
expect(req.query.code_challenge).toBeUndefined();
|
||||
expect(req.originalUrl).toBe('/api/admin/oauth/openid');
|
||||
expect(req.url).toBe('/oauth/openid');
|
||||
});
|
||||
|
||||
it('preserves other params when code_challenge is last', () => {
|
||||
const req = makeReq({
|
||||
query: { foo: 'bar', code_challenge: challenge },
|
||||
originalUrl: `/oauth/openid?foo=bar&code_challenge=${challenge}`,
|
||||
url: `/oauth/openid?foo=bar&code_challenge=${challenge}`,
|
||||
});
|
||||
|
||||
stripCodeChallenge(req);
|
||||
|
||||
expect(req.query.code_challenge).toBeUndefined();
|
||||
expect(req.query.foo).toBe('bar');
|
||||
expect(req.originalUrl).toBe('/oauth/openid?foo=bar');
|
||||
expect(req.url).toBe('/oauth/openid?foo=bar');
|
||||
});
|
||||
|
||||
// `challenge` (64 hex chars) is defined in the enclosing describe block.
it('preserves other params when code_challenge is first of multiple', () => {
  const req = makeReq({
    query: { code_challenge: challenge, foo: 'bar' },
    originalUrl: `/oauth/openid?code_challenge=${challenge}&foo=bar`,
    url: `/oauth/openid?code_challenge=${challenge}&foo=bar`,
  });

  stripCodeChallenge(req);

  // Only code_challenge is removed; trailing params survive with a clean `?`.
  expect(req.query.code_challenge).toBeUndefined();
  expect(req.originalUrl).toBe('/oauth/openid?foo=bar');
  expect(req.url).toBe('/oauth/openid?foo=bar');
});

it('preserves other params when code_challenge is in the middle', () => {
  const req = makeReq({
    query: { a: '1', code_challenge: challenge, b: '2' },
    originalUrl: `/oauth/openid?a=1&code_challenge=${challenge}&b=2`,
    url: `/oauth/openid?a=1&code_challenge=${challenge}&b=2`,
  });

  stripCodeChallenge(req);

  // Surrounding params are re-joined without a dangling `&`.
  expect(req.query.code_challenge).toBeUndefined();
  expect(req.originalUrl).toBe('/oauth/openid?a=1&b=2');
  expect(req.url).toBe('/oauth/openid?a=1&b=2');
});

it('handles empty code_challenge= value', () => {
  const req = makeReq({
    query: { code_challenge: '' },
    originalUrl: '/oauth/openid?code_challenge=',
    url: '/oauth/openid?code_challenge=',
  });

  stripCodeChallenge(req);

  // An empty value is still stripped, and the bare `?` is removed with it.
  expect(req.query.code_challenge).toBeUndefined();
  expect(req.originalUrl).toBe('/oauth/openid');
  expect(req.url).toBe('/oauth/openid');
});

it('is a no-op when no code_challenge is present', () => {
  const req = makeReq({
    query: { foo: 'bar' },
    originalUrl: '/oauth/openid?foo=bar',
    url: '/oauth/openid?foo=bar',
  });

  stripCodeChallenge(req);

  // Unrelated params and URLs are left byte-identical.
  expect(req.query.foo).toBe('bar');
  expect(req.originalUrl).toBe('/oauth/openid?foo=bar');
  expect(req.url).toBe('/oauth/openid?foo=bar');
});

it('is a no-op on a bare path with no query string', () => {
  const req = makeReq({
    query: {},
    originalUrl: '/oauth/openid',
    url: '/oauth/openid',
  });

  stripCodeChallenge(req);

  // No query string at all: URLs must pass through unchanged.
  expect(req.originalUrl).toBe('/oauth/openid');
  expect(req.url).toBe('/oauth/openid');
});
|
||||
});
|
||||
|
||||
describe('storeAndStripChallenge', () => {
  // 64 lowercase hex chars — the only shape PKCE_CHALLENGE_PATTERN accepts.
  const challenge = 'a'.repeat(64);

  it('stores valid challenge in cache and strips from request', async () => {
    const cache = new Keyv();
    const setSpy = jest.spyOn(cache, 'set');
    const req = makeReq({
      query: { code_challenge: challenge },
      originalUrl: `/oauth/openid?code_challenge=${challenge}`,
      url: `/oauth/openid?code_challenge=${challenge}`,
    });

    const result = await storeAndStripChallenge(cache, req, 'test-state', 'openid');

    // Happy path: value persisted under `pkce:<state>` AND removed from the request.
    expect(result).toBe(true);
    expect(setSpy).toHaveBeenCalledWith(`pkce:test-state`, challenge, expect.any(Number));
    expect(req.query.code_challenge).toBeUndefined();
    expect(req.originalUrl).toBe('/oauth/openid');
    expect(req.url).toBe('/oauth/openid');
  });

  it('strips and returns true when no code_challenge is present', async () => {
    const cache = new Keyv();
    const setSpy = jest.spyOn(cache, 'set');
    const req = makeReq({
      query: {},
      originalUrl: '/oauth/openid',
      url: '/oauth/openid',
    });

    const result = await storeAndStripChallenge(cache, req, 'test-state', 'openid');

    // Absent challenge: nothing cached, request untouched, still a success.
    expect(result).toBe(true);
    expect(setSpy).not.toHaveBeenCalled();
    expect(req.originalUrl).toBe('/oauth/openid');
    expect(req.url).toBe('/oauth/openid');
  });

  it('strips and returns true when code_challenge is invalid (not 64 hex)', async () => {
    const cache = new Keyv();
    const setSpy = jest.spyOn(cache, 'set');
    const req = makeReq({
      query: { code_challenge: 'too-short' },
      originalUrl: '/oauth/openid?code_challenge=too-short',
      url: '/oauth/openid?code_challenge=too-short',
    });

    const result = await storeAndStripChallenge(cache, req, 'test-state', 'openid');

    // Invalid challenge: never cached, but still stripped so it cannot
    // reach the Passport strategy.
    expect(result).toBe(true);
    expect(setSpy).not.toHaveBeenCalled();
    expect(req.query.code_challenge).toBeUndefined();
    expect(req.originalUrl).toBe('/oauth/openid');
    expect(req.url).toBe('/oauth/openid');
  });

  it('returns false and does not strip on cache failure', async () => {
    const cache = new Keyv();
    jest.spyOn(cache, 'set').mockRejectedValueOnce(new Error('cache down'));
    const req = makeReq({
      query: { code_challenge: challenge },
      originalUrl: `/oauth/openid?code_challenge=${challenge}`,
      url: `/oauth/openid?code_challenge=${challenge}`,
    });

    const result = await storeAndStripChallenge(cache, req, 'test-state', 'openid');

    // Cache write failed: the request must be left intact so the caller can abort.
    expect(result).toBe(false);
    expect(req.query.code_challenge).toBe(challenge);
    expect(req.originalUrl).toBe(`/oauth/openid?code_challenge=${challenge}`);
    expect(req.url).toBe(`/oauth/openid?code_challenge=${challenge}`);
  });

  it('reads code_challenge before stripping (ordering guarantee)', async () => {
    const cache = new Keyv();
    const setSpy = jest.spyOn(cache, 'set');
    const req = makeReq({
      query: { code_challenge: challenge },
      originalUrl: `/oauth/openid?code_challenge=${challenge}`,
      url: `/oauth/openid?code_challenge=${challenge}`,
    });

    await storeAndStripChallenge(cache, req, 'test-state', 'openid');

    // The cached value must be the real challenge, proving it was read
    // before the strip mutated the request.
    const storedValue = setSpy.mock.calls[0][1];
    expect(storedValue).toBe(challenge);
  });
});
|
||||
|
|
@ -168,6 +168,73 @@ export async function exchangeAdminCode(
|
|||
};
|
||||
}
|
||||
|
||||
/** PKCE challenge cache TTL: 5 minutes (enough for user to authenticate with IdP) */
|
||||
export const PKCE_CHALLENGE_TTL = 5 * 60 * 1000;
|
||||
/** Regex pattern for valid PKCE challenges: 64 hex characters (SHA-256 hex digest) */
|
||||
export const PKCE_CHALLENGE_PATTERN = /^[a-f0-9]{64}$/;
|
||||
|
||||
/** Removes `code_challenge` from a single URL string, preserving other query params. */
|
||||
const stripChallengeFromUrl = (url: string): string =>
|
||||
url.replace(/\?code_challenge=[^&]*&/, '?').replace(/[?&]code_challenge=[^&]*/, '');
|
||||
|
||||
/** Minimal request shape needed by {@link stripCodeChallenge}. */
|
||||
export interface PkceStrippableRequest {
|
||||
query: Record<string, unknown>;
|
||||
originalUrl: string;
|
||||
url: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Strips `code_challenge` from the request query and URL strings.
|
||||
*
|
||||
* openid-client v6's Passport Strategy uses `currentUrl.searchParams.size === 0`
|
||||
* to distinguish an initial authorization request from an OAuth callback.
|
||||
* The admin-panel-specific `code_challenge` query parameter would cause the
|
||||
* strategy to misclassify the request as a callback and return 401.
|
||||
*
|
||||
* Applied defensively to all providers to ensure the admin-panel-private
|
||||
* `code_challenge` parameter never reaches any Passport strategy.
|
||||
*/
|
||||
export function stripCodeChallenge(req: PkceStrippableRequest): void {
|
||||
delete req.query.code_challenge;
|
||||
req.originalUrl = stripChallengeFromUrl(req.originalUrl);
|
||||
req.url = stripChallengeFromUrl(req.url);
|
||||
}
|
||||
|
||||
/**
|
||||
* Stores the admin-panel PKCE challenge in cache, then strips `code_challenge`
|
||||
* from the request so it doesn't interfere with the Passport strategy.
|
||||
*
|
||||
* Must be called before `passport.authenticate()` — the two operations are
|
||||
* logically atomic: read the challenge from the query, persist it, then remove
|
||||
* the parameter from the request URL.
|
||||
* @param cache - The Keyv cache instance for storing PKCE challenges.
|
||||
* @param req - The Express request to read and mutate.
|
||||
* @param state - The OAuth state value (cache key).
|
||||
* @param provider - Provider name for logging.
|
||||
* @returns True if stored (or no challenge provided); false on cache failure.
|
||||
*/
|
||||
export async function storeAndStripChallenge(
|
||||
cache: Keyv,
|
||||
req: PkceStrippableRequest,
|
||||
state: string,
|
||||
provider: string,
|
||||
): Promise<boolean> {
|
||||
const { code_challenge: codeChallenge } = req.query;
|
||||
if (typeof codeChallenge !== 'string' || !PKCE_CHALLENGE_PATTERN.test(codeChallenge)) {
|
||||
stripCodeChallenge(req);
|
||||
return true;
|
||||
}
|
||||
try {
|
||||
await cache.set(`pkce:${state}`, codeChallenge, PKCE_CHALLENGE_TTL);
|
||||
stripCodeChallenge(req);
|
||||
return true;
|
||||
} catch (err) {
|
||||
logger.error(`[admin/oauth/${provider}] Failed to store PKCE challenge:`, err);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if the redirect URI is for the admin panel (cross-origin).
|
||||
* Uses proper URL parsing to compare origins, handling edge cases where
|
||||
|
|
|
|||
|
|
@ -351,10 +351,13 @@ export class MCPConnectionFactory {
|
|||
config?.oauth_headers ?? {},
|
||||
config?.oauth,
|
||||
this.allowedDomains,
|
||||
// Only reuse stored client when deleteTokens is available for stale-client cleanup
|
||||
this.tokenMethods?.deleteTokens ? this.tokenMethods.findToken : undefined,
|
||||
);
|
||||
|
||||
if (existingFlow) {
|
||||
const oldState = (existingFlow.metadata as MCPOAuthFlowMetadata)?.state;
|
||||
const oldMeta = existingFlow.metadata as MCPOAuthFlowMetadata | undefined;
|
||||
const oldState = oldMeta?.state;
|
||||
await this.flowManager!.deleteFlow(newFlowId, 'mcp_oauth');
|
||||
if (oldState) {
|
||||
await MCPOAuthHandler.deleteStateMapping(oldState, this.flowManager!);
|
||||
|
|
@ -368,9 +371,12 @@ export class MCPConnectionFactory {
|
|||
|
||||
// Start monitoring in background — createFlow will find the existing PENDING state
|
||||
// written by initFlow above, so metadata arg is unused (pass {} to make that explicit)
|
||||
this.flowManager!.createFlow(newFlowId, 'mcp_oauth', {}, this.signal).catch((error) => {
|
||||
logger.debug(`${this.logPrefix} OAuth flow monitor ended`, error);
|
||||
});
|
||||
this.flowManager!.createFlow(newFlowId, 'mcp_oauth', {}, this.signal).catch(
|
||||
async (error) => {
|
||||
logger.debug(`${this.logPrefix} OAuth flow monitor ended`, error);
|
||||
await this.clearStaleClientIfRejected(flowMetadata.reusedStoredClient, error);
|
||||
},
|
||||
);
|
||||
|
||||
if (this.oauthStart) {
|
||||
logger.info(`${this.logPrefix} OAuth flow started, issuing authorization URL`);
|
||||
|
|
@ -412,7 +418,7 @@ export class MCPConnectionFactory {
|
|||
if (result?.tokens) {
|
||||
connection.emit('oauthHandled');
|
||||
} else {
|
||||
// OAuth failed, emit oauthFailed to properly reject the promise
|
||||
await this.clearStaleClientIfRejected(result?.reusedStoredClient, result?.error);
|
||||
logger.warn(`${this.logPrefix} OAuth failed, emitting oauthFailed event`);
|
||||
connection.emit('oauthFailed', new Error('OAuth authentication failed'));
|
||||
}
|
||||
|
|
@ -466,6 +472,49 @@ export class MCPConnectionFactory {
|
|||
}
|
||||
}
|
||||
|
||||
/** Clears stored client registration if the error indicates client rejection */
|
||||
private async clearStaleClientIfRejected(
|
||||
reusedStoredClient: boolean | undefined,
|
||||
error: unknown,
|
||||
): Promise<void> {
|
||||
if (!reusedStoredClient || !this.tokenMethods?.deleteTokens) {
|
||||
return;
|
||||
}
|
||||
if (!MCPConnectionFactory.isClientRejection(error)) {
|
||||
return;
|
||||
}
|
||||
await MCPTokenStorage.deleteClientRegistration({
|
||||
userId: this.userId!,
|
||||
serverName: this.serverName,
|
||||
deleteTokens: this.tokenMethods.deleteTokens,
|
||||
}).catch((err) => {
|
||||
logger.warn(`${this.logPrefix} Failed to clear stale client registration`, err);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks whether an error indicates the OAuth client registration was rejected.
|
||||
* Includes RFC 6749 §5.2 standard codes (`invalid_client`, `unauthorized_client`)
|
||||
* and known vendor-specific patterns (Okta: `client_id mismatch`, Auth0: `client not found`,
|
||||
* generic: `unknown client`).
|
||||
*/
|
||||
static isClientRejection(error: unknown): boolean {
|
||||
if (!error || typeof error !== 'object') {
|
||||
return false;
|
||||
}
|
||||
if ('message' in error && typeof error.message === 'string') {
|
||||
const msg = error.message.toLowerCase();
|
||||
return (
|
||||
msg.includes('invalid_client') ||
|
||||
msg.includes('unauthorized_client') ||
|
||||
msg.includes('client_id mismatch') ||
|
||||
msg.includes('client not found') ||
|
||||
msg.includes('unknown client')
|
||||
);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
// Determines if an error indicates OAuth authentication is required
|
||||
private isOAuthError(error: unknown): boolean {
|
||||
if (!error || typeof error !== 'object') {
|
||||
|
|
@ -505,6 +554,8 @@ export class MCPConnectionFactory {
|
|||
tokens: MCPOAuthTokens | null;
|
||||
clientInfo?: OAuthClientInformation;
|
||||
metadata?: OAuthMetadata;
|
||||
reusedStoredClient?: boolean;
|
||||
error?: unknown;
|
||||
} | null> {
|
||||
const serverUrl = (this.serverConfig as t.SSEOptions | t.StreamableHTTPOptions).url;
|
||||
logger.debug(
|
||||
|
|
@ -519,6 +570,8 @@ export class MCPConnectionFactory {
|
|||
return null;
|
||||
}
|
||||
|
||||
let reusedStoredClient = false;
|
||||
|
||||
try {
|
||||
logger.debug(`${this.logPrefix} Checking for existing OAuth flow for ${this.serverName}...`);
|
||||
|
||||
|
|
@ -549,6 +602,7 @@ export class MCPConnectionFactory {
|
|||
await this.oauthStart(storedAuthUrl);
|
||||
}
|
||||
|
||||
reusedStoredClient = flowMeta?.reusedStoredClient === true;
|
||||
const tokens = await this.flowManager.createFlow(flowId, 'mcp_oauth', {}, this.signal);
|
||||
if (typeof this.oauthEnd === 'function') {
|
||||
await this.oauthEnd();
|
||||
|
|
@ -560,6 +614,7 @@ export class MCPConnectionFactory {
|
|||
tokens,
|
||||
clientInfo: flowMeta?.clientInfo,
|
||||
metadata: flowMeta?.metadata,
|
||||
reusedStoredClient,
|
||||
};
|
||||
}
|
||||
|
||||
|
|
@ -615,8 +670,11 @@ export class MCPConnectionFactory {
|
|||
this.serverConfig.oauth_headers ?? {},
|
||||
this.serverConfig.oauth,
|
||||
this.allowedDomains,
|
||||
this.tokenMethods?.deleteTokens ? this.tokenMethods.findToken : undefined,
|
||||
);
|
||||
|
||||
reusedStoredClient = flowMetadata.reusedStoredClient === true;
|
||||
|
||||
// Store flow state BEFORE redirecting so the callback can find it
|
||||
const metadataWithUrl = { ...flowMetadata, authorizationUrl };
|
||||
await this.flowManager.initFlow(newFlowId, 'mcp_oauth', metadataWithUrl);
|
||||
|
|
@ -639,18 +697,15 @@ export class MCPConnectionFactory {
|
|||
}
|
||||
logger.info(`${this.logPrefix} OAuth flow completed, tokens received for ${this.serverName}`);
|
||||
|
||||
/** Client information from the flow metadata */
|
||||
const clientInfo = flowMetadata?.clientInfo;
|
||||
const metadata = flowMetadata?.metadata;
|
||||
|
||||
return {
|
||||
tokens,
|
||||
clientInfo,
|
||||
metadata,
|
||||
clientInfo: flowMetadata.clientInfo,
|
||||
metadata: flowMetadata.metadata,
|
||||
reusedStoredClient,
|
||||
};
|
||||
} catch (error) {
|
||||
logger.error(`${this.logPrefix} Failed to complete OAuth flow for ${this.serverName}`, error);
|
||||
return null;
|
||||
return { tokens: null, reusedStoredClient, error };
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -5,7 +5,7 @@ import type { MCPOAuthTokens } from '~/mcp/oauth';
|
|||
import type * as t from '~/mcp/types';
|
||||
import { MCPConnectionFactory } from '~/mcp/MCPConnectionFactory';
|
||||
import { MCPConnection } from '~/mcp/connection';
|
||||
import { MCPOAuthHandler } from '~/mcp/oauth';
|
||||
import { MCPOAuthHandler, MCPTokenStorage } from '~/mcp/oauth';
|
||||
import { processMCPEnv } from '~/utils';
|
||||
|
||||
jest.mock('~/mcp/connection');
|
||||
|
|
@ -24,6 +24,7 @@ const mockLogger = logger as jest.Mocked<typeof logger>;
|
|||
const mockProcessMCPEnv = processMCPEnv as jest.MockedFunction<typeof processMCPEnv>;
|
||||
const mockMCPConnection = MCPConnection as jest.MockedClass<typeof MCPConnection>;
|
||||
const mockMCPOAuthHandler = MCPOAuthHandler as jest.Mocked<typeof MCPOAuthHandler>;
|
||||
const mockMCPTokenStorage = MCPTokenStorage as jest.Mocked<typeof MCPTokenStorage>;
|
||||
|
||||
describe('MCPConnectionFactory', () => {
|
||||
let mockUser: IUser | undefined;
|
||||
|
|
@ -270,6 +271,7 @@ describe('MCPConnectionFactory', () => {
|
|||
{},
|
||||
undefined,
|
||||
undefined,
|
||||
oauthOptions.tokenMethods.findToken,
|
||||
);
|
||||
|
||||
// initFlow must be awaited BEFORE the redirect to guarantee state is stored
|
||||
|
|
@ -292,6 +294,78 @@ describe('MCPConnectionFactory', () => {
|
|||
);
|
||||
});
|
||||
|
||||
it('should clear stale client registration when returnOnOAuth flow fails with client rejection', async () => {
|
||||
const basicOptions = {
|
||||
serverName: 'test-server',
|
||||
serverConfig: {
|
||||
...mockServerConfig,
|
||||
url: 'https://api.example.com',
|
||||
type: 'sse' as const,
|
||||
} as t.SSEOptions,
|
||||
};
|
||||
|
||||
const deleteTokensSpy = jest.fn().mockResolvedValue({ acknowledged: true, deletedCount: 1 });
|
||||
const oauthOptions = {
|
||||
useOAuth: true as const,
|
||||
user: mockUser,
|
||||
flowManager: mockFlowManager,
|
||||
returnOnOAuth: true,
|
||||
oauthStart: jest.fn(),
|
||||
tokenMethods: {
|
||||
findToken: jest.fn(),
|
||||
createToken: jest.fn(),
|
||||
updateToken: jest.fn(),
|
||||
deleteTokens: deleteTokensSpy,
|
||||
},
|
||||
};
|
||||
|
||||
const mockFlowData = {
|
||||
authorizationUrl: 'https://auth.example.com',
|
||||
flowId: 'flow123',
|
||||
flowMetadata: {
|
||||
serverName: 'test-server',
|
||||
userId: 'user123',
|
||||
serverUrl: 'https://api.example.com',
|
||||
state: 'random-state',
|
||||
clientInfo: { client_id: 'stale-client' },
|
||||
reusedStoredClient: true,
|
||||
},
|
||||
};
|
||||
|
||||
mockMCPOAuthHandler.initiateOAuthFlow.mockResolvedValue(mockFlowData);
|
||||
mockMCPTokenStorage.deleteClientRegistration.mockResolvedValue(undefined);
|
||||
// createFlow rejects with invalid_client — simulating stale client rejection
|
||||
mockFlowManager.createFlow.mockRejectedValue(new Error('invalid_client'));
|
||||
mockConnectionInstance.isConnected.mockResolvedValue(false);
|
||||
|
||||
let oauthRequiredHandler: (data: Record<string, unknown>) => Promise<void>;
|
||||
mockConnectionInstance.on.mockImplementation((event, handler) => {
|
||||
if (event === 'oauthRequired') {
|
||||
oauthRequiredHandler = handler as (data: Record<string, unknown>) => Promise<void>;
|
||||
}
|
||||
return mockConnectionInstance;
|
||||
});
|
||||
|
||||
try {
|
||||
await MCPConnectionFactory.create(basicOptions, oauthOptions);
|
||||
} catch {
|
||||
// Expected
|
||||
}
|
||||
|
||||
await oauthRequiredHandler!({ serverUrl: 'https://api.example.com' });
|
||||
|
||||
// Drain microtasks so the background .catch() handler completes
|
||||
await new Promise((r) => setImmediate(r));
|
||||
|
||||
// deleteClientRegistration should have been called via clearStaleClientIfRejected
|
||||
expect(mockMCPTokenStorage.deleteClientRegistration).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
userId: 'user123',
|
||||
serverName: 'test-server',
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should skip new OAuth flow initiation when a PENDING flow already exists (returnOnOAuth)', async () => {
|
||||
const basicOptions = {
|
||||
serverName: 'test-server',
|
||||
|
|
|
|||
|
|
@ -0,0 +1,508 @@
|
|||
/**
|
||||
* Tests for MCP OAuth client registration reuse on reconnection.
|
||||
*
|
||||
* Documents the client_id mismatch bug in horizontally scaled deployments:
|
||||
*
|
||||
* When LibreChat runs with multiple replicas (e.g., 3 behind a load balancer),
|
||||
* each replica independently calls registerClient() on the OAuth server's /register
|
||||
* endpoint, getting a different client_id. The check-then-act race between the
|
||||
* PENDING flow check and storing the flow state means that even with a shared
|
||||
* Redis-backed flow store, replicas slip through before any has stored PENDING:
|
||||
*
|
||||
* Replica A: getFlowState() → null → initiateOAuthFlow() → registers client_A
|
||||
* Replica B: getFlowState() → null → initiateOAuthFlow() → registers client_B
|
||||
* Replica A: initFlow(metadata with client_A) → stored in Redis
|
||||
* Replica B: initFlow(metadata with client_B) → OVERWRITES in Redis
|
||||
* User completes OAuth in browser with client_A in the URL
|
||||
* Callback reads Redis → finds client_B → token exchange fails: "client_id mismatch"
|
||||
*
|
||||
* The fix stabilizes reconnection flows: before calling registerClient(), check
|
||||
* MongoDB (shared across replicas) for an existing client registration from a prior
|
||||
* successful OAuth flow and reuse it. This eliminates redundant /register calls on
|
||||
* reconnection. Note: the first-time concurrent auth race is NOT addressed by this
|
||||
* fix and would require a distributed lock (e.g., Redis SETNX) around registration.
|
||||
*/
|
||||
|
||||
import type { OAuthClientInformation } from '@modelcontextprotocol/sdk/shared/auth.js';
|
||||
import type { OAuthTestServer } from './helpers/oauthTestServer';
|
||||
import { InMemoryTokenStore, createOAuthMCPServer } from './helpers/oauthTestServer';
|
||||
import { MCPConnectionFactory } from '~/mcp/MCPConnectionFactory';
|
||||
import { MCPOAuthHandler, MCPTokenStorage } from '~/mcp/oauth';
|
||||
|
||||
jest.mock('@librechat/data-schemas', () => ({
|
||||
logger: {
|
||||
info: jest.fn(),
|
||||
warn: jest.fn(),
|
||||
error: jest.fn(),
|
||||
debug: jest.fn(),
|
||||
},
|
||||
getTenantId: jest.fn(),
|
||||
SYSTEM_TENANT_ID: '__SYSTEM__',
|
||||
encryptV2: jest.fn(async (val: string) => `enc:${val}`),
|
||||
decryptV2: jest.fn(async (val: string) => val.replace(/^enc:/, '')),
|
||||
}));
|
||||
|
||||
jest.mock('~/auth', () => ({
|
||||
createSSRFSafeUndiciConnect: jest.fn(() => undefined),
|
||||
resolveHostnameSSRF: jest.fn(async () => false),
|
||||
isSSRFTarget: jest.fn(async () => false),
|
||||
isOAuthUrlAllowed: jest.fn(() => true),
|
||||
}));
|
||||
|
||||
jest.mock('~/mcp/mcpConfig', () => ({
|
||||
mcpConfig: { CONNECTION_CHECK_TTL: 0, USER_CONNECTION_IDLE_TIMEOUT: 30 * 60 * 1000 },
|
||||
}));
|
||||
|
||||
describe('MCPOAuthHandler - client registration reuse on reconnection', () => {
|
||||
let server: OAuthTestServer | undefined;
|
||||
let originalDomainServer: string | undefined;
|
||||
|
||||
beforeEach(() => {
|
||||
originalDomainServer = process.env.DOMAIN_SERVER;
|
||||
process.env.DOMAIN_SERVER = 'http://localhost:3080';
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
if (originalDomainServer !== undefined) {
|
||||
process.env.DOMAIN_SERVER = originalDomainServer;
|
||||
} else {
|
||||
delete process.env.DOMAIN_SERVER;
|
||||
}
|
||||
if (server) {
|
||||
await server.close();
|
||||
server = undefined;
|
||||
}
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
describe('Race condition reproduction: concurrent replicas re-register', () => {
|
||||
it('should produce duplicate client registrations when two replicas initiate flows concurrently', async () => {
|
||||
server = await createOAuthMCPServer({ tokenTTLMs: 60000 });
|
||||
|
||||
const [resultA, resultB] = await Promise.all([
|
||||
MCPOAuthHandler.initiateOAuthFlow('test-server', server.url, 'user-1', {}),
|
||||
MCPOAuthHandler.initiateOAuthFlow('test-server', server.url, 'user-1', {}),
|
||||
]);
|
||||
|
||||
expect(resultA.authorizationUrl).toBeTruthy();
|
||||
expect(resultB.authorizationUrl).toBeTruthy();
|
||||
expect(server.registeredClients.size).toBe(2);
|
||||
|
||||
const clientA = resultA.flowMetadata.clientInfo?.client_id;
|
||||
const clientB = resultB.flowMetadata.clientInfo?.client_id;
|
||||
expect(clientA).not.toBe(clientB);
|
||||
});
|
||||
|
||||
it('should re-register on every sequential initiateOAuthFlow call (reconnections)', async () => {
|
||||
server = await createOAuthMCPServer({ tokenTTLMs: 60000 });
|
||||
|
||||
await MCPOAuthHandler.initiateOAuthFlow('test-server', server.url, 'user-1', {});
|
||||
expect(server.registeredClients.size).toBe(1);
|
||||
|
||||
await MCPOAuthHandler.initiateOAuthFlow('test-server', server.url, 'user-1', {});
|
||||
expect(server.registeredClients.size).toBe(2);
|
||||
|
||||
await MCPOAuthHandler.initiateOAuthFlow('test-server', server.url, 'user-1', {});
|
||||
expect(server.registeredClients.size).toBe(3);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Client reuse via findToken on reconnection', () => {
|
||||
it('should reuse an existing client registration when findToken returns stored client info', async () => {
|
||||
server = await createOAuthMCPServer({ tokenTTLMs: 60000 });
|
||||
const tokenStore = new InMemoryTokenStore();
|
||||
|
||||
const firstResult = await MCPOAuthHandler.initiateOAuthFlow(
|
||||
'test-server',
|
||||
server.url,
|
||||
'user-1',
|
||||
{},
|
||||
undefined,
|
||||
undefined,
|
||||
tokenStore.findToken,
|
||||
);
|
||||
expect(server.registeredClients.size).toBe(1);
|
||||
const firstClientId = firstResult.flowMetadata.clientInfo?.client_id;
|
||||
|
||||
await MCPTokenStorage.storeTokens({
|
||||
userId: 'user-1',
|
||||
serverName: 'test-server',
|
||||
tokens: { access_token: 'test-token', token_type: 'Bearer' },
|
||||
createToken: tokenStore.createToken,
|
||||
updateToken: tokenStore.updateToken,
|
||||
findToken: tokenStore.findToken,
|
||||
clientInfo: firstResult.flowMetadata.clientInfo,
|
||||
metadata: firstResult.flowMetadata.metadata,
|
||||
});
|
||||
|
||||
const secondResult = await MCPOAuthHandler.initiateOAuthFlow(
|
||||
'test-server',
|
||||
server.url,
|
||||
'user-1',
|
||||
{},
|
||||
undefined,
|
||||
undefined,
|
||||
tokenStore.findToken,
|
||||
);
|
||||
|
||||
expect(server.registeredClients.size).toBe(1);
|
||||
expect(secondResult.flowMetadata.clientInfo?.client_id).toBe(firstClientId);
|
||||
expect(secondResult.flowMetadata.reusedStoredClient).toBe(true);
|
||||
});
|
||||
|
||||
it('should reuse the same client when two reconnections fire concurrently with pre-seeded token', async () => {
|
||||
server = await createOAuthMCPServer({ tokenTTLMs: 60000 });
|
||||
const tokenStore = new InMemoryTokenStore();
|
||||
|
||||
const initialResult = await MCPOAuthHandler.initiateOAuthFlow(
|
||||
'test-server',
|
||||
server.url,
|
||||
'user-1',
|
||||
{},
|
||||
undefined,
|
||||
undefined,
|
||||
tokenStore.findToken,
|
||||
);
|
||||
const storedClientId = initialResult.flowMetadata.clientInfo?.client_id;
|
||||
|
||||
await MCPTokenStorage.storeTokens({
|
||||
userId: 'user-1',
|
||||
serverName: 'test-server',
|
||||
tokens: { access_token: 'test-token', token_type: 'Bearer' },
|
||||
createToken: tokenStore.createToken,
|
||||
updateToken: tokenStore.updateToken,
|
||||
findToken: tokenStore.findToken,
|
||||
clientInfo: initialResult.flowMetadata.clientInfo,
|
||||
metadata: initialResult.flowMetadata.metadata,
|
||||
});
|
||||
|
||||
const [resultA, resultB] = await Promise.all([
|
||||
MCPOAuthHandler.initiateOAuthFlow(
|
||||
'test-server',
|
||||
server.url,
|
||||
'user-1',
|
||||
{},
|
||||
undefined,
|
||||
undefined,
|
||||
tokenStore.findToken,
|
||||
),
|
||||
MCPOAuthHandler.initiateOAuthFlow(
|
||||
'test-server',
|
||||
server.url,
|
||||
'user-1',
|
||||
{},
|
||||
undefined,
|
||||
undefined,
|
||||
tokenStore.findToken,
|
||||
),
|
||||
]);
|
||||
|
||||
// Both should reuse the stored client — only the initial registration should exist
|
||||
expect(server.registeredClients.size).toBe(1);
|
||||
expect(resultA.flowMetadata.clientInfo?.client_id).toBe(storedClientId);
|
||||
expect(resultB.flowMetadata.clientInfo?.client_id).toBe(storedClientId);
|
||||
});
|
||||
|
||||
it('should re-register when stored redirect_uri differs from current', async () => {
|
||||
server = await createOAuthMCPServer({ tokenTTLMs: 60000 });
|
||||
const tokenStore = new InMemoryTokenStore();
|
||||
|
||||
await MCPTokenStorage.storeTokens({
|
||||
userId: 'user-1',
|
||||
serverName: 'test-server',
|
||||
tokens: { access_token: 'old-token', token_type: 'Bearer' },
|
||||
createToken: tokenStore.createToken,
|
||||
updateToken: tokenStore.updateToken,
|
||||
findToken: tokenStore.findToken,
|
||||
clientInfo: {
|
||||
client_id: 'old-client-id',
|
||||
client_secret: 'old-secret',
|
||||
redirect_uris: ['http://old-domain.com/api/mcp/test-server/oauth/callback'],
|
||||
} as OAuthClientInformation & { redirect_uris: string[] },
|
||||
});
|
||||
|
||||
const result = await MCPOAuthHandler.initiateOAuthFlow(
|
||||
'test-server',
|
||||
server.url,
|
||||
'user-1',
|
||||
{},
|
||||
undefined,
|
||||
undefined,
|
||||
tokenStore.findToken,
|
||||
);
|
||||
|
||||
expect(server.registeredClients.size).toBe(1);
|
||||
expect(result.flowMetadata.clientInfo?.client_id).not.toBe('old-client-id');
|
||||
});
|
||||
|
||||
it('should re-register when stored client has empty redirect_uris', async () => {
|
||||
server = await createOAuthMCPServer({ tokenTTLMs: 60000 });
|
||||
const tokenStore = new InMemoryTokenStore();
|
||||
|
||||
await MCPTokenStorage.storeTokens({
|
||||
userId: 'user-1',
|
||||
serverName: 'test-server',
|
||||
tokens: { access_token: 'old-token', token_type: 'Bearer' },
|
||||
createToken: tokenStore.createToken,
|
||||
updateToken: tokenStore.updateToken,
|
||||
findToken: tokenStore.findToken,
|
||||
clientInfo: {
|
||||
client_id: 'empty-redirect-client',
|
||||
client_secret: 'secret',
|
||||
redirect_uris: [],
|
||||
} as OAuthClientInformation & { redirect_uris: string[] },
|
||||
});
|
||||
|
||||
const result = await MCPOAuthHandler.initiateOAuthFlow(
|
||||
'test-server',
|
||||
server.url,
|
||||
'user-1',
|
||||
{},
|
||||
undefined,
|
||||
undefined,
|
||||
tokenStore.findToken,
|
||||
);
|
||||
|
||||
// Should NOT reuse the client with empty redirect_uris — must re-register
|
||||
expect(server.registeredClients.size).toBe(1);
|
||||
expect(result.flowMetadata.clientInfo?.client_id).not.toBe('empty-redirect-client');
|
||||
});
|
||||
|
||||
it('should fall back to registration when findToken lookup throws', async () => {
|
||||
server = await createOAuthMCPServer({ tokenTTLMs: 60000 });
|
||||
const failingFindToken = jest.fn().mockRejectedValue(new Error('DB connection lost'));
|
||||
|
||||
const result = await MCPOAuthHandler.initiateOAuthFlow(
|
||||
'test-server',
|
||||
server.url,
|
||||
'user-1',
|
||||
{},
|
||||
undefined,
|
||||
undefined,
|
||||
failingFindToken,
|
||||
);
|
||||
|
||||
expect(server.registeredClients.size).toBe(1);
|
||||
expect(result.flowMetadata.clientInfo?.client_id).toBeTruthy();
|
||||
});
|
||||
|
||||
it('should not reuse a stale client on retry after a failed flow', async () => {
|
||||
server = await createOAuthMCPServer({ tokenTTLMs: 60000 });
|
||||
const tokenStore = new InMemoryTokenStore();
|
||||
|
||||
// Seed a stored client with a client_id that the OAuth server doesn't recognize,
|
||||
// but with matching issuer and redirect_uri so reuse logic accepts it
|
||||
const serverIssuer = `http://127.0.0.1:${server.port}`;
|
||||
await MCPTokenStorage.storeTokens({
|
||||
userId: 'user-1',
|
||||
serverName: 'test-server',
|
||||
tokens: { access_token: 'old-token', token_type: 'Bearer' },
|
||||
createToken: tokenStore.createToken,
|
||||
updateToken: tokenStore.updateToken,
|
||||
findToken: tokenStore.findToken,
|
||||
clientInfo: {
|
||||
client_id: 'stale-client-that-oauth-server-deleted',
|
||||
client_secret: 'stale-secret',
|
||||
redirect_uris: ['http://localhost:3080/api/mcp/test-server/oauth/callback'],
|
||||
} as OAuthClientInformation & { redirect_uris: string[] },
|
||||
metadata: {
|
||||
issuer: serverIssuer,
|
||||
authorization_endpoint: `${serverIssuer}/authorize`,
|
||||
token_endpoint: `${serverIssuer}/token`,
|
||||
},
|
||||
});
|
||||
|
||||
// First attempt: reuses the stale client (this is expected — we don't know it's stale yet)
|
||||
const firstResult = await MCPOAuthHandler.initiateOAuthFlow(
|
||||
'test-server',
|
||||
server.url,
|
||||
'user-1',
|
||||
{},
|
||||
undefined,
|
||||
undefined,
|
||||
tokenStore.findToken,
|
||||
);
|
||||
expect(firstResult.flowMetadata.clientInfo?.client_id).toBe(
|
||||
'stale-client-that-oauth-server-deleted',
|
||||
);
|
||||
expect(firstResult.flowMetadata.reusedStoredClient).toBe(true);
|
||||
expect(server.registeredClients.size).toBe(0);
|
||||
|
||||
// Simulate what MCPConnectionFactory does on failure when reusedStoredClient is set:
|
||||
// clear the stored client registration so the next attempt does a fresh DCR
|
||||
await MCPTokenStorage.deleteClientRegistration({
|
||||
userId: 'user-1',
|
||||
serverName: 'test-server',
|
||||
deleteTokens: tokenStore.deleteTokens,
|
||||
});
|
||||
|
||||
// Second attempt (retry after failure): should do a fresh DCR
|
||||
const secondResult = await MCPOAuthHandler.initiateOAuthFlow(
|
||||
'test-server',
|
||||
server.url,
|
||||
'user-1',
|
||||
{},
|
||||
undefined,
|
||||
undefined,
|
||||
tokenStore.findToken,
|
||||
);
|
||||
|
||||
expect(server.registeredClients.size).toBe(1);
|
||||
expect(secondResult.flowMetadata.clientInfo?.client_id).not.toBe(
|
||||
'stale-client-that-oauth-server-deleted',
|
||||
);
|
||||
expect(secondResult.flowMetadata.reusedStoredClient).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should re-register when stored client was issued by a different authorization server', async () => {
|
||||
server = await createOAuthMCPServer({ tokenTTLMs: 60000 });
|
||||
const tokenStore = new InMemoryTokenStore();
|
||||
|
||||
// Seed a stored client that was registered with a different issuer
|
||||
await MCPTokenStorage.storeTokens({
|
||||
userId: 'user-1',
|
||||
serverName: 'test-server',
|
||||
tokens: { access_token: 'old-token', token_type: 'Bearer' },
|
||||
createToken: tokenStore.createToken,
|
||||
updateToken: tokenStore.updateToken,
|
||||
findToken: tokenStore.findToken,
|
||||
clientInfo: {
|
||||
client_id: 'old-issuer-client',
|
||||
client_secret: 'secret',
|
||||
redirect_uris: ['http://localhost:3080/api/mcp/test-server/oauth/callback'],
|
||||
} as OAuthClientInformation & { redirect_uris: string[] },
|
||||
metadata: {
|
||||
issuer: 'https://old-auth-server.example.com',
|
||||
authorization_endpoint: 'https://old-auth-server.example.com/authorize',
|
||||
token_endpoint: 'https://old-auth-server.example.com/token',
|
||||
},
|
||||
});
|
||||
|
||||
const result = await MCPOAuthHandler.initiateOAuthFlow(
|
||||
'test-server',
|
||||
server.url,
|
||||
'user-1',
|
||||
{},
|
||||
undefined,
|
||||
undefined,
|
||||
tokenStore.findToken,
|
||||
);
|
||||
|
||||
// Should have registered a NEW client because the issuer changed
|
||||
expect(server.registeredClients.size).toBe(1);
|
||||
expect(result.flowMetadata.clientInfo?.client_id).not.toBe('old-issuer-client');
|
||||
expect(result.flowMetadata.reusedStoredClient).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should not call getClientInfoAndMetadata when findToken is not provided', async () => {
|
||||
server = await createOAuthMCPServer({ tokenTTLMs: 60000 });
|
||||
const spy = jest.spyOn(MCPTokenStorage, 'getClientInfoAndMetadata');
|
||||
|
||||
await MCPOAuthHandler.initiateOAuthFlow('test-server', server.url, 'user-1', {});
|
||||
|
||||
expect(spy).not.toHaveBeenCalled();
|
||||
spy.mockRestore();
|
||||
});
|
||||
});
|
||||
|
||||
describe('isClientRejection', () => {
|
||||
it('should detect invalid_client errors', () => {
|
||||
expect(MCPConnectionFactory.isClientRejection(new Error('invalid_client'))).toBe(true);
|
||||
expect(
|
||||
MCPConnectionFactory.isClientRejection(
|
||||
new Error('OAuth token exchange failed: invalid_client'),
|
||||
),
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
it('should detect unauthorized_client errors', () => {
|
||||
expect(MCPConnectionFactory.isClientRejection(new Error('unauthorized_client'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should detect client_id mismatch errors', () => {
|
||||
expect(
|
||||
MCPConnectionFactory.isClientRejection(
|
||||
new Error('Token exchange rejected: client_id mismatch'),
|
||||
),
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
it('should detect client not found errors', () => {
|
||||
expect(MCPConnectionFactory.isClientRejection(new Error('client not found'))).toBe(true);
|
||||
expect(MCPConnectionFactory.isClientRejection(new Error('unknown client'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should not match unrelated errors', () => {
|
||||
expect(MCPConnectionFactory.isClientRejection(new Error('timeout'))).toBe(false);
|
||||
expect(MCPConnectionFactory.isClientRejection(new Error('Flow state not found'))).toBe(false);
|
||||
expect(MCPConnectionFactory.isClientRejection(new Error('user denied access'))).toBe(false);
|
||||
expect(MCPConnectionFactory.isClientRejection(null)).toBe(false);
|
||||
expect(MCPConnectionFactory.isClientRejection(undefined)).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Token exchange with enforced client_id', () => {
|
||||
it('should reject token exchange when client_id does not match registered client', async () => {
|
||||
server = await createOAuthMCPServer({ tokenTTLMs: 60000, enforceClientId: true });
|
||||
|
||||
// Register a real client via DCR
|
||||
const regRes = await fetch(`${server.url}register`, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ redirect_uris: ['http://localhost/callback'] }),
|
||||
});
|
||||
const registered = (await regRes.json()) as { client_id: string };
|
||||
|
||||
// Get an auth code bound to the registered client_id
|
||||
const authRes = await fetch(
|
||||
`${server.url}authorize?redirect_uri=http://localhost/callback&state=s1&client_id=${registered.client_id}`,
|
||||
{ redirect: 'manual' },
|
||||
);
|
||||
const location = authRes.headers.get('location') ?? '';
|
||||
const code = new URL(location).searchParams.get('code');
|
||||
|
||||
// Try to exchange the code with a DIFFERENT (stale) client_id
|
||||
const tokenRes = await fetch(`${server.url}token`, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
|
||||
body: `grant_type=authorization_code&code=${code}&client_id=stale-client-id`,
|
||||
});
|
||||
|
||||
expect(tokenRes.status).toBe(401);
|
||||
const body = (await tokenRes.json()) as { error: string; error_description?: string };
|
||||
expect(body.error).toBe('invalid_client');
|
||||
|
||||
// Verify isClientRejection would match this error
|
||||
const errorMsg = body.error_description ?? body.error;
|
||||
expect(MCPConnectionFactory.isClientRejection(new Error(errorMsg))).toBe(true);
|
||||
});
|
||||
|
||||
it('should accept token exchange when client_id matches', async () => {
|
||||
server = await createOAuthMCPServer({ tokenTTLMs: 60000, enforceClientId: true });
|
||||
|
||||
const regRes = await fetch(`${server.url}register`, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ redirect_uris: ['http://localhost/callback'] }),
|
||||
});
|
||||
const registered = (await regRes.json()) as { client_id: string };
|
||||
|
||||
const authRes = await fetch(
|
||||
`${server.url}authorize?redirect_uri=http://localhost/callback&state=s1&client_id=${registered.client_id}`,
|
||||
{ redirect: 'manual' },
|
||||
);
|
||||
const location = authRes.headers.get('location') ?? '';
|
||||
const code = new URL(location).searchParams.get('code');
|
||||
|
||||
const tokenRes = await fetch(`${server.url}token`, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
|
||||
body: `grant_type=authorization_code&code=${code}&client_id=${registered.client_id}`,
|
||||
});
|
||||
|
||||
expect(tokenRes.status).toBe(200);
|
||||
const body = (await tokenRes.json()) as { access_token: string };
|
||||
expect(body.access_token).toBeTruthy();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
@ -20,6 +20,12 @@ jest.mock('@modelcontextprotocol/sdk/client/auth.js', () => ({
|
|||
exchangeAuthorization: jest.fn(),
|
||||
}));
|
||||
|
||||
jest.mock('../../mcp/oauth/tokens', () => ({
|
||||
MCPTokenStorage: {
|
||||
getClientInfoAndMetadata: jest.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
import {
|
||||
startAuthorization,
|
||||
discoverAuthorizationServerMetadata,
|
||||
|
|
@ -27,6 +33,7 @@ import {
|
|||
registerClient,
|
||||
exchangeAuthorization,
|
||||
} from '@modelcontextprotocol/sdk/client/auth.js';
|
||||
import { MCPTokenStorage } from '../../mcp/oauth/tokens';
|
||||
import { FlowStateManager } from '../../flow/manager';
|
||||
|
||||
const mockStartAuthorization = startAuthorization as jest.MockedFunction<typeof startAuthorization>;
|
||||
|
|
@ -42,6 +49,10 @@ const mockRegisterClient = registerClient as jest.MockedFunction<typeof register
|
|||
const mockExchangeAuthorization = exchangeAuthorization as jest.MockedFunction<
|
||||
typeof exchangeAuthorization
|
||||
>;
|
||||
const mockGetClientInfoAndMetadata =
|
||||
MCPTokenStorage.getClientInfoAndMetadata as jest.MockedFunction<
|
||||
typeof MCPTokenStorage.getClientInfoAndMetadata
|
||||
>;
|
||||
|
||||
describe('MCPOAuthHandler - Configurable OAuth Metadata', () => {
|
||||
const mockServerName = 'test-server';
|
||||
|
|
@ -1391,6 +1402,348 @@ describe('MCPOAuthHandler - Configurable OAuth Metadata', () => {
|
|||
});
|
||||
});
|
||||
|
||||
describe('Client Registration Reuse', () => {
|
||||
const originalFetch = global.fetch;
|
||||
const mockFetch = jest.fn();
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
global.fetch = mockFetch as unknown as typeof fetch;
|
||||
mockFetch.mockResolvedValue({ ok: true, json: async () => ({}) } as Response);
|
||||
process.env.DOMAIN_SERVER = 'http://localhost:3080';
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
global.fetch = originalFetch;
|
||||
});
|
||||
|
||||
const mockFindToken = jest.fn();
|
||||
|
||||
it('should reuse existing client registration when findToken is provided and client exists', async () => {
|
||||
const existingClientInfo = {
|
||||
client_id: 'existing-client-id',
|
||||
client_secret: 'existing-client-secret',
|
||||
redirect_uris: ['http://localhost:3080/api/mcp/test-server/oauth/callback'],
|
||||
token_endpoint_auth_method: 'client_secret_basic',
|
||||
};
|
||||
|
||||
mockGetClientInfoAndMetadata.mockResolvedValueOnce({
|
||||
clientInfo: existingClientInfo,
|
||||
clientMetadata: { issuer: 'https://example.com' },
|
||||
});
|
||||
|
||||
// Mock resource metadata discovery to fail
|
||||
mockDiscoverOAuthProtectedResourceMetadata.mockRejectedValueOnce(
|
||||
new Error('No resource metadata'),
|
||||
);
|
||||
|
||||
// Mock authorization server metadata discovery
|
||||
mockDiscoverAuthorizationServerMetadata.mockResolvedValueOnce({
|
||||
issuer: 'https://example.com',
|
||||
authorization_endpoint: 'https://example.com/authorize',
|
||||
token_endpoint: 'https://example.com/token',
|
||||
registration_endpoint: 'https://example.com/register',
|
||||
response_types_supported: ['code'],
|
||||
jwks_uri: 'https://example.com/.well-known/jwks.json',
|
||||
subject_types_supported: ['public'],
|
||||
id_token_signing_alg_values_supported: ['RS256'],
|
||||
} as AuthorizationServerMetadata);
|
||||
|
||||
mockStartAuthorization.mockResolvedValueOnce({
|
||||
authorizationUrl: new URL('https://example.com/authorize?client_id=existing-client-id'),
|
||||
codeVerifier: 'test-code-verifier',
|
||||
});
|
||||
|
||||
const result = await MCPOAuthHandler.initiateOAuthFlow(
|
||||
'test-server',
|
||||
'https://example.com/mcp',
|
||||
'user-123',
|
||||
{},
|
||||
undefined,
|
||||
undefined,
|
||||
mockFindToken,
|
||||
);
|
||||
|
||||
// Should NOT have called registerClient since we reused the existing one
|
||||
expect(mockRegisterClient).not.toHaveBeenCalled();
|
||||
|
||||
// Should have used the existing client info for startAuthorization
|
||||
expect(mockStartAuthorization).toHaveBeenCalledWith(
|
||||
'https://example.com/mcp',
|
||||
expect.objectContaining({
|
||||
clientInformation: existingClientInfo,
|
||||
}),
|
||||
);
|
||||
|
||||
expect(result.authorizationUrl).toBeDefined();
|
||||
expect(result.flowId).toBeDefined();
|
||||
});
|
||||
|
||||
it('should register a new client when findToken is provided but no existing registration found', async () => {
|
||||
mockGetClientInfoAndMetadata.mockResolvedValueOnce(null);
|
||||
|
||||
// Mock resource metadata discovery to fail
|
||||
mockDiscoverOAuthProtectedResourceMetadata.mockRejectedValueOnce(
|
||||
new Error('No resource metadata'),
|
||||
);
|
||||
|
||||
// Mock authorization server metadata discovery
|
||||
mockDiscoverAuthorizationServerMetadata.mockResolvedValueOnce({
|
||||
issuer: 'https://example.com',
|
||||
authorization_endpoint: 'https://example.com/authorize',
|
||||
token_endpoint: 'https://example.com/token',
|
||||
registration_endpoint: 'https://example.com/register',
|
||||
response_types_supported: ['code'],
|
||||
jwks_uri: 'https://example.com/.well-known/jwks.json',
|
||||
subject_types_supported: ['public'],
|
||||
id_token_signing_alg_values_supported: ['RS256'],
|
||||
} as AuthorizationServerMetadata);
|
||||
|
||||
mockRegisterClient.mockResolvedValueOnce({
|
||||
client_id: 'new-client-id',
|
||||
client_secret: 'new-client-secret',
|
||||
redirect_uris: ['http://localhost:3080/api/mcp/test-server/oauth/callback'],
|
||||
logo_uri: undefined,
|
||||
tos_uri: undefined,
|
||||
});
|
||||
|
||||
mockStartAuthorization.mockResolvedValueOnce({
|
||||
authorizationUrl: new URL('https://example.com/authorize?client_id=new-client-id'),
|
||||
codeVerifier: 'test-code-verifier',
|
||||
});
|
||||
|
||||
await MCPOAuthHandler.initiateOAuthFlow(
|
||||
'test-server',
|
||||
'https://example.com/mcp',
|
||||
'user-123',
|
||||
{},
|
||||
undefined,
|
||||
undefined,
|
||||
mockFindToken,
|
||||
);
|
||||
|
||||
// Should have called registerClient since no existing registration was found
|
||||
expect(mockRegisterClient).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should register a new client when findToken is not provided', async () => {
|
||||
// Mock resource metadata discovery to fail
|
||||
mockDiscoverOAuthProtectedResourceMetadata.mockRejectedValueOnce(
|
||||
new Error('No resource metadata'),
|
||||
);
|
||||
|
||||
// Mock authorization server metadata discovery
|
||||
mockDiscoverAuthorizationServerMetadata.mockResolvedValueOnce({
|
||||
issuer: 'https://example.com',
|
||||
authorization_endpoint: 'https://example.com/authorize',
|
||||
token_endpoint: 'https://example.com/token',
|
||||
registration_endpoint: 'https://example.com/register',
|
||||
response_types_supported: ['code'],
|
||||
jwks_uri: 'https://example.com/.well-known/jwks.json',
|
||||
subject_types_supported: ['public'],
|
||||
id_token_signing_alg_values_supported: ['RS256'],
|
||||
} as AuthorizationServerMetadata);
|
||||
|
||||
mockRegisterClient.mockResolvedValueOnce({
|
||||
client_id: 'new-client-id',
|
||||
client_secret: 'new-client-secret',
|
||||
redirect_uris: ['http://localhost:3080/api/mcp/test-server/oauth/callback'],
|
||||
logo_uri: undefined,
|
||||
tos_uri: undefined,
|
||||
});
|
||||
|
||||
mockStartAuthorization.mockResolvedValueOnce({
|
||||
authorizationUrl: new URL('https://example.com/authorize?client_id=new-client-id'),
|
||||
codeVerifier: 'test-code-verifier',
|
||||
});
|
||||
|
||||
// No findToken passed
|
||||
await MCPOAuthHandler.initiateOAuthFlow(
|
||||
'test-server',
|
||||
'https://example.com/mcp',
|
||||
'user-123',
|
||||
{},
|
||||
undefined,
|
||||
);
|
||||
|
||||
// Should NOT have tried to look up existing registration
|
||||
expect(mockGetClientInfoAndMetadata).not.toHaveBeenCalled();
|
||||
|
||||
// Should have called registerClient
|
||||
expect(mockRegisterClient).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should fall back to registration when getClientInfoAndMetadata throws', async () => {
|
||||
mockGetClientInfoAndMetadata.mockRejectedValueOnce(new Error('DB error'));
|
||||
|
||||
// Mock resource metadata discovery to fail
|
||||
mockDiscoverOAuthProtectedResourceMetadata.mockRejectedValueOnce(
|
||||
new Error('No resource metadata'),
|
||||
);
|
||||
|
||||
// Mock authorization server metadata discovery
|
||||
mockDiscoverAuthorizationServerMetadata.mockResolvedValueOnce({
|
||||
issuer: 'https://example.com',
|
||||
authorization_endpoint: 'https://example.com/authorize',
|
||||
token_endpoint: 'https://example.com/token',
|
||||
registration_endpoint: 'https://example.com/register',
|
||||
response_types_supported: ['code'],
|
||||
jwks_uri: 'https://example.com/.well-known/jwks.json',
|
||||
subject_types_supported: ['public'],
|
||||
id_token_signing_alg_values_supported: ['RS256'],
|
||||
} as AuthorizationServerMetadata);
|
||||
|
||||
mockRegisterClient.mockResolvedValueOnce({
|
||||
client_id: 'new-client-id',
|
||||
client_secret: 'new-client-secret',
|
||||
redirect_uris: ['http://localhost:3080/api/mcp/test-server/oauth/callback'],
|
||||
logo_uri: undefined,
|
||||
tos_uri: undefined,
|
||||
});
|
||||
|
||||
mockStartAuthorization.mockResolvedValueOnce({
|
||||
authorizationUrl: new URL('https://example.com/authorize?client_id=new-client-id'),
|
||||
codeVerifier: 'test-code-verifier',
|
||||
});
|
||||
|
||||
await MCPOAuthHandler.initiateOAuthFlow(
|
||||
'test-server',
|
||||
'https://example.com/mcp',
|
||||
'user-123',
|
||||
{},
|
||||
undefined,
|
||||
undefined,
|
||||
mockFindToken,
|
||||
);
|
||||
|
||||
// Should have fallen back to registerClient
|
||||
expect(mockRegisterClient).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should re-register when stored redirect_uri differs from current configuration', async () => {
|
||||
const existingClientInfo = {
|
||||
client_id: 'existing-client-id',
|
||||
client_secret: 'existing-client-secret',
|
||||
redirect_uris: ['http://old-domain.com/api/mcp/test-server/oauth/callback'],
|
||||
token_endpoint_auth_method: 'client_secret_basic',
|
||||
};
|
||||
|
||||
mockGetClientInfoAndMetadata.mockResolvedValueOnce({
|
||||
clientInfo: existingClientInfo,
|
||||
clientMetadata: {},
|
||||
});
|
||||
|
||||
mockDiscoverOAuthProtectedResourceMetadata.mockRejectedValueOnce(
|
||||
new Error('No resource metadata'),
|
||||
);
|
||||
|
||||
mockDiscoverAuthorizationServerMetadata.mockResolvedValueOnce({
|
||||
issuer: 'https://example.com',
|
||||
authorization_endpoint: 'https://example.com/authorize',
|
||||
token_endpoint: 'https://example.com/token',
|
||||
registration_endpoint: 'https://example.com/register',
|
||||
response_types_supported: ['code'],
|
||||
jwks_uri: 'https://example.com/.well-known/jwks.json',
|
||||
subject_types_supported: ['public'],
|
||||
id_token_signing_alg_values_supported: ['RS256'],
|
||||
} as AuthorizationServerMetadata);
|
||||
|
||||
mockRegisterClient.mockResolvedValueOnce({
|
||||
client_id: 'new-client-id',
|
||||
client_secret: 'new-client-secret',
|
||||
redirect_uris: ['http://localhost:3080/api/mcp/test-server/oauth/callback'],
|
||||
logo_uri: undefined,
|
||||
tos_uri: undefined,
|
||||
});
|
||||
|
||||
mockStartAuthorization.mockResolvedValueOnce({
|
||||
authorizationUrl: new URL('https://example.com/authorize?client_id=new-client-id'),
|
||||
codeVerifier: 'test-code-verifier',
|
||||
});
|
||||
|
||||
await MCPOAuthHandler.initiateOAuthFlow(
|
||||
'test-server',
|
||||
'https://example.com/mcp',
|
||||
'user-123',
|
||||
{},
|
||||
undefined,
|
||||
undefined,
|
||||
mockFindToken,
|
||||
);
|
||||
|
||||
expect(mockRegisterClient).toHaveBeenCalled();
|
||||
expect(mockStartAuthorization).toHaveBeenCalledWith(
|
||||
'https://example.com/mcp',
|
||||
expect.objectContaining({
|
||||
clientInformation: expect.objectContaining({
|
||||
client_id: 'new-client-id',
|
||||
}),
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should re-register when stored client has empty redirect_uris', async () => {
|
||||
const existingClientInfo = {
|
||||
client_id: 'empty-redirect-client',
|
||||
client_secret: 'secret',
|
||||
redirect_uris: [],
|
||||
};
|
||||
|
||||
mockGetClientInfoAndMetadata.mockResolvedValueOnce({
|
||||
clientInfo: existingClientInfo,
|
||||
clientMetadata: {},
|
||||
});
|
||||
|
||||
mockDiscoverOAuthProtectedResourceMetadata.mockRejectedValueOnce(
|
||||
new Error('No resource metadata'),
|
||||
);
|
||||
|
||||
mockDiscoverAuthorizationServerMetadata.mockResolvedValueOnce({
|
||||
issuer: 'https://example.com',
|
||||
authorization_endpoint: 'https://example.com/authorize',
|
||||
token_endpoint: 'https://example.com/token',
|
||||
registration_endpoint: 'https://example.com/register',
|
||||
response_types_supported: ['code'],
|
||||
jwks_uri: 'https://example.com/.well-known/jwks.json',
|
||||
subject_types_supported: ['public'],
|
||||
id_token_signing_alg_values_supported: ['RS256'],
|
||||
} as AuthorizationServerMetadata);
|
||||
|
||||
mockRegisterClient.mockResolvedValueOnce({
|
||||
client_id: 'new-client-id',
|
||||
client_secret: 'new-client-secret',
|
||||
redirect_uris: ['http://localhost:3080/api/mcp/test-server/oauth/callback'],
|
||||
logo_uri: undefined,
|
||||
tos_uri: undefined,
|
||||
});
|
||||
|
||||
mockStartAuthorization.mockResolvedValueOnce({
|
||||
authorizationUrl: new URL('https://example.com/authorize?client_id=new-client-id'),
|
||||
codeVerifier: 'test-code-verifier',
|
||||
});
|
||||
|
||||
await MCPOAuthHandler.initiateOAuthFlow(
|
||||
'test-server',
|
||||
'https://example.com/mcp',
|
||||
'user-123',
|
||||
{},
|
||||
undefined,
|
||||
undefined,
|
||||
mockFindToken,
|
||||
);
|
||||
|
||||
expect(mockRegisterClient).toHaveBeenCalled();
|
||||
expect(mockStartAuthorization).toHaveBeenCalledWith(
|
||||
'https://example.com/mcp',
|
||||
expect.objectContaining({
|
||||
clientInformation: expect.objectContaining({
|
||||
client_id: 'new-client-id',
|
||||
}),
|
||||
}),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Fallback OAuth Metadata (Legacy Server Support)', () => {
|
||||
const originalFetch = global.fetch;
|
||||
const mockFetch = jest.fn();
|
||||
|
|
|
|||
|
|
@ -59,6 +59,8 @@ export interface OAuthTestServerOptions {
|
|||
issueRefreshTokens?: boolean;
|
||||
refreshTokenTTLMs?: number;
|
||||
rotateRefreshTokens?: boolean;
|
||||
/** When true, /token validates client_id against the registered client that initiated /authorize */
|
||||
enforceClientId?: boolean;
|
||||
}
|
||||
|
||||
export interface OAuthTestServer {
|
||||
|
|
@ -81,6 +83,17 @@ async function readRequestBody(req: http.IncomingMessage): Promise<string> {
|
|||
return Buffer.concat(chunks).toString();
|
||||
}
|
||||
|
||||
function parseBasicAuth(
|
||||
header: string | undefined,
|
||||
): { clientId: string; clientSecret: string } | null {
|
||||
if (!header?.startsWith('Basic ')) {
|
||||
return null;
|
||||
}
|
||||
const decoded = Buffer.from(header.slice(6), 'base64').toString();
|
||||
const [clientId, clientSecret] = decoded.split(':');
|
||||
return clientId ? { clientId, clientSecret: clientSecret ?? '' } : null;
|
||||
}
|
||||
|
||||
function parseTokenRequest(body: string, contentType: string | undefined): URLSearchParams | null {
|
||||
if (contentType?.includes('application/x-www-form-urlencoded')) {
|
||||
return new URLSearchParams(body);
|
||||
|
|
@ -100,6 +113,7 @@ export async function createOAuthMCPServer(
|
|||
issueRefreshTokens = false,
|
||||
refreshTokenTTLMs = 365 * 24 * 60 * 60 * 1000,
|
||||
rotateRefreshTokens = false,
|
||||
enforceClientId = false,
|
||||
} = options;
|
||||
|
||||
const sessions = new Map<string, StreamableHTTPServerTransport>();
|
||||
|
|
@ -107,7 +121,10 @@ export async function createOAuthMCPServer(
|
|||
const tokenIssueTimes = new Map<string, number>();
|
||||
const issuedRefreshTokens = new Map<string, string>();
|
||||
const refreshTokenIssueTimes = new Map<string, number>();
|
||||
const authCodes = new Map<string, { codeChallenge?: string; codeChallengeMethod?: string }>();
|
||||
const authCodes = new Map<
|
||||
string,
|
||||
{ codeChallenge?: string; codeChallengeMethod?: string; clientId?: string }
|
||||
>();
|
||||
const registeredClients = new Map<string, { client_id: string; client_secret: string }>();
|
||||
|
||||
let port = 0;
|
||||
|
|
@ -155,7 +172,8 @@ export async function createOAuthMCPServer(
|
|||
const code = randomUUID();
|
||||
const codeChallenge = url.searchParams.get('code_challenge') ?? undefined;
|
||||
const codeChallengeMethod = url.searchParams.get('code_challenge_method') ?? undefined;
|
||||
authCodes.set(code, { codeChallenge, codeChallengeMethod });
|
||||
const clientId = url.searchParams.get('client_id') ?? undefined;
|
||||
authCodes.set(code, { codeChallenge, codeChallengeMethod, clientId });
|
||||
const redirectUri = url.searchParams.get('redirect_uri') ?? '';
|
||||
const state = url.searchParams.get('state') ?? '';
|
||||
res.writeHead(302, {
|
||||
|
|
@ -202,6 +220,23 @@ export async function createOAuthMCPServer(
|
|||
}
|
||||
}
|
||||
|
||||
if (enforceClientId && codeData.clientId) {
|
||||
const requestClientId =
|
||||
params.get('client_id') ?? parseBasicAuth(req.headers.authorization)?.clientId;
|
||||
if (!requestClientId || !registeredClients.has(requestClientId)) {
|
||||
res.writeHead(401, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: 'invalid_client' }));
|
||||
return;
|
||||
}
|
||||
if (requestClientId !== codeData.clientId) {
|
||||
res.writeHead(401, { 'Content-Type': 'application/json' });
|
||||
res.end(
|
||||
JSON.stringify({ error: 'invalid_client', error_description: 'client_id mismatch' }),
|
||||
);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
authCodes.delete(code);
|
||||
|
||||
const accessToken = randomUUID();
|
||||
|
|
@ -439,6 +474,25 @@ export class InMemoryTokenStore {
|
|||
this.tokens.delete(this.key(filter));
|
||||
};
|
||||
|
||||
deleteTokens = async (query: {
|
||||
userId?: string;
|
||||
type?: string;
|
||||
identifier?: string;
|
||||
}): Promise<{ acknowledged: boolean; deletedCount: number }> => {
|
||||
let deletedCount = 0;
|
||||
for (const [key, token] of this.tokens.entries()) {
|
||||
const match =
|
||||
(!query.userId || token.userId === query.userId) &&
|
||||
(!query.type || token.type === query.type) &&
|
||||
(!query.identifier || token.identifier === query.identifier);
|
||||
if (match) {
|
||||
this.tokens.delete(key);
|
||||
deletedCount++;
|
||||
}
|
||||
}
|
||||
return { acknowledged: true, deletedCount };
|
||||
};
|
||||
|
||||
getAll(): InMemoryToken[] {
|
||||
return [...this.tokens.values()];
|
||||
}
|
||||
|
|
|
|||
|
|
@ -10,6 +10,7 @@ import {
|
|||
discoverOAuthProtectedResourceMetadata,
|
||||
} from '@modelcontextprotocol/sdk/client/auth.js';
|
||||
import { TokenExchangeMethodEnum, type MCPOptions } from 'librechat-data-provider';
|
||||
import type { TokenMethods } from '@librechat/data-schemas';
|
||||
import type { FlowStateManager } from '~/flow/manager';
|
||||
import type {
|
||||
OAuthClientInformation,
|
||||
|
|
@ -25,6 +26,7 @@ import {
|
|||
inferClientAuthMethod,
|
||||
} from './methods';
|
||||
import { isSSRFTarget, resolveHostnameSSRF, isOAuthUrlAllowed } from '~/auth';
|
||||
import { MCPTokenStorage } from './tokens';
|
||||
import { sanitizeUrlForLogging } from '~/mcp/utils';
|
||||
|
||||
/** Type for the OAuth metadata from the SDK */
|
||||
|
|
@ -368,6 +370,7 @@ export class MCPOAuthHandler {
|
|||
oauthHeaders: Record<string, string>,
|
||||
config?: MCPOptions['oauth'],
|
||||
allowedDomains?: string[] | null,
|
||||
findToken?: TokenMethods['findToken'],
|
||||
): Promise<{ authorizationUrl: string; flowId: string; flowMetadata: MCPOAuthFlowMetadata }> {
|
||||
logger.debug(
|
||||
`[MCPOAuth] initiateOAuthFlow called for ${serverName} with URL: ${sanitizeUrlForLogging(serverUrl)}`,
|
||||
|
|
@ -494,18 +497,62 @@ export class MCPOAuthHandler {
|
|||
);
|
||||
|
||||
const redirectUri = this.getDefaultRedirectUri(serverName);
|
||||
logger.debug(`[MCPOAuth] Registering OAuth client with redirect URI: ${redirectUri}`);
|
||||
logger.debug(`[MCPOAuth] Resolving OAuth client with redirect URI: ${redirectUri}`);
|
||||
|
||||
const clientInfo = await this.registerOAuthClient(
|
||||
authServerUrl.toString(),
|
||||
metadata,
|
||||
oauthHeaders,
|
||||
resourceMetadata,
|
||||
redirectUri,
|
||||
config?.token_exchange_method,
|
||||
);
|
||||
let clientInfo: OAuthClientInformation | undefined;
|
||||
let reusedStoredClient = false;
|
||||
|
||||
logger.debug(`[MCPOAuth] Client registered with ID: ${clientInfo.client_id}`);
|
||||
if (findToken) {
|
||||
try {
|
||||
const existing = await MCPTokenStorage.getClientInfoAndMetadata({
|
||||
userId,
|
||||
serverName,
|
||||
findToken,
|
||||
});
|
||||
if (existing?.clientInfo?.client_id) {
|
||||
const storedRedirectUri = (existing.clientInfo as OAuthClientInformation)
|
||||
.redirect_uris?.[0];
|
||||
const storedIssuer =
|
||||
typeof existing.clientMetadata?.issuer === 'string'
|
||||
? existing.clientMetadata.issuer.replace(/\/+$/, '')
|
||||
: null;
|
||||
const currentIssuer = (metadata.issuer ?? authServerUrl.toString()).replace(/\/+$/, '');
|
||||
|
||||
if (!storedRedirectUri || storedRedirectUri !== redirectUri) {
|
||||
logger.debug(
|
||||
`[MCPOAuth] Stored redirect_uri "${storedRedirectUri}" differs from current "${redirectUri}", will re-register`,
|
||||
);
|
||||
} else if (!storedIssuer || storedIssuer !== currentIssuer) {
|
||||
logger.debug(
|
||||
`[MCPOAuth] Issuer mismatch (stored: ${storedIssuer ?? 'none'}, current: ${currentIssuer}), will re-register`,
|
||||
);
|
||||
} else {
|
||||
logger.debug(
|
||||
`[MCPOAuth] Reusing existing client registration: ${existing.clientInfo.client_id}`,
|
||||
);
|
||||
clientInfo = existing.clientInfo;
|
||||
reusedStoredClient = true;
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
logger.warn(
|
||||
`[MCPOAuth] Failed to look up existing client registration, falling back to new registration`,
|
||||
{ error, serverName, userId },
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if (!clientInfo) {
|
||||
clientInfo = await this.registerOAuthClient(
|
||||
authServerUrl.toString(),
|
||||
metadata,
|
||||
oauthHeaders,
|
||||
resourceMetadata,
|
||||
redirectUri,
|
||||
config?.token_exchange_method,
|
||||
);
|
||||
logger.debug(`[MCPOAuth] Client registered with ID: ${clientInfo.client_id}`);
|
||||
}
|
||||
|
||||
/** Authorization Scope */
|
||||
const scope =
|
||||
|
|
@ -575,6 +622,7 @@ export class MCPOAuthHandler {
|
|||
metadata,
|
||||
resourceMetadata,
|
||||
...(Object.keys(oauthHeaders).length > 0 && { oauthHeaders }),
|
||||
...(reusedStoredClient && { reusedStoredClient }),
|
||||
};
|
||||
|
||||
logger.debug(
|
||||
|
|
|
|||
|
|
@ -476,6 +476,26 @@ export class MCPTokenStorage {
|
|||
};
|
||||
}
|
||||
|
||||
/** Deletes only the stored client registration for a specific user and server */
|
||||
static async deleteClientRegistration({
|
||||
userId,
|
||||
serverName,
|
||||
deleteTokens,
|
||||
}: {
|
||||
userId: string;
|
||||
serverName: string;
|
||||
deleteTokens: TokenMethods['deleteTokens'];
|
||||
}): Promise<void> {
|
||||
const identifier = `mcp:${serverName}`;
|
||||
await deleteTokens({
|
||||
userId,
|
||||
type: 'mcp_oauth_client',
|
||||
identifier: `${identifier}:client`,
|
||||
});
|
||||
const logPrefix = this.getLogPrefix(userId, serverName);
|
||||
logger.debug(`${logPrefix} Cleared stored client registration`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Deletes all OAuth-related tokens for a specific user and server
|
||||
*/
|
||||
|
|
|
|||
|
|
@ -91,6 +91,8 @@ export interface MCPOAuthFlowMetadata extends FlowMetadata {
|
|||
authorizationUrl?: string;
|
||||
/** Custom headers for OAuth token exchange, persisted at flow initiation for the callback. */
|
||||
oauthHeaders?: Record<string, string>;
|
||||
/** True when the flow reused a stored client registration from a prior successful OAuth flow */
|
||||
reusedStoredClient?: boolean;
|
||||
}
|
||||
|
||||
export interface MCPOAuthTokens extends OAuthTokens {
|
||||
|
|
|
|||
|
|
@ -1,21 +1,16 @@
|
|||
import { useEffect } from 'react';
|
||||
import { useCallback } from 'react';
|
||||
import { TOptions } from 'i18next';
|
||||
import { useAtomValue } from 'jotai';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { resources } from '~/locales/i18n';
|
||||
import { langAtom } from '~/store';
|
||||
|
||||
export type TranslationKeys = keyof typeof resources.en.translation;
|
||||
|
||||
/** Language lifecycle is managed by the host app — do not add i18n.changeLanguage() calls here. */
|
||||
export default function useLocalize() {
|
||||
const lang = useAtomValue(langAtom);
|
||||
const { t, i18n } = useTranslation();
|
||||
const { t } = useTranslation();
|
||||
|
||||
useEffect(() => {
|
||||
if (i18n.language !== lang) {
|
||||
i18n.changeLanguage(lang);
|
||||
}
|
||||
}, [lang, i18n]);
|
||||
|
||||
return (phraseKey: TranslationKeys, options?: TOptions) => t(phraseKey, options);
|
||||
return useCallback(
|
||||
(phraseKey: TranslationKeys, options?: TOptions) => t(phraseKey, options),
|
||||
[t],
|
||||
);
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,7 +1,6 @@
|
|||
import { atom } from 'jotai';
|
||||
import { NotificationSeverity } from '~/common';
|
||||
|
||||
export const langAtom = atom<string>('en');
|
||||
export const chatDirectionAtom = atom<string>('ltr');
|
||||
export const fontSizeAtom = atom<string>('text-base');
|
||||
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "librechat-data-provider",
|
||||
"version": "0.8.406",
|
||||
"version": "0.8.407",
|
||||
"description": "data services for librechat apps",
|
||||
"main": "dist/index.js",
|
||||
"module": "dist/index.es.js",
|
||||
|
|
|
|||
|
|
@ -1,10 +1,15 @@
|
|||
import {
|
||||
endpointSchema,
|
||||
paramDefinitionSchema,
|
||||
agentsEndpointSchema,
|
||||
azureEndpointSchema,
|
||||
endpointSchema,
|
||||
configSchema,
|
||||
interfaceSchema,
|
||||
fileStorageSchema,
|
||||
fileStrategiesSchema,
|
||||
} from '../src/config';
|
||||
import { tModelSpecPresetSchema, EModelEndpoint } from '../src/schemas';
|
||||
import { FileSources } from '../src/types/files';
|
||||
|
||||
describe('paramDefinitionSchema', () => {
|
||||
it('accepts a minimal definition with only key', () => {
|
||||
|
|
@ -222,6 +227,109 @@ describe('endpointSchema deprecated fields', () => {
|
|||
});
|
||||
});
|
||||
|
||||
describe('endpointSchema addParams validation', () => {
|
||||
const validEndpoint = {
|
||||
name: 'CustomEndpoint',
|
||||
apiKey: 'test-key',
|
||||
baseURL: 'https://api.example.com',
|
||||
models: { default: ['model-1'] },
|
||||
};
|
||||
const nestedAddParams = {
|
||||
provider: {
|
||||
only: ['z-ai'],
|
||||
quantizations: ['int4'],
|
||||
},
|
||||
};
|
||||
|
||||
it('accepts nested addParams objects and arrays', () => {
|
||||
const result = endpointSchema.safeParse({
|
||||
...validEndpoint,
|
||||
addParams: nestedAddParams,
|
||||
});
|
||||
expect(result.success).toBe(true);
|
||||
if (result.success) {
|
||||
expect(result.data.addParams).toEqual(nestedAddParams);
|
||||
}
|
||||
});
|
||||
|
||||
it('keeps configSchema validation intact with nested custom addParams', () => {
|
||||
const result = configSchema.safeParse({
|
||||
version: '1.0.0',
|
||||
endpoints: {
|
||||
custom: [
|
||||
{
|
||||
...validEndpoint,
|
||||
addParams: nestedAddParams,
|
||||
},
|
||||
],
|
||||
},
|
||||
});
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
|
||||
it('accepts boolean web_search in addParams', () => {
|
||||
const result = endpointSchema.safeParse({
|
||||
...validEndpoint,
|
||||
addParams: {
|
||||
provider: {
|
||||
only: ['z-ai'],
|
||||
},
|
||||
web_search: true,
|
||||
},
|
||||
});
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
|
||||
it('accepts scalar addParams values', () => {
|
||||
const result = endpointSchema.safeParse({
|
||||
...validEndpoint,
|
||||
addParams: {
|
||||
model: 'custom-model',
|
||||
retries: 2,
|
||||
metadata: null,
|
||||
},
|
||||
});
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
|
||||
it('rejects non-boolean web_search objects in addParams', () => {
|
||||
const result = endpointSchema.safeParse({
|
||||
...validEndpoint,
|
||||
addParams: {
|
||||
provider: {
|
||||
only: ['z-ai'],
|
||||
},
|
||||
web_search: {
|
||||
enabled: true,
|
||||
},
|
||||
},
|
||||
});
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
|
||||
it('rejects configSchema entries with non-boolean web_search objects in custom addParams', () => {
|
||||
const result = configSchema.safeParse({
|
||||
version: '1.0.0',
|
||||
endpoints: {
|
||||
custom: [
|
||||
{
|
||||
...validEndpoint,
|
||||
addParams: {
|
||||
provider: {
|
||||
only: ['z-ai'],
|
||||
},
|
||||
web_search: {
|
||||
enabled: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
});
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('agentsEndpointSchema', () => {
|
||||
it('does not accept baseURL', () => {
|
||||
const result = agentsEndpointSchema.safeParse({
|
||||
|
|
@ -251,4 +359,146 @@ describe('azureEndpointSchema', () => {
|
|||
expect(result.data).not.toHaveProperty('plugins');
|
||||
}
|
||||
});
|
||||
|
||||
it('accepts nested addParams in azure groups', () => {
|
||||
const result = azureEndpointSchema.safeParse({
|
||||
groups: [
|
||||
{
|
||||
group: 'test-group',
|
||||
apiKey: 'test-key',
|
||||
models: { 'gpt-4': true },
|
||||
addParams: {
|
||||
provider: {
|
||||
only: ['z-ai'],
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
expect(result.success).toBe(true);
|
||||
if (result.success) {
|
||||
expect(result.data.groups[0].addParams).toEqual({
|
||||
provider: {
|
||||
only: ['z-ai'],
|
||||
},
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
it('accepts boolean web_search in azure addParams', () => {
|
||||
const result = azureEndpointSchema.safeParse({
|
||||
groups: [
|
||||
{
|
||||
group: 'test-group',
|
||||
apiKey: 'test-key',
|
||||
models: { 'gpt-4': true },
|
||||
addParams: {
|
||||
provider: {
|
||||
only: ['z-ai'],
|
||||
},
|
||||
web_search: false,
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
|
||||
it('rejects non-boolean web_search objects in azure addParams', () => {
|
||||
const result = azureEndpointSchema.safeParse({
|
||||
groups: [
|
||||
{
|
||||
group: 'test-group',
|
||||
apiKey: 'test-key',
|
||||
models: { 'gpt-4': true },
|
||||
addParams: {
|
||||
provider: {
|
||||
only: ['z-ai'],
|
||||
},
|
||||
web_search: {
|
||||
enabled: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('fileStorageSchema', () => {
|
||||
const validStrategies = [
|
||||
FileSources.local,
|
||||
FileSources.firebase,
|
||||
FileSources.s3,
|
||||
FileSources.azure_blob,
|
||||
];
|
||||
const invalidStrategies = [
|
||||
FileSources.openai,
|
||||
FileSources.azure,
|
||||
FileSources.vectordb,
|
||||
FileSources.execute_code,
|
||||
FileSources.mistral_ocr,
|
||||
FileSources.azure_mistral_ocr,
|
||||
FileSources.vertexai_mistral_ocr,
|
||||
FileSources.text,
|
||||
FileSources.document_parser,
|
||||
];
|
||||
|
||||
for (const strategy of validStrategies) {
|
||||
it(`accepts storage strategy "${strategy}"`, () => {
|
||||
expect(fileStorageSchema.safeParse(strategy).success).toBe(true);
|
||||
});
|
||||
}
|
||||
|
||||
for (const strategy of invalidStrategies) {
|
||||
it(`rejects processing strategy "${strategy}"`, () => {
|
||||
expect(fileStorageSchema.safeParse(strategy).success).toBe(false);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
describe('fileStrategiesSchema', () => {
|
||||
it('accepts valid storage strategies for all sub-fields', () => {
|
||||
const result = fileStrategiesSchema.safeParse({
|
||||
default: FileSources.s3,
|
||||
avatar: FileSources.local,
|
||||
image: FileSources.firebase,
|
||||
document: FileSources.azure_blob,
|
||||
});
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
|
||||
it('rejects processing strategies in sub-fields', () => {
|
||||
const result = fileStrategiesSchema.safeParse({
|
||||
default: FileSources.vectordb,
|
||||
});
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('configSchema fileStrategy', () => {
|
||||
it('rejects a processing strategy as fileStrategy', () => {
|
||||
const result = configSchema.safeParse({ version: '1.3.7', fileStrategy: FileSources.vectordb });
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
|
||||
it('defaults fileStrategy to local when absent', () => {
|
||||
const result = configSchema.safeParse({ version: '1.3.7' });
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.data?.fileStrategy).toBe(FileSources.local);
|
||||
});
|
||||
});
|
||||
|
||||
describe('interfaceSchema', () => {
|
||||
it('silently strips removed legacy fields', () => {
|
||||
const result = interfaceSchema.parse({
|
||||
endpointsMenu: true,
|
||||
sidePanel: true,
|
||||
modelSelect: false,
|
||||
});
|
||||
expect(result).not.toHaveProperty('endpointsMenu');
|
||||
expect(result).not.toHaveProperty('sidePanel');
|
||||
expect(result.modelSelect).toBe(false);
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -361,7 +361,8 @@ export const getAllPromptGroups = () => `${prompts()}/all`;
|
|||
|
||||
/* Roles */
|
||||
export const roles = () => `${BASE_URL}/api/roles`;
|
||||
export const getRole = (roleName: string) => `${roles()}/${roleName.toLowerCase()}`;
|
||||
export const adminRoles = () => `${BASE_URL}/api/admin/roles`;
|
||||
export const getRole = (roleName: string) => `${roles()}/${encodeURIComponent(roleName)}`;
|
||||
export const updatePromptPermissions = (roleName: string) => `${getRole(roleName)}/prompts`;
|
||||
export const updateMemoryPermissions = (roleName: string) => `${getRole(roleName)}/memories`;
|
||||
export const updateAgentPermissions = (roleName: string) => `${getRole(roleName)}/agents`;
|
||||
|
|
|
|||
|
|
@ -63,14 +63,27 @@ export enum SettingsViews {
|
|||
advanced = 'advanced',
|
||||
}
|
||||
|
||||
/** Validates any FileSources value — use for file metadata, DB records, and upload routing. */
|
||||
export const fileSourceSchema = z.nativeEnum(FileSources);
|
||||
|
||||
/** Storage backend strategies only — use for config fields that set where files are stored. */
|
||||
const FILE_STORAGE_BACKENDS = [
|
||||
FileSources.local,
|
||||
FileSources.firebase,
|
||||
FileSources.s3,
|
||||
FileSources.azure_blob,
|
||||
] as const satisfies ReadonlyArray<FileSources>;
|
||||
|
||||
export const fileStorageSchema = z.enum(FILE_STORAGE_BACKENDS);
|
||||
|
||||
export type FileStorage = z.infer<typeof fileStorageSchema>;
|
||||
|
||||
export const fileStrategiesSchema = z
|
||||
.object({
|
||||
default: fileSourceSchema.optional(),
|
||||
avatar: fileSourceSchema.optional(),
|
||||
image: fileSourceSchema.optional(),
|
||||
document: fileSourceSchema.optional(),
|
||||
default: fileStorageSchema.optional(),
|
||||
avatar: fileStorageSchema.optional(),
|
||||
image: fileStorageSchema.optional(),
|
||||
document: fileStorageSchema.optional(),
|
||||
})
|
||||
.optional();
|
||||
|
||||
|
|
@ -116,13 +129,39 @@ export const modelConfigSchema = z
|
|||
|
||||
export type TAzureModelConfig = z.infer<typeof modelConfigSchema>;
|
||||
|
||||
const paramValueSchema: z.ZodType<unknown> = z.lazy(() =>
|
||||
z.union([
|
||||
z.string(),
|
||||
z.number(),
|
||||
z.boolean(),
|
||||
z.null(),
|
||||
z.array(paramValueSchema),
|
||||
z.record(z.string(), paramValueSchema),
|
||||
]),
|
||||
);
|
||||
|
||||
/** Validates addParams while keeping web_search aligned with current runtime boolean handling. */
|
||||
const addParamsSchema: z.ZodType<Record<string, unknown>> = z
|
||||
.record(z.string(), paramValueSchema)
|
||||
.superRefine((params, ctx) => {
|
||||
if (params.web_search === undefined || typeof params.web_search === 'boolean') {
|
||||
return;
|
||||
}
|
||||
|
||||
ctx.addIssue({
|
||||
code: z.ZodIssueCode.custom,
|
||||
path: ['web_search'],
|
||||
message: '`web_search` must be a boolean in addParams',
|
||||
});
|
||||
});
|
||||
|
||||
export const azureBaseSchema = z.object({
|
||||
apiKey: z.string(),
|
||||
serverless: z.boolean().optional(),
|
||||
instanceName: z.string().optional(),
|
||||
deploymentName: z.string().optional(),
|
||||
assistants: z.boolean().optional(),
|
||||
addParams: z.record(z.union([z.string(), z.number(), z.boolean(), z.null()])).optional(),
|
||||
addParams: addParamsSchema.optional(),
|
||||
dropParams: z.array(z.string()).optional(),
|
||||
version: z.string().optional(),
|
||||
baseURL: z.string().optional(),
|
||||
|
|
@ -362,7 +401,7 @@ export const endpointSchema = baseEndpointSchema.merge(
|
|||
iconURL: z.string().optional(),
|
||||
modelDisplayLabel: z.string().optional(),
|
||||
headers: z.record(z.string()).optional(),
|
||||
addParams: z.record(z.union([z.string(), z.number(), z.boolean(), z.null()])).optional(),
|
||||
addParams: addParamsSchema.optional(),
|
||||
dropParams: z.array(z.string()).optional(),
|
||||
customParams: z
|
||||
.object({
|
||||
|
|
@ -657,10 +696,8 @@ export const interfaceSchema = z
|
|||
termsOfService: termsOfServiceSchema.optional(),
|
||||
customWelcome: z.string().optional(),
|
||||
mcpServers: mcpServersSchema.optional(),
|
||||
endpointsMenu: z.boolean().optional(),
|
||||
modelSelect: z.boolean().optional(),
|
||||
parameters: z.boolean().optional(),
|
||||
sidePanel: z.boolean().optional(),
|
||||
multiConvo: z.boolean().optional(),
|
||||
bookmarks: z.boolean().optional(),
|
||||
memories: z.boolean().optional(),
|
||||
|
|
@ -716,10 +753,8 @@ export const interfaceSchema = z
|
|||
.optional(),
|
||||
})
|
||||
.default({
|
||||
endpointsMenu: true,
|
||||
modelSelect: true,
|
||||
parameters: true,
|
||||
sidePanel: true,
|
||||
presets: true,
|
||||
multiConvo: true,
|
||||
bookmarks: true,
|
||||
|
|
@ -1040,7 +1075,7 @@ export const configSchema = z.object({
|
|||
.optional(),
|
||||
interface: interfaceSchema,
|
||||
turnstile: turnstileSchema.optional(),
|
||||
fileStrategy: fileSourceSchema.default(FileSources.local),
|
||||
fileStrategy: fileStorageSchema.default(FileSources.local),
|
||||
fileStrategies: fileStrategiesSchema,
|
||||
actions: z
|
||||
.object({
|
||||
|
|
@ -1814,7 +1849,7 @@ export enum Constants {
|
|||
/** Key for the app's version. */
|
||||
VERSION = 'v0.8.4',
|
||||
/** Key for the Custom Config's version (librechat.yaml). */
|
||||
CONFIG_VERSION = '1.3.6',
|
||||
CONFIG_VERSION = '1.3.7',
|
||||
/** Standard value for the first message's `parentMessageId` value, to indicate no parent exists. */
|
||||
NO_PARENT = '00000000-0000-0000-0000-000000000000',
|
||||
/** Standard value to use whatever the submission prelim. `responseMessageId` is */
|
||||
|
|
|
|||
|
|
@ -866,6 +866,10 @@ export function getRandomPrompts(
|
|||
}
|
||||
|
||||
/* Roles */
|
||||
export function listRoles(): Promise<q.ListRolesResponse> {
|
||||
return request.get(`${endpoints.adminRoles()}?limit=200`);
|
||||
}
|
||||
|
||||
export function getRole(roleName: string): Promise<r.TRole> {
|
||||
return request.get(endpoints.getRole(roleName));
|
||||
}
|
||||
|
|
|
|||
|
|
@ -46,6 +46,7 @@ export enum QueryKeys {
|
|||
agentCategories = 'agentCategories',
|
||||
marketplaceAgents = 'marketplaceAgents',
|
||||
roles = 'roles',
|
||||
rolesList = 'rolesList',
|
||||
conversationTags = 'conversationTags',
|
||||
health = 'health',
|
||||
userTerms = 'userTerms',
|
||||
|
|
|
|||
|
|
@ -111,6 +111,16 @@ const defaultRolesSchema = z.object({
|
|||
}),
|
||||
});
|
||||
|
||||
const systemRoleSet = new Set(Object.values(SystemRoles).map((r) => r.toUpperCase()));
|
||||
|
||||
/** Case-insensitive check for reserved system role names. */
|
||||
export function isSystemRoleName(name: string | undefined | null): boolean {
|
||||
if (!name) {
|
||||
return false;
|
||||
}
|
||||
return systemRoleSet.has(name.toUpperCase());
|
||||
}
|
||||
|
||||
export const roleDefaults = defaultRolesSchema.parse({
|
||||
[SystemRoles.ADMIN]: {
|
||||
name: SystemRoles.ADMIN,
|
||||
|
|
|
|||
|
|
@ -172,6 +172,13 @@ export type AccessRole = {
|
|||
|
||||
export type AccessRolesResponse = AccessRole[];
|
||||
|
||||
export type ListRolesResponse = {
|
||||
roles: Array<{ _id?: string; name: string; description?: string }>;
|
||||
total: number;
|
||||
limit: number;
|
||||
offset?: number;
|
||||
};
|
||||
|
||||
export interface MCPServerStatus {
|
||||
requiresOAuth: boolean;
|
||||
connectionState: 'disconnected' | 'connecting' | 'connected' | 'error';
|
||||
|
|
|
|||
|
|
@ -55,7 +55,7 @@
|
|||
"@types/node": "^20.3.0",
|
||||
"jest": "^30.2.0",
|
||||
"jest-junit": "^16.0.0",
|
||||
"mongodb-memory-server": "^10.1.4",
|
||||
"mongodb-memory-server": "^11.0.1",
|
||||
"rimraf": "^6.1.3",
|
||||
"rollup": "^4.34.9",
|
||||
"rollup-plugin-peer-deps-external": "^2.2.4",
|
||||
|
|
|
|||
|
|
@ -29,14 +29,11 @@ export async function loadDefaultInterface({
|
|||
|
||||
const loadedInterface: AppConfig['interfaceConfig'] = removeNullishValues({
|
||||
// UI elements - use schema defaults
|
||||
endpointsMenu:
|
||||
interfaceConfig?.endpointsMenu ?? (hasModelSpecs ? false : defaults.endpointsMenu),
|
||||
modelSelect:
|
||||
interfaceConfig?.modelSelect ??
|
||||
(hasModelSpecs ? includesAddedEndpoints : defaults.modelSelect),
|
||||
parameters: interfaceConfig?.parameters ?? (hasModelSpecs ? false : defaults.parameters),
|
||||
presets: interfaceConfig?.presets ?? (hasModelSpecs ? false : defaults.presets),
|
||||
sidePanel: interfaceConfig?.sidePanel ?? defaults.sidePanel,
|
||||
privacyPolicy: interfaceConfig?.privacyPolicy ?? defaults.privacyPolicy,
|
||||
termsOfService: interfaceConfig?.termsOfService ?? defaults.termsOfService,
|
||||
mcpServers: interfaceConfig?.mcpServers ?? defaults.mcpServers,
|
||||
|
|
|
|||
|
|
@ -16,7 +16,7 @@ function fakeConfig(overrides: Record<string, unknown>, priority: number): IConf
|
|||
}
|
||||
|
||||
const baseConfig = {
|
||||
interfaceConfig: { endpointsMenu: true, sidePanel: true },
|
||||
interfaceConfig: { modelSelect: true, parameters: true },
|
||||
registration: { enabled: true },
|
||||
endpoints: ['openAI'],
|
||||
} as unknown as AppConfig;
|
||||
|
|
@ -32,11 +32,11 @@ describe('mergeConfigOverrides', () => {
|
|||
});
|
||||
|
||||
it('deep merges interface UI fields into interfaceConfig', () => {
|
||||
const configs = [fakeConfig({ interface: { endpointsMenu: false } }, 10)];
|
||||
const configs = [fakeConfig({ interface: { modelSelect: false } }, 10)];
|
||||
const result = mergeConfigOverrides(baseConfig, configs) as unknown as Record<string, unknown>;
|
||||
const iface = result.interfaceConfig as Record<string, unknown>;
|
||||
expect(iface.endpointsMenu).toBe(false);
|
||||
expect(iface.sidePanel).toBe(true);
|
||||
expect(iface.modelSelect).toBe(false);
|
||||
expect(iface.parameters).toBe(true);
|
||||
});
|
||||
|
||||
it('sorts by priority — higher priority wins', () => {
|
||||
|
|
@ -58,16 +58,16 @@ describe('mergeConfigOverrides', () => {
|
|||
|
||||
it('does not mutate the base config', () => {
|
||||
const original = JSON.parse(JSON.stringify(baseConfig));
|
||||
const configs = [fakeConfig({ interface: { endpointsMenu: false } }, 10)];
|
||||
const configs = [fakeConfig({ interface: { modelSelect: false } }, 10)];
|
||||
mergeConfigOverrides(baseConfig, configs);
|
||||
expect(baseConfig).toEqual(original);
|
||||
});
|
||||
|
||||
it('handles null override values', () => {
|
||||
const configs = [fakeConfig({ interface: { endpointsMenu: null } }, 10)];
|
||||
const configs = [fakeConfig({ interface: { modelSelect: null } }, 10)];
|
||||
const result = mergeConfigOverrides(baseConfig, configs) as unknown as Record<string, unknown>;
|
||||
const iface = result.interfaceConfig as Record<string, unknown>;
|
||||
expect(iface.endpointsMenu).toBeNull();
|
||||
expect(iface.modelSelect).toBeNull();
|
||||
});
|
||||
|
||||
it('skips configs with no overrides object', () => {
|
||||
|
|
@ -97,20 +97,20 @@ describe('mergeConfigOverrides', () => {
|
|||
|
||||
it('merges three priority levels in order', () => {
|
||||
const configs = [
|
||||
fakeConfig({ interface: { endpointsMenu: false } }, 0),
|
||||
fakeConfig({ interface: { endpointsMenu: true, sidePanel: false } }, 10),
|
||||
fakeConfig({ interface: { sidePanel: true } }, 100),
|
||||
fakeConfig({ interface: { modelSelect: false } }, 0),
|
||||
fakeConfig({ interface: { modelSelect: true, parameters: false } }, 10),
|
||||
fakeConfig({ interface: { parameters: true } }, 100),
|
||||
];
|
||||
const result = mergeConfigOverrides(baseConfig, configs) as unknown as Record<string, unknown>;
|
||||
const iface = result.interfaceConfig as Record<string, unknown>;
|
||||
expect(iface.endpointsMenu).toBe(true);
|
||||
expect(iface.sidePanel).toBe(true);
|
||||
expect(iface.modelSelect).toBe(true);
|
||||
expect(iface.parameters).toBe(true);
|
||||
});
|
||||
|
||||
it('remaps all renamed YAML keys (exhaustiveness check)', () => {
|
||||
const base = {
|
||||
mcpConfig: null,
|
||||
interfaceConfig: { endpointsMenu: true },
|
||||
interfaceConfig: { modelSelect: true },
|
||||
turnstileConfig: {},
|
||||
} as unknown as AppConfig;
|
||||
|
||||
|
|
@ -118,7 +118,7 @@ describe('mergeConfigOverrides', () => {
|
|||
fakeConfig(
|
||||
{
|
||||
mcpServers: { srv: { url: 'http://mcp' } },
|
||||
interface: { endpointsMenu: false },
|
||||
interface: { modelSelect: false },
|
||||
turnstile: { siteKey: 'key-123' },
|
||||
},
|
||||
10,
|
||||
|
|
@ -127,7 +127,7 @@ describe('mergeConfigOverrides', () => {
|
|||
const result = mergeConfigOverrides(base, configs) as unknown as Record<string, unknown>;
|
||||
|
||||
expect(result.mcpConfig).toEqual({ srv: { url: 'http://mcp' } });
|
||||
expect((result.interfaceConfig as Record<string, unknown>).endpointsMenu).toBe(false);
|
||||
expect((result.interfaceConfig as Record<string, unknown>).modelSelect).toBe(false);
|
||||
expect((result.turnstileConfig as Record<string, unknown>).siteKey).toBe('key-123');
|
||||
|
||||
expect(result.mcpServers).toBeUndefined();
|
||||
|
|
@ -137,14 +137,14 @@ describe('mergeConfigOverrides', () => {
|
|||
|
||||
it('strips interface permission fields from overrides', () => {
|
||||
const base = {
|
||||
interfaceConfig: { endpointsMenu: true, sidePanel: true },
|
||||
interfaceConfig: { modelSelect: true, parameters: true },
|
||||
} as unknown as AppConfig;
|
||||
|
||||
const configs = [
|
||||
fakeConfig(
|
||||
{
|
||||
interface: {
|
||||
endpointsMenu: false,
|
||||
modelSelect: false,
|
||||
prompts: false,
|
||||
agents: { use: false },
|
||||
marketplace: { use: false },
|
||||
|
|
@ -157,14 +157,14 @@ describe('mergeConfigOverrides', () => {
|
|||
const iface = result.interfaceConfig as Record<string, unknown>;
|
||||
|
||||
// UI field should be merged
|
||||
expect(iface.endpointsMenu).toBe(false);
|
||||
expect(iface.modelSelect).toBe(false);
|
||||
// Boolean permission fields should be stripped
|
||||
expect(iface.prompts).toBeUndefined();
|
||||
// Object permission fields with only permission sub-keys should be stripped
|
||||
expect(iface.agents).toBeUndefined();
|
||||
expect(iface.marketplace).toBeUndefined();
|
||||
// Untouched base field preserved
|
||||
expect(iface.sidePanel).toBe(true);
|
||||
expect(iface.parameters).toBe(true);
|
||||
});
|
||||
|
||||
it('preserves UI sub-keys in composite permission fields like mcpServers', () => {
|
||||
|
|
@ -220,7 +220,7 @@ describe('mergeConfigOverrides', () => {
|
|||
|
||||
it('drops interface entirely when only permission fields are present', () => {
|
||||
const base = {
|
||||
interfaceConfig: { endpointsMenu: true },
|
||||
interfaceConfig: { modelSelect: true },
|
||||
} as unknown as AppConfig;
|
||||
|
||||
const configs = [fakeConfig({ interface: { prompts: false, agents: false } }, 10)];
|
||||
|
|
@ -228,7 +228,7 @@ describe('mergeConfigOverrides', () => {
|
|||
const iface = result.interfaceConfig as Record<string, unknown>;
|
||||
|
||||
// Base should be unchanged
|
||||
expect(iface.endpointsMenu).toBe(true);
|
||||
expect(iface.modelSelect).toBe(true);
|
||||
expect(iface.prompts).toBeUndefined();
|
||||
expect(iface.agents).toBeUndefined();
|
||||
});
|
||||
|
|
@ -281,7 +281,7 @@ describe('INTERFACE_PERMISSION_FIELDS', () => {
|
|||
});
|
||||
|
||||
it('does not contain UI-only fields', () => {
|
||||
const uiFields = ['endpointsMenu', 'modelSelect', 'parameters', 'presets', 'sidePanel'];
|
||||
const uiFields = ['modelSelect', 'parameters', 'presets'];
|
||||
for (const field of uiFields) {
|
||||
expect(INTERFACE_PERMISSION_FIELDS.has(field)).toBe(false);
|
||||
}
|
||||
|
|
|
|||
|
|
@ -32,7 +32,7 @@ describe('upsertConfig', () => {
|
|||
PrincipalType.ROLE,
|
||||
'admin',
|
||||
PrincipalModel.ROLE,
|
||||
{ interface: { endpointsMenu: false } },
|
||||
{ interface: { modelSelect: false } },
|
||||
10,
|
||||
);
|
||||
|
||||
|
|
@ -49,7 +49,7 @@ describe('upsertConfig', () => {
|
|||
PrincipalType.ROLE,
|
||||
'admin',
|
||||
PrincipalModel.ROLE,
|
||||
{ interface: { endpointsMenu: false } },
|
||||
{ interface: { modelSelect: false } },
|
||||
10,
|
||||
);
|
||||
|
||||
|
|
@ -57,7 +57,7 @@ describe('upsertConfig', () => {
|
|||
PrincipalType.ROLE,
|
||||
'admin',
|
||||
PrincipalModel.ROLE,
|
||||
{ interface: { endpointsMenu: true } },
|
||||
{ interface: { modelSelect: true } },
|
||||
10,
|
||||
);
|
||||
|
||||
|
|
@ -70,7 +70,7 @@ describe('upsertConfig', () => {
|
|||
PrincipalType.ROLE,
|
||||
'admin',
|
||||
PrincipalModel.ROLE,
|
||||
{ interface: { endpointsMenu: true } },
|
||||
{ interface: { modelSelect: true } },
|
||||
10,
|
||||
);
|
||||
|
||||
|
|
@ -78,7 +78,7 @@ describe('upsertConfig', () => {
|
|||
PrincipalType.ROLE,
|
||||
'admin',
|
||||
PrincipalModel.ROLE,
|
||||
{ interface: { endpointsMenu: false } },
|
||||
{ interface: { modelSelect: false } },
|
||||
10,
|
||||
);
|
||||
|
||||
|
|
@ -240,7 +240,7 @@ describe('patchConfigFields', () => {
|
|||
PrincipalType.ROLE,
|
||||
'admin',
|
||||
PrincipalModel.ROLE,
|
||||
{ interface: { endpointsMenu: true, sidePanel: true } },
|
||||
{ interface: { modelSelect: true, parameters: true } },
|
||||
10,
|
||||
);
|
||||
|
||||
|
|
@ -248,14 +248,14 @@ describe('patchConfigFields', () => {
|
|||
PrincipalType.ROLE,
|
||||
'admin',
|
||||
PrincipalModel.ROLE,
|
||||
{ 'interface.endpointsMenu': false },
|
||||
{ 'interface.modelSelect': false },
|
||||
10,
|
||||
);
|
||||
|
||||
const overrides = result!.overrides as Record<string, unknown>;
|
||||
const iface = overrides.interface as Record<string, unknown>;
|
||||
expect(iface.endpointsMenu).toBe(false);
|
||||
expect(iface.sidePanel).toBe(true);
|
||||
expect(iface.modelSelect).toBe(false);
|
||||
expect(iface.parameters).toBe(true);
|
||||
});
|
||||
|
||||
it('creates a config if none exists (upsert)', async () => {
|
||||
|
|
@ -263,7 +263,7 @@ describe('patchConfigFields', () => {
|
|||
PrincipalType.ROLE,
|
||||
'newrole',
|
||||
PrincipalModel.ROLE,
|
||||
{ 'interface.endpointsMenu': false },
|
||||
{ 'interface.modelSelect': false },
|
||||
10,
|
||||
);
|
||||
|
||||
|
|
@ -278,19 +278,19 @@ describe('unsetConfigField', () => {
|
|||
PrincipalType.ROLE,
|
||||
'admin',
|
||||
PrincipalModel.ROLE,
|
||||
{ interface: { endpointsMenu: false, sidePanel: false } },
|
||||
{ interface: { modelSelect: false, parameters: false } },
|
||||
10,
|
||||
);
|
||||
|
||||
const result = await methods.unsetConfigField(
|
||||
PrincipalType.ROLE,
|
||||
'admin',
|
||||
'interface.endpointsMenu',
|
||||
'interface.modelSelect',
|
||||
);
|
||||
const overrides = result!.overrides as Record<string, unknown>;
|
||||
const iface = overrides.interface as Record<string, unknown>;
|
||||
expect(iface.endpointsMenu).toBeUndefined();
|
||||
expect(iface.sidePanel).toBe(false);
|
||||
expect(iface.modelSelect).toBeUndefined();
|
||||
expect(iface.parameters).toBe(false);
|
||||
});
|
||||
|
||||
it('returns null for non-existent config', async () => {
|
||||
|
|
|
|||
|
|
@ -116,11 +116,11 @@ describe('Conversation Structure Tests', () => {
|
|||
conversationId,
|
||||
user: userId,
|
||||
text: `Message ${i}`,
|
||||
createdAt: new Date(Date.now() + (i % 2 === 0 ? i * 500000 : -i * 500000)),
|
||||
createdAt: new Date(Date.now() + i * 1000),
|
||||
}));
|
||||
|
||||
// Save messages with new timestamps being generated (message objects ignored)
|
||||
await bulkSaveMessages(messages);
|
||||
// Save messages preserving original timestamps so sort order is deterministic
|
||||
await bulkSaveMessages(messages, true);
|
||||
|
||||
// Retrieve messages (this will sort by createdAt, but it shouldn't matter now)
|
||||
const retrievedMessages = await getMessages({ conversationId, user: userId });
|
||||
|
|
|
|||
|
|
@ -73,7 +73,10 @@ describe('Meilisearch Mongoose plugin', () => {
|
|||
title: 'Test Conversation',
|
||||
endpoint: EModelEndpoint.openAI,
|
||||
});
|
||||
expect(mockAddDocuments).toHaveBeenCalled();
|
||||
expect(mockAddDocuments).toHaveBeenCalledWith(
|
||||
[expect.objectContaining({ conversationId: expect.anything() })],
|
||||
{ primaryKey: 'conversationId' },
|
||||
);
|
||||
});
|
||||
|
||||
test('saving conversation indexes with expiredAt=null w/ meilisearch', async () => {
|
||||
|
|
@ -105,7 +108,10 @@ describe('Meilisearch Mongoose plugin', () => {
|
|||
user: new mongoose.Types.ObjectId(),
|
||||
isCreatedByUser: true,
|
||||
});
|
||||
expect(mockAddDocuments).toHaveBeenCalled();
|
||||
expect(mockAddDocuments).toHaveBeenCalledWith(
|
||||
[expect.objectContaining({ messageId: expect.anything() })],
|
||||
{ primaryKey: 'messageId' },
|
||||
);
|
||||
});
|
||||
|
||||
test('saving messages with expiredAt=null indexes w/ meilisearch', async () => {
|
||||
|
|
@ -130,6 +136,87 @@ describe('Meilisearch Mongoose plugin', () => {
|
|||
expect(mockAddDocuments).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('updating an indexed conversation calls updateDocuments with primaryKey', async () => {
|
||||
const conversationModel = createConversationModel(mongoose);
|
||||
const convo = await conversationModel.create({
|
||||
conversationId: new mongoose.Types.ObjectId().toString(),
|
||||
user: new mongoose.Types.ObjectId(),
|
||||
title: 'Original Title',
|
||||
endpoint: EModelEndpoint.openAI,
|
||||
});
|
||||
mockUpdateDocuments.mockClear();
|
||||
|
||||
convo._meiliIndex = true;
|
||||
convo.title = 'Updated Title';
|
||||
await convo.save();
|
||||
|
||||
expect(mockUpdateDocuments).toHaveBeenCalledWith(
|
||||
[expect.objectContaining({ conversationId: expect.anything() })],
|
||||
{ primaryKey: 'conversationId' },
|
||||
);
|
||||
});
|
||||
|
||||
test('updating an indexed message calls updateDocuments with primaryKey: messageId', async () => {
|
||||
const messageModel = createMessageModel(mongoose);
|
||||
const msg = await messageModel.create({
|
||||
messageId: new mongoose.Types.ObjectId().toString(),
|
||||
conversationId: new mongoose.Types.ObjectId(),
|
||||
user: new mongoose.Types.ObjectId(),
|
||||
isCreatedByUser: true,
|
||||
});
|
||||
mockUpdateDocuments.mockClear();
|
||||
|
||||
msg._meiliIndex = true;
|
||||
msg.text = 'Updated text';
|
||||
await msg.save();
|
||||
|
||||
expect(mockUpdateDocuments).toHaveBeenCalledWith(
|
||||
[expect.objectContaining({ messageId: expect.anything() })],
|
||||
{ primaryKey: 'messageId' },
|
||||
);
|
||||
});
|
||||
|
||||
test('deleteObjectFromMeili calls deleteDocument with messageId, not _id', async () => {
|
||||
const messageModel = createMessageModel(mongoose);
|
||||
const msgId = new mongoose.Types.ObjectId().toString();
|
||||
const msg = await messageModel.create({
|
||||
messageId: msgId,
|
||||
conversationId: new mongoose.Types.ObjectId(),
|
||||
user: new mongoose.Types.ObjectId(),
|
||||
isCreatedByUser: true,
|
||||
});
|
||||
mockDeleteDocument.mockClear();
|
||||
|
||||
const typedMsg = msg as unknown as import('./mongoMeili').DocumentWithMeiliIndex;
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
typedMsg.deleteObjectFromMeili!((err) => (err ? reject(err) : resolve()));
|
||||
});
|
||||
|
||||
expect(mockDeleteDocument).toHaveBeenCalledWith(msgId);
|
||||
expect(mockDeleteDocument).not.toHaveBeenCalledWith(String(msg._id));
|
||||
});
|
||||
|
||||
test('updateDocuments receives preprocessed data with primaryKey', async () => {
|
||||
const conversationModel = createConversationModel(mongoose);
|
||||
const conversationId = 'abc|def|ghi';
|
||||
const convo = await conversationModel.create({
|
||||
conversationId,
|
||||
user: new mongoose.Types.ObjectId(),
|
||||
title: 'Pipe Test',
|
||||
endpoint: EModelEndpoint.openAI,
|
||||
});
|
||||
mockUpdateDocuments.mockClear();
|
||||
|
||||
convo._meiliIndex = true;
|
||||
convo.title = 'Updated Pipe Test';
|
||||
await convo.save();
|
||||
|
||||
expect(mockUpdateDocuments).toHaveBeenCalledWith(
|
||||
[expect.objectContaining({ conversationId: 'abc--def--ghi' })],
|
||||
{ primaryKey: 'conversationId' },
|
||||
);
|
||||
});
|
||||
|
||||
test('sync w/ meili does not include TTL documents', async () => {
|
||||
const conversationModel = createConversationModel(mongoose) as SchemaWithMeiliMethods;
|
||||
await conversationModel.create({
|
||||
|
|
@ -299,8 +386,10 @@ describe('Meilisearch Mongoose plugin', () => {
|
|||
// Run sync which should call processSyncBatch internally
|
||||
await conversationModel.syncWithMeili();
|
||||
|
||||
// Verify addDocumentsInBatches was called (new batch method)
|
||||
expect(mockAddDocumentsInBatches).toHaveBeenCalled();
|
||||
// Verify addDocumentsInBatches was called with explicit primaryKey
|
||||
expect(mockAddDocumentsInBatches).toHaveBeenCalledWith(expect.any(Array), undefined, {
|
||||
primaryKey: 'conversationId',
|
||||
});
|
||||
});
|
||||
|
||||
test('addObjectToMeili retries on failure', async () => {
|
||||
|
|
|
|||
|
|
@ -94,7 +94,7 @@ const getSyncConfig = () => ({
|
|||
* Validates the required options for configuring the mongoMeili plugin.
|
||||
*/
|
||||
const validateOptions = (options: Partial<MongoMeiliOptions>): void => {
|
||||
const requiredKeys: (keyof MongoMeiliOptions)[] = ['host', 'apiKey', 'indexName'];
|
||||
const requiredKeys: (keyof MongoMeiliOptions)[] = ['host', 'apiKey', 'indexName', 'primaryKey'];
|
||||
requiredKeys.forEach((key) => {
|
||||
if (!options[key]) {
|
||||
throw new Error(`Missing mongoMeili Option: ${key}`);
|
||||
|
|
@ -130,19 +130,21 @@ const processBatch = async <T>(
|
|||
* @param config - Configuration object.
|
||||
* @param config.index - The MeiliSearch index object.
|
||||
* @param config.attributesToIndex - List of attributes to index.
|
||||
* @param config.primaryKey - The primary key field for MeiliSearch document operations.
|
||||
* @param config.syncOptions - Sync configuration options.
|
||||
* @returns A class definition that will be loaded into the Mongoose schema.
|
||||
*/
|
||||
const createMeiliMongooseModel = ({
|
||||
index,
|
||||
attributesToIndex,
|
||||
primaryKey,
|
||||
syncOptions,
|
||||
}: {
|
||||
index: Index<MeiliIndexable>;
|
||||
attributesToIndex: string[];
|
||||
primaryKey: string;
|
||||
syncOptions: { batchSize: number; delayMs: number };
|
||||
}) => {
|
||||
const primaryKey = attributesToIndex[0];
|
||||
const syncConfig = { ...getSyncConfig(), ...syncOptions };
|
||||
|
||||
class MeiliMongooseModel {
|
||||
|
|
@ -255,7 +257,7 @@ const createMeiliMongooseModel = ({
|
|||
|
||||
try {
|
||||
// Add documents to MeiliSearch
|
||||
await index.addDocumentsInBatches(formattedDocs);
|
||||
await index.addDocumentsInBatches(formattedDocs, undefined, { primaryKey });
|
||||
|
||||
// Update MongoDB to mark documents as indexed.
|
||||
// { timestamps: false } prevents Mongoose from touching updatedAt, preserving
|
||||
|
|
@ -422,7 +424,7 @@ const createMeiliMongooseModel = ({
|
|||
|
||||
while (retryCount < maxRetries) {
|
||||
try {
|
||||
await index.addDocuments([object]);
|
||||
await index.addDocuments([object], { primaryKey });
|
||||
break;
|
||||
} catch (error) {
|
||||
retryCount++;
|
||||
|
|
@ -436,7 +438,8 @@ const createMeiliMongooseModel = ({
|
|||
}
|
||||
|
||||
try {
|
||||
await this.collection.updateMany(
|
||||
// eslint-disable-next-line no-restricted-syntax -- _meiliIndex is an internal bookkeeping flag, not tenant-scoped data
|
||||
await this.collection.updateOne(
|
||||
{ _id: this._id as Types.ObjectId },
|
||||
{ $set: { _meiliIndex: true } },
|
||||
);
|
||||
|
|
@ -456,10 +459,8 @@ const createMeiliMongooseModel = ({
|
|||
next: CallbackWithoutResultAndOptionalError,
|
||||
): Promise<void> {
|
||||
try {
|
||||
const object = _.omitBy(_.pick(this.toJSON(), attributesToIndex), (v, k) =>
|
||||
k.startsWith('$'),
|
||||
);
|
||||
await index.updateDocuments([object]);
|
||||
const object = this.preprocessObjectForIndex!();
|
||||
await index.updateDocuments([object], { primaryKey });
|
||||
next();
|
||||
} catch (error) {
|
||||
logger.error('[updateObjectToMeili] Error updating document in Meili:', error);
|
||||
|
|
@ -477,7 +478,7 @@ const createMeiliMongooseModel = ({
|
|||
next: CallbackWithoutResultAndOptionalError,
|
||||
): Promise<void> {
|
||||
try {
|
||||
await index.deleteDocument(this._id as string);
|
||||
await index.deleteDocument(String(this[primaryKey as keyof DocumentWithMeiliIndex]));
|
||||
next();
|
||||
} catch (error) {
|
||||
logger.error('[deleteObjectFromMeili] Error deleting document from Meili:', error);
|
||||
|
|
@ -643,7 +644,7 @@ export default function mongoMeili(schema: Schema, options: MongoMeiliOptions):
|
|||
logger.debug(`[mongoMeili] Added 'user' field to ${indexName} index attributes`);
|
||||
}
|
||||
|
||||
schema.loadClass(createMeiliMongooseModel({ index, attributesToIndex, syncOptions }));
|
||||
schema.loadClass(createMeiliMongooseModel({ index, attributesToIndex, primaryKey, syncOptions }));
|
||||
|
||||
// Register Mongoose hooks
|
||||
schema.post('save', function (doc: DocumentWithMeiliIndex, next) {
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
import type {
|
||||
TEndpoint,
|
||||
FileSources,
|
||||
FileStorage,
|
||||
TFileConfig,
|
||||
TAzureConfig,
|
||||
TCustomConfig,
|
||||
|
|
@ -62,7 +62,7 @@ export interface AppConfig {
|
|||
/** Web search configuration */
|
||||
webSearch?: TCustomConfig['webSearch'];
|
||||
/** File storage strategy ('local', 's3', 'firebase', 'azure_blob') */
|
||||
fileStrategy: FileSources.local | FileSources.s3 | FileSources.firebase | FileSources.azure_blob;
|
||||
fileStrategy: FileStorage;
|
||||
/** File strategies configuration */
|
||||
fileStrategies?: TCustomConfig['fileStrategies'];
|
||||
/** Registration configurations */
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue