🐘 feat: FerretDB Compatibility (#11769)

* feat: replace unsupported MongoDB aggregation operators for FerretDB compatibility

Replace $lookup, $unwind, $sample, $replaceRoot, and $addFields aggregation
stages which are unsupported on FerretDB v2.x (postgres-documentdb backend).

- Prompt.js: Replace $lookup/$unwind/$project pipelines with find().select().lean()
  + attachProductionPrompts() batch helper. Replace $group/$replaceRoot/$sample
  in getRandomPromptGroups with distinct() + Fisher-Yates shuffle.
- Agent/Prompt migration scripts: Replace $lookup anti-join pattern with
  distinct() + $nin two-step queries for finding un-migrated resources.

All replacement patterns verified against FerretDB v2.7.0.

Co-authored-by: Cursor <cursoragent@cursor.com>

* fix: use $pullAll for simple array removals, fix memberIds type mismatches

Replace $pull with $pullAll for exact-value scalar array removals. Both
operators work on MongoDB and FerretDB, but $pullAll is more explicit for
exact matching (no condition expressions).

Fix critical type mismatch bugs where ObjectId values were used against
String[] memberIds arrays in Group queries:
- config/delete-user.js: use string uid instead of ObjectId user._id
- e2e/setup/cleanupUser.ts: convert userId.toString() before query

Harden PermissionService.bulkUpdateResourcePermissions abort handling to
prevent crash when abortTransaction is called after commitTransaction.

All changes verified against FerretDB v2.7.0 and MongoDB Memory Server.

Co-authored-by: Cursor <cursoragent@cursor.com>

* fix: harden transaction support probe for FerretDB compatibility

Commit the transaction before aborting in supportsTransactions probe, and
wrap abortTransaction in try-catch to prevent crashes when abort is called
after a successful commit (observed behavior on FerretDB).

Co-authored-by: Cursor <cursoragent@cursor.com>

* feat: add FerretDB compatibility test suite, retry utilities, and CI config

Add comprehensive FerretDB integration test suite covering:
- $pullAll scalar array operations
- $pull with subdocument conditions
- $lookup replacement (find + manual join)
- $sample replacement (distinct + Fisher-Yates)
- $bit and $bitsAllSet operations
- Migration anti-join pattern
- Multi-tenancy (useDb, scaling, write amplification)
- Sharding proof-of-concept
- Production operations (backup/restore, schema migration, deadlock retry)

Add production retryWithBackoff utility for deadlock recovery during
concurrent index creation on FerretDB/DocumentDB backends.

Add UserController.spec.js tests for deleteUserController (runs in CI).

Configure jest and eslint to isolate FerretDB tests from CI pipelines:
- packages/data-schemas/jest.config.mjs: ignore misc/ directory
- eslint.config.mjs: ignore packages/data-schemas/misc/

Include Docker Compose config for local FerretDB v2.7 + postgres-documentdb,
dedicated jest/tsconfig for the test files, and multi-tenancy findings doc.

Co-authored-by: Cursor <cursoragent@cursor.com>

* style: brace formatting in aclEntry.ts modifyPermissionBits

Co-authored-by: Cursor <cursoragent@cursor.com>

* refactor: reorganize retry utilities and update imports

- Moved retryWithBackoff utility to a new file `retry.ts` for better structure.
- Updated imports in `orgOperations.ferretdb.spec.ts` to reflect the new location of retry utilities.
- Removed old import statement for retryWithBackoff from index.ts to streamline exports.

* test: add $pullAll coverage for ConversationTag and PermissionService

Add integration tests for deleteConversationTag verifying $pullAll
removes tags from conversations correctly, and for
syncUserEntraGroupMemberships verifying $pullAll removes user from
non-matching Entra groups while preserving local group membership.

---------

Co-authored-by: Cursor <cursoragent@cursor.com>
This commit is contained in:
Danny Avila 2026-02-13 02:14:34 -05:00
parent dc489e7b25
commit 3398f6a17a
No known key found for this signature in database
GPG key ID: BF31EEB2C5CA0956
35 changed files with 4727 additions and 347 deletions

View file

@ -546,16 +546,15 @@ const removeAgentResourceFiles = async ({ agent_id, files }) => {
return acc;
}, {});
// Step 1: Atomically remove file IDs using $pull
const pullOps = {};
const pullAllOps = {};
const resourcesToCheck = new Set();
for (const [resource, fileIds] of Object.entries(filesByResource)) {
const fileIdsPath = `tool_resources.${resource}.file_ids`;
pullOps[fileIdsPath] = { $in: fileIds };
pullAllOps[fileIdsPath] = fileIds;
resourcesToCheck.add(resource);
}
const updatePullData = { $pull: pullOps };
const updatePullData = { $pullAll: pullAllOps };
const agentAfterPull = await Agent.findOneAndUpdate(searchParameter, updatePullData, {
new: true,
}).lean();
@ -775,7 +774,7 @@ const updateAgentProjects = async ({ user, agentId, projectIds, removeProjectIds
for (const projectId of removeProjectIds) {
await removeAgentIdsFromProject(projectId, [agentId]);
}
updateOps.$pull = { projectIds: { $in: removeProjectIds } };
updateOps.$pullAll = { projectIds: removeProjectIds };
}
if (projectIds && projectIds.length > 0) {

View file

@ -165,7 +165,7 @@ const deleteConversationTag = async (user, tag) => {
return null;
}
await Conversation.updateMany({ user, tags: tag }, { $pull: { tags: tag } });
await Conversation.updateMany({ user, tags: tag }, { $pullAll: { tags: [tag] } });
await ConversationTag.updateMany(
{ user, position: { $gt: deletedTag.position } },

View file

@ -0,0 +1,114 @@
// Integration tests for deleteConversationTag, verifying the $pullAll-based tag
// removal (used in place of $pull for FerretDB compatibility) against a real
// in-memory MongoDB instance.
const mongoose = require('mongoose');
const { MongoMemoryServer } = require('mongodb-memory-server');
const { ConversationTag, Conversation } = require('~/db/models');
const { deleteConversationTag } = require('./ConversationTag');

let mongoServer;

// Boot a throwaway in-memory MongoDB once for the whole suite.
beforeAll(async () => {
  mongoServer = await MongoMemoryServer.create();
  await mongoose.connect(mongoServer.getUri());
});

afterAll(async () => {
  await mongoose.disconnect();
  await mongoServer.stop();
});

// Wipe both collections between tests so cases cannot leak state into each other.
afterEach(async () => {
  await ConversationTag.deleteMany({});
  await Conversation.deleteMany({});
});

describe('ConversationTag model - $pullAll operations', () => {
  // String user id: Conversation.user / ConversationTag.user are queried by string here.
  const userId = new mongoose.Types.ObjectId().toString();

  describe('deleteConversationTag', () => {
    it('should remove the tag from all conversations that have it', async () => {
      await ConversationTag.create({ tag: 'work', user: userId, position: 1 });
      await Conversation.create([
        { conversationId: 'conv1', user: userId, endpoint: 'openAI', tags: ['work', 'important'] },
        { conversationId: 'conv2', user: userId, endpoint: 'openAI', tags: ['work'] },
        { conversationId: 'conv3', user: userId, endpoint: 'openAI', tags: ['personal'] },
      ]);

      await deleteConversationTag(userId, 'work');

      const convos = await Conversation.find({ user: userId }).sort({ conversationId: 1 }).lean();
      // Only the deleted tag is removed; unrelated tags remain untouched.
      expect(convos[0].tags).toEqual(['important']);
      expect(convos[1].tags).toEqual([]);
      expect(convos[2].tags).toEqual(['personal']);
    });

    it('should delete the tag document itself', async () => {
      await ConversationTag.create({ tag: 'temp', user: userId, position: 1 });

      const result = await deleteConversationTag(userId, 'temp');

      // The deleted tag document is returned and no longer persisted.
      expect(result).toBeDefined();
      expect(result.tag).toBe('temp');
      const remaining = await ConversationTag.find({ user: userId }).lean();
      expect(remaining).toHaveLength(0);
    });

    it('should return null when the tag does not exist', async () => {
      const result = await deleteConversationTag(userId, 'nonexistent');
      expect(result).toBeNull();
    });

    it('should adjust positions of tags after the deleted one', async () => {
      await ConversationTag.create([
        { tag: 'first', user: userId, position: 1 },
        { tag: 'second', user: userId, position: 2 },
        { tag: 'third', user: userId, position: 3 },
      ]);

      await deleteConversationTag(userId, 'first');

      // Remaining tags shift down so positions stay contiguous after a deletion.
      const tags = await ConversationTag.find({ user: userId }).sort({ position: 1 }).lean();
      expect(tags).toHaveLength(2);
      expect(tags[0].tag).toBe('second');
      expect(tags[0].position).toBe(1);
      expect(tags[1].tag).toBe('third');
      expect(tags[1].position).toBe(2);
    });

    it('should not affect conversations of other users', async () => {
      const otherUser = new mongoose.Types.ObjectId().toString();
      await ConversationTag.create({ tag: 'shared-name', user: userId, position: 1 });
      await ConversationTag.create({ tag: 'shared-name', user: otherUser, position: 1 });
      await Conversation.create([
        { conversationId: 'mine', user: userId, endpoint: 'openAI', tags: ['shared-name'] },
        { conversationId: 'theirs', user: otherUser, endpoint: 'openAI', tags: ['shared-name'] },
      ]);

      await deleteConversationTag(userId, 'shared-name');

      // Deletion is scoped by user even when tag names collide across users.
      const myConvo = await Conversation.findOne({ conversationId: 'mine' }).lean();
      const theirConvo = await Conversation.findOne({ conversationId: 'theirs' }).lean();
      expect(myConvo.tags).toEqual([]);
      expect(theirConvo.tags).toEqual(['shared-name']);
    });

    it('should handle duplicate tags in conversations correctly', async () => {
      await ConversationTag.create({ tag: 'dup', user: userId, position: 1 });
      const conv = await Conversation.create({
        conversationId: 'conv-dup',
        user: userId,
        endpoint: 'openAI',
        tags: ['dup', 'other', 'dup'],
      });

      await deleteConversationTag(userId, 'dup');

      // $pullAll removes every occurrence of the value, not just the first match.
      const updated = await Conversation.findById(conv._id).lean();
      expect(updated.tags).toEqual(['other']);
    });
  });
});

View file

@ -64,7 +64,7 @@ const addGroupIdsToProject = async function (projectId, promptGroupIds) {
const removeGroupIdsFromProject = async function (projectId, promptGroupIds) {
return await Project.findByIdAndUpdate(
projectId,
{ $pull: { promptGroupIds: { $in: promptGroupIds } } },
{ $pullAll: { promptGroupIds: promptGroupIds } },
{ new: true },
);
};
@ -76,7 +76,7 @@ const removeGroupIdsFromProject = async function (projectId, promptGroupIds) {
* @returns {Promise<void>}
*/
const removeGroupFromAllProjects = async (promptGroupId) => {
await Project.updateMany({}, { $pull: { promptGroupIds: promptGroupId } });
await Project.updateMany({}, { $pullAll: { promptGroupIds: [promptGroupId] } });
};
/**
@ -104,7 +104,7 @@ const addAgentIdsToProject = async function (projectId, agentIds) {
const removeAgentIdsFromProject = async function (projectId, agentIds) {
return await Project.findByIdAndUpdate(
projectId,
{ $pull: { agentIds: { $in: agentIds } } },
{ $pullAll: { agentIds: agentIds } },
{ new: true },
);
};
@ -116,7 +116,7 @@ const removeAgentIdsFromProject = async function (projectId, agentIds) {
* @returns {Promise<void>}
*/
const removeAgentFromAllProjects = async (agentId) => {
await Project.updateMany({}, { $pull: { agentIds: agentId } });
await Project.updateMany({}, { $pullAll: { agentIds: [agentId] } });
};
module.exports = {

View file

@ -17,83 +17,25 @@ const { removeAllPermissions } = require('~/server/services/PermissionService');
const { PromptGroup, Prompt, AclEntry } = require('~/db/models');
/**
* Create a pipeline for the aggregation to get prompt groups
* @param {Object} query
* @param {number} skip
* @param {number} limit
* @returns {[Object]} - The pipeline for the aggregation
* Batch-fetches production prompts for an array of prompt groups
* and attaches them as `productionPrompt` field.
* Replaces $lookup aggregation for FerretDB compatibility.
*/
const createGroupPipeline = (query, skip, limit) => {
return [
{ $match: query },
{ $sort: { createdAt: -1 } },
{ $skip: skip },
{ $limit: limit },
{
$lookup: {
from: 'prompts',
localField: 'productionId',
foreignField: '_id',
as: 'productionPrompt',
},
},
{ $unwind: { path: '$productionPrompt', preserveNullAndEmptyArrays: true } },
{
$project: {
name: 1,
numberOfGenerations: 1,
oneliner: 1,
category: 1,
projectIds: 1,
productionId: 1,
author: 1,
authorName: 1,
createdAt: 1,
updatedAt: 1,
'productionPrompt.prompt': 1,
// 'productionPrompt._id': 1,
// 'productionPrompt.type': 1,
},
},
];
};
const attachProductionPrompts = async (groups) => {
const uniqueIds = [...new Set(groups.map((g) => g.productionId?.toString()).filter(Boolean))];
if (uniqueIds.length === 0) {
return groups.map((g) => ({ ...g, productionPrompt: null }));
}
/**
* Create a pipeline for the aggregation to get all prompt groups
* @param {Object} query
* @param {Partial<MongoPromptGroup>} $project
* @returns {[Object]} - The pipeline for the aggregation
*/
const createAllGroupsPipeline = (
query,
$project = {
name: 1,
oneliner: 1,
category: 1,
author: 1,
authorName: 1,
createdAt: 1,
updatedAt: 1,
command: 1,
'productionPrompt.prompt': 1,
},
) => {
return [
{ $match: query },
{ $sort: { createdAt: -1 } },
{
$lookup: {
from: 'prompts',
localField: 'productionId',
foreignField: '_id',
as: 'productionPrompt',
},
},
{ $unwind: { path: '$productionPrompt', preserveNullAndEmptyArrays: true } },
{
$project,
},
];
const prompts = await Prompt.find({ _id: { $in: uniqueIds } })
.select('prompt')
.lean();
const promptMap = new Map(prompts.map((p) => [p._id.toString(), p]));
return groups.map((g) => ({
...g,
productionPrompt: g.productionId ? (promptMap.get(g.productionId.toString()) ?? null) : null,
}));
};
/**
@ -134,8 +76,11 @@ const getAllPromptGroups = async (req, filter) => {
}
}
const promptGroupsPipeline = createAllGroupsPipeline(combinedQuery);
return await PromptGroup.aggregate(promptGroupsPipeline).exec();
const groups = await PromptGroup.find(combinedQuery)
.sort({ createdAt: -1 })
.select('name oneliner category author authorName createdAt updatedAt command productionId')
.lean();
return await attachProductionPrompts(groups);
} catch (error) {
console.error('Error getting all prompt groups', error);
return { message: 'Error getting all prompt groups' };
@ -175,7 +120,6 @@ const getPromptGroups = async (req, filter) => {
let combinedQuery = query;
if (searchShared) {
// const projects = req.user.projects || []; // TODO: handle multiple projects
const project = await getProjectByName(Constants.GLOBAL_PROJECT_NAME, 'promptGroupIds');
if (project && project.promptGroupIds && project.promptGroupIds.length > 0) {
const projectQuery = { _id: { $in: project.promptGroupIds }, ...query };
@ -187,17 +131,19 @@ const getPromptGroups = async (req, filter) => {
const skip = (validatedPageNumber - 1) * validatedPageSize;
const limit = validatedPageSize;
const promptGroupsPipeline = createGroupPipeline(combinedQuery, skip, limit);
const totalPromptGroupsPipeline = [{ $match: combinedQuery }, { $count: 'total' }];
const [promptGroupsResults, totalPromptGroupsResults] = await Promise.all([
PromptGroup.aggregate(promptGroupsPipeline).exec(),
PromptGroup.aggregate(totalPromptGroupsPipeline).exec(),
const [groups, totalPromptGroups] = await Promise.all([
PromptGroup.find(combinedQuery)
.sort({ createdAt: -1 })
.skip(skip)
.limit(limit)
.select(
'name numberOfGenerations oneliner category projectIds productionId author authorName createdAt updatedAt',
)
.lean(),
PromptGroup.countDocuments(combinedQuery),
]);
const promptGroups = promptGroupsResults;
const totalPromptGroups =
totalPromptGroupsResults.length > 0 ? totalPromptGroupsResults[0].total : 0;
const promptGroups = await attachProductionPrompts(groups);
return {
promptGroups,
@ -265,10 +211,8 @@ async function getListPromptGroupsByAccess({
const isPaginated = limit !== null && limit !== undefined;
const normalizedLimit = isPaginated ? Math.min(Math.max(1, parseInt(limit) || 20), 100) : null;
// Build base query combining ACL accessible prompt groups with other filters
const baseQuery = { ...otherParams, _id: { $in: accessibleIds } };
// Add cursor condition
if (after && typeof after === 'string' && after !== 'undefined' && after !== 'null') {
try {
const cursor = JSON.parse(Buffer.from(after, 'base64').toString('utf8'));
@ -281,10 +225,8 @@ async function getListPromptGroupsByAccess({
],
};
// Merge cursor condition with base query
if (Object.keys(baseQuery).length > 0) {
baseQuery.$and = [{ ...baseQuery }, cursorCondition];
// Remove the original conditions from baseQuery to avoid duplication
Object.keys(baseQuery).forEach((key) => {
if (key !== '$and') delete baseQuery[key];
});
@ -296,43 +238,18 @@ async function getListPromptGroupsByAccess({
}
}
// Build aggregation pipeline
const pipeline = [{ $match: baseQuery }, { $sort: { updatedAt: -1, _id: 1 } }];
const findQuery = PromptGroup.find(baseQuery)
.sort({ updatedAt: -1, _id: 1 })
.select(
'name numberOfGenerations oneliner category projectIds productionId author authorName createdAt updatedAt',
);
// Only apply limit if pagination is requested
if (isPaginated) {
pipeline.push({ $limit: normalizedLimit + 1 });
findQuery.limit(normalizedLimit + 1);
}
// Add lookup for production prompt
pipeline.push(
{
$lookup: {
from: 'prompts',
localField: 'productionId',
foreignField: '_id',
as: 'productionPrompt',
},
},
{ $unwind: { path: '$productionPrompt', preserveNullAndEmptyArrays: true } },
{
$project: {
name: 1,
numberOfGenerations: 1,
oneliner: 1,
category: 1,
projectIds: 1,
productionId: 1,
author: 1,
authorName: 1,
createdAt: 1,
updatedAt: 1,
'productionPrompt.prompt': 1,
},
},
);
const promptGroups = await PromptGroup.aggregate(pipeline).exec();
const groups = await findQuery.lean();
const promptGroups = await attachProductionPrompts(groups);
const hasMore = isPaginated ? promptGroups.length > normalizedLimit : false;
const data = (isPaginated ? promptGroups.slice(0, normalizedLimit) : promptGroups).map(
@ -344,7 +261,6 @@ async function getListPromptGroupsByAccess({
},
);
// Generate next cursor only if paginated
let nextCursor = null;
if (isPaginated && hasMore && data.length > 0) {
const lastGroup = promptGroups[normalizedLimit - 1];
@ -477,32 +393,33 @@ module.exports = {
*/
getRandomPromptGroups: async (filter) => {
try {
const result = await PromptGroup.aggregate([
{
$match: {
category: { $ne: '' },
},
},
{
$group: {
_id: '$category',
promptGroup: { $first: '$$ROOT' },
},
},
{
$replaceRoot: { newRoot: '$promptGroup' },
},
{
$sample: { size: +filter.limit + +filter.skip },
},
{
$skip: +filter.skip,
},
{
$limit: +filter.limit,
},
]);
return { prompts: result };
const categories = await PromptGroup.distinct('category', { category: { $ne: '' } });
for (let i = categories.length - 1; i > 0; i--) {
const j = Math.floor(Math.random() * (i + 1));
[categories[i], categories[j]] = [categories[j], categories[i]];
}
const skip = +filter.skip;
const limit = +filter.limit;
const selectedCategories = categories.slice(skip, skip + limit);
if (selectedCategories.length === 0) {
return { prompts: [] };
}
const groups = await PromptGroup.find({ category: { $in: selectedCategories } }).lean();
const groupByCategory = new Map();
for (const group of groups) {
if (!groupByCategory.has(group.category)) {
groupByCategory.set(group.category, group);
}
}
const prompts = selectedCategories.map((cat) => groupByCategory.get(cat)).filter(Boolean);
return { prompts };
} catch (error) {
logger.error('Error getting prompt groups', error);
return { message: 'Error getting prompt groups' };
@ -635,7 +552,7 @@ module.exports = {
await removeGroupIdsFromProject(projectId, [filter._id]);
}
updateOps.$pull = { projectIds: { $in: data.removeProjectIds } };
updateOps.$pullAll = { projectIds: data.removeProjectIds };
delete data.removeProjectIds;
}

View file

@ -266,11 +266,7 @@ const deleteUserController = async (req, res) => {
await deleteUserPrompts(req, user.id); // delete user prompts
await Action.deleteMany({ user: user.id }); // delete user actions
await Token.deleteMany({ userId: user.id }); // delete user OAuth tokens
await Group.updateMany(
// remove user from all groups
{ memberIds: user.id },
{ $pull: { memberIds: user.id } },
);
await Group.updateMany({ memberIds: user.id }, { $pullAll: { memberIds: [user.id] } });
await AclEntry.deleteMany({ principalId: user._id }); // delete user ACL entries
logger.info(`User deleted account. Email: ${user.email} ID: ${user.id}`);
res.status(200).send({ message: 'User deleted' });

View file

@ -0,0 +1,208 @@
// Integration tests for deleteUserController. Collaborator modules are mocked,
// while Group documents live in a real in-memory MongoDB so the $pullAll-based
// group-membership removal can be observed directly.
const mongoose = require('mongoose');
const { MongoMemoryServer } = require('mongodb-memory-server');

// NOTE: jest.mock factories are hoisted above all requires, so they take effect
// before UserController (required further below) loads its dependencies.
jest.mock('@librechat/data-schemas', () => {
  const actual = jest.requireActual('@librechat/data-schemas');
  return {
    ...actual,
    // Silence the logger while keeping the rest of the real module.
    logger: {
      debug: jest.fn(),
      error: jest.fn(),
      warn: jest.fn(),
      info: jest.fn(),
    },
  };
});
jest.mock('~/models', () => ({
  deleteAllUserSessions: jest.fn().mockResolvedValue(undefined),
  deleteAllSharedLinks: jest.fn().mockResolvedValue(undefined),
  updateUserPlugins: jest.fn(),
  deleteUserById: jest.fn().mockResolvedValue(undefined),
  deleteMessages: jest.fn().mockResolvedValue(undefined),
  deletePresets: jest.fn().mockResolvedValue(undefined),
  deleteUserKey: jest.fn().mockResolvedValue(undefined),
  deleteConvos: jest.fn().mockResolvedValue(undefined),
  deleteFiles: jest.fn().mockResolvedValue(undefined),
  updateUser: jest.fn(),
  findToken: jest.fn(),
  getFiles: jest.fn().mockResolvedValue([]),
}));
jest.mock('~/server/services/PluginService', () => ({
  updateUserPluginAuth: jest.fn(),
  deleteUserPluginAuth: jest.fn().mockResolvedValue(undefined),
}));
jest.mock('~/server/services/AuthService', () => ({
  verifyEmail: jest.fn(),
  resendVerificationEmail: jest.fn(),
}));
jest.mock('~/server/services/Files/S3/crud', () => ({
  needsRefresh: jest.fn(),
  getNewS3URL: jest.fn(),
}));
jest.mock('~/server/services/Files/process', () => ({
  processDeleteRequest: jest.fn().mockResolvedValue(undefined),
}));
jest.mock('~/server/services/Config', () => ({
  getAppConfig: jest.fn().mockResolvedValue({}),
  getMCPManager: jest.fn(),
  getFlowStateManager: jest.fn(),
  getMCPServersRegistry: jest.fn(),
}));
jest.mock('~/models/ToolCall', () => ({
  deleteToolCalls: jest.fn().mockResolvedValue(undefined),
}));
jest.mock('~/models/Prompt', () => ({
  deleteUserPrompts: jest.fn().mockResolvedValue(undefined),
}));
jest.mock('~/models/Agent', () => ({
  deleteUserAgents: jest.fn().mockResolvedValue(undefined),
}));
jest.mock('~/cache', () => ({
  getLogStores: jest.fn(),
}));

let mongoServer;

// One in-memory MongoDB instance for the whole suite.
beforeAll(async () => {
  mongoServer = await MongoMemoryServer.create();
  await mongoose.connect(mongoServer.getUri());
});

afterAll(async () => {
  await mongoose.disconnect();
  await mongoServer.stop();
});

// Clear every collection between tests to keep cases independent.
afterEach(async () => {
  const collections = mongoose.connection.collections;
  for (const key in collections) {
    await collections[key].deleteMany({});
  }
});

// Required after the jest.mock declarations above so the controller picks up the mocks.
const { deleteUserController } = require('./UserController');
const { Group } = require('~/db/models');
const { deleteConvos } = require('~/models');

describe('deleteUserController', () => {
  // Chainable express-style response double.
  const mockRes = {
    status: jest.fn().mockReturnThis(),
    send: jest.fn().mockReturnThis(),
    json: jest.fn().mockReturnThis(),
  };

  beforeEach(() => {
    jest.clearAllMocks();
  });

  it('should return 200 on successful deletion', async () => {
    const userId = new mongoose.Types.ObjectId();
    const req = { user: { id: userId.toString(), _id: userId, email: 'test@test.com' } };
    await deleteUserController(req, mockRes);
    expect(mockRes.status).toHaveBeenCalledWith(200);
    expect(mockRes.send).toHaveBeenCalledWith({ message: 'User deleted' });
  });

  it('should remove the user from all groups via $pullAll', async () => {
    const userId = new mongoose.Types.ObjectId();
    const userIdStr = userId.toString();
    const otherUser = new mongoose.Types.ObjectId().toString();
    await Group.create([
      { name: 'Group A', memberIds: [userIdStr, otherUser], source: 'local' },
      { name: 'Group B', memberIds: [userIdStr], source: 'local' },
      { name: 'Group C', memberIds: [otherUser], source: 'local' },
    ]);

    const req = { user: { id: userIdStr, _id: userId, email: 'del@test.com' } };
    await deleteUserController(req, mockRes);

    // The deleted user disappears from every group; other members are preserved.
    const groups = await Group.find({}).sort({ name: 1 }).lean();
    expect(groups[0].memberIds).toEqual([otherUser]);
    expect(groups[1].memberIds).toEqual([]);
    expect(groups[2].memberIds).toEqual([otherUser]);
  });

  it('should handle user that exists in no groups', async () => {
    const userId = new mongoose.Types.ObjectId();
    await Group.create({ name: 'Empty', memberIds: ['someone-else'], source: 'local' });

    const req = { user: { id: userId.toString(), _id: userId, email: 'no-groups@test.com' } };
    await deleteUserController(req, mockRes);

    expect(mockRes.status).toHaveBeenCalledWith(200);
    const group = await Group.findOne({ name: 'Empty' }).lean();
    expect(group.memberIds).toEqual(['someone-else']);
  });

  it('should remove duplicate memberIds if the user appears more than once', async () => {
    const userId = new mongoose.Types.ObjectId();
    const userIdStr = userId.toString();
    await Group.create({
      name: 'Dupes',
      memberIds: [userIdStr, 'other', userIdStr],
      source: 'local',
    });

    const req = { user: { id: userIdStr, _id: userId, email: 'dupe@test.com' } };
    await deleteUserController(req, mockRes);

    // $pullAll removes every occurrence of the user's id.
    const group = await Group.findOne({ name: 'Dupes' }).lean();
    expect(group.memberIds).toEqual(['other']);
  });

  it('should still succeed when deleteConvos throws', async () => {
    const userId = new mongoose.Types.ObjectId();
    // Conversation deletion is best-effort; a failure here must not fail the request.
    deleteConvos.mockRejectedValueOnce(new Error('no convos'));
    const req = { user: { id: userId.toString(), _id: userId, email: 'convos@test.com' } };
    await deleteUserController(req, mockRes);
    expect(mockRes.status).toHaveBeenCalledWith(200);
    expect(mockRes.send).toHaveBeenCalledWith({ message: 'User deleted' });
  });

  it('should return 500 when a critical operation fails', async () => {
    const userId = new mongoose.Types.ObjectId();
    const { deleteMessages } = require('~/models');
    deleteMessages.mockRejectedValueOnce(new Error('db down'));
    const req = { user: { id: userId.toString(), _id: userId, email: 'fail@test.com' } };
    await deleteUserController(req, mockRes);
    expect(mockRes.status).toHaveBeenCalledWith(500);
    expect(mockRes.json).toHaveBeenCalledWith({ message: 'Something went wrong.' });
  });

  it('should use string user.id (not ObjectId user._id) for memberIds removal', async () => {
    // Regression test: Group.memberIds stores strings, so querying/pulling with an
    // ObjectId would silently match nothing.
    const userId = new mongoose.Types.ObjectId();
    const userIdStr = userId.toString();
    const otherUser = 'other-user-id';
    await Group.create({
      name: 'StringCheck',
      memberIds: [userIdStr, otherUser],
      source: 'local',
    });

    const req = { user: { id: userIdStr, _id: userId, email: 'stringcheck@test.com' } };
    await deleteUserController(req, mockRes);

    const group = await Group.findOne({ name: 'StringCheck' }).lean();
    expect(group.memberIds).toEqual([otherUser]);
    expect(group.memberIds).not.toContain(userIdStr);
  });
});

View file

@ -557,7 +557,6 @@ describe('Agent Controllers - Mass Assignment Protection', () => {
const updatedAgent = mockRes.json.mock.calls[0][0];
expect(updatedAgent).toBeDefined();
// Note: updateAgentProjects requires more setup, so we just verify the handler doesn't crash
});
test('should validate tool_resources in updates', async () => {

View file

@ -536,7 +536,7 @@ const syncUserEntraGroupMemberships = async (user, accessToken, session = null)
memberIds: user.idOnTheSource,
idOnTheSource: { $nin: allGroupIds },
},
{ $pull: { memberIds: user.idOnTheSource } },
{ $pullAll: { memberIds: [user.idOnTheSource] } },
sessionOptions,
);
} catch (error) {
@ -788,7 +788,15 @@ const bulkUpdateResourcePermissions = async ({
return results;
} catch (error) {
if (shouldEndSession && supportsTransactions) {
await localSession.abortTransaction();
try {
await localSession.abortTransaction();
} catch (transactionError) {
/** best-effort abort; may fail if commit already succeeded */
logger.error(
`[PermissionService.bulkUpdateResourcePermissions] Error aborting transaction:`,
transactionError,
);
}
}
logger.error(`[PermissionService.bulkUpdateResourcePermissions] Error: ${error.message}`);
throw error;

View file

@ -9,6 +9,7 @@ const {
} = require('librechat-data-provider');
const {
bulkUpdateResourcePermissions,
syncUserEntraGroupMemberships,
getEffectivePermissions,
findAccessibleResources,
getAvailableRoles,
@ -26,7 +27,11 @@ jest.mock('@librechat/data-schemas', () => ({
// Mock GraphApiService to prevent config loading issues
jest.mock('~/server/services/GraphApiService', () => ({
entraIdPrincipalFeatureEnabled: jest.fn().mockReturnValue(false),
getUserOwnedEntraGroups: jest.fn().mockResolvedValue([]),
getUserEntraGroups: jest.fn().mockResolvedValue([]),
getGroupMembers: jest.fn().mockResolvedValue([]),
getGroupOwners: jest.fn().mockResolvedValue([]),
}));
// Mock the logger
@ -1933,3 +1938,134 @@ describe('PermissionService', () => {
});
});
});
// Tests for syncUserEntraGroupMemberships covering the $pullAll-based removal of
// a user from Entra groups that no longer appear in the Graph API response.
describe('syncUserEntraGroupMemberships - $pullAll on Group.memberIds', () => {
  const {
    entraIdPrincipalFeatureEnabled,
    getUserEntraGroups,
  } = require('~/server/services/GraphApiService');
  const { Group } = require('~/db/models');

  // idOnTheSource is the external (Entra) id used as the string key in Group.memberIds.
  const userEntraId = 'entra-user-001';
  const user = {
    openidId: 'openid-sub-001',
    idOnTheSource: userEntraId,
    provider: 'openid',
  };

  beforeEach(async () => {
    await Group.deleteMany({});
    // Enable the Entra principal feature for these tests (mock defaults to false).
    entraIdPrincipalFeatureEnabled.mockReturnValue(true);
  });

  afterEach(() => {
    // Restore mock defaults so other suites are unaffected.
    entraIdPrincipalFeatureEnabled.mockReturnValue(false);
    getUserEntraGroups.mockResolvedValue([]);
  });

  it('should add user to matching Entra groups and remove from non-matching ones', async () => {
    await Group.create([
      { name: 'Group A', source: 'entra', idOnTheSource: 'entra-group-a', memberIds: [] },
      {
        name: 'Group B',
        source: 'entra',
        idOnTheSource: 'entra-group-b',
        memberIds: [userEntraId],
      },
      {
        name: 'Group C',
        source: 'entra',
        idOnTheSource: 'entra-group-c',
        memberIds: [userEntraId],
      },
    ]);
    // API reports membership in A and C only; B should be pruned.
    getUserEntraGroups.mockResolvedValue(['entra-group-a', 'entra-group-c']);

    await syncUserEntraGroupMemberships(user, 'fake-access-token');

    const groups = await Group.find({ source: 'entra' }).sort({ name: 1 }).lean();
    expect(groups[0].memberIds).toContain(userEntraId);
    expect(groups[1].memberIds).not.toContain(userEntraId);
    expect(groups[2].memberIds).toContain(userEntraId);
  });

  it('should not modify groups when API returns empty list (early return)', async () => {
    await Group.create([
      {
        name: 'Group X',
        source: 'entra',
        idOnTheSource: 'entra-x',
        memberIds: [userEntraId, 'other-user'],
      },
      { name: 'Group Y', source: 'entra', idOnTheSource: 'entra-y', memberIds: [userEntraId] },
    ]);
    getUserEntraGroups.mockResolvedValue([]);

    await syncUserEntraGroupMemberships(user, 'fake-token');

    // An empty API response is treated as "no data" rather than "remove everywhere".
    const groups = await Group.find({ source: 'entra' }).sort({ name: 1 }).lean();
    expect(groups[0].memberIds).toContain(userEntraId);
    expect(groups[0].memberIds).toContain('other-user');
    expect(groups[1].memberIds).toContain(userEntraId);
  });

  it('should remove user from groups not in the API response via $pullAll', async () => {
    await Group.create([
      { name: 'Keep', source: 'entra', idOnTheSource: 'entra-keep', memberIds: [userEntraId] },
      {
        name: 'Remove',
        source: 'entra',
        idOnTheSource: 'entra-remove',
        memberIds: [userEntraId, 'other-user'],
      },
    ]);
    getUserEntraGroups.mockResolvedValue(['entra-keep']);

    await syncUserEntraGroupMemberships(user, 'fake-token');

    // Only the synced user is pulled; unrelated members stay in place.
    const keep = await Group.findOne({ idOnTheSource: 'entra-keep' }).lean();
    const remove = await Group.findOne({ idOnTheSource: 'entra-remove' }).lean();
    expect(keep.memberIds).toContain(userEntraId);
    expect(remove.memberIds).not.toContain(userEntraId);
    expect(remove.memberIds).toContain('other-user');
  });

  it('should not modify local groups', async () => {
    await Group.create([
      { name: 'Local Group', source: 'local', memberIds: [userEntraId] },
      {
        name: 'Entra Group',
        source: 'entra',
        idOnTheSource: 'entra-only',
        memberIds: [userEntraId],
      },
    ]);
    getUserEntraGroups.mockResolvedValue([]);

    await syncUserEntraGroupMemberships(user, 'fake-token');

    // Sync is scoped to source: 'entra'; locally managed groups are untouched.
    const localGroup = await Group.findOne({ source: 'local' }).lean();
    expect(localGroup.memberIds).toContain(userEntraId);
  });

  it('should early-return when feature is disabled', async () => {
    entraIdPrincipalFeatureEnabled.mockReturnValue(false);
    await Group.create({
      name: 'Should Not Touch',
      source: 'entra',
      idOnTheSource: 'entra-safe',
      memberIds: [userEntraId],
    });
    getUserEntraGroups.mockResolvedValue([]);

    await syncUserEntraGroupMemberships(user, 'fake-token');

    const group = await Group.findOne({ idOnTheSource: 'entra-safe' }).lean();
    expect(group.memberIds).toContain(userEntraId);
  });
});