🕰️ fix: Preserve updatedAt Timestamps During Meilisearch Batch Sync (#12084)
Some checks are pending
Docker Dev Images Build / build (Dockerfile, librechat-dev, node) (push) Waiting to run
Docker Dev Images Build / build (Dockerfile.multi, librechat-dev-api, api-build) (push) Waiting to run
Sync Locize Translations & Create Translation PR / Sync Translation Keys with Locize (push) Waiting to run
Sync Locize Translations & Create Translation PR / Create Translation PR on Version Published (push) Blocked by required conditions

* refactor: Add timestamps option to updateMany in createMeiliMongooseModel

- Updated the updateMany call in createMeiliMongooseModel to include a timestamps option set to false, ensuring that the operation does not modify the document's timestamps during the indexing process. This change improves the accuracy of document state management in MongoDB.

* test: Add tests to ensure updatedAt timestamps are preserved during syncWithMeili

- Introduced new test cases for the processSyncBatch function to verify that the original updatedAt timestamps on conversations and messages remain unchanged after synchronization with Meilisearch. This enhancement ensures data integrity during the indexing process.

* docs: Update comments in createMeiliMongooseModel to clarify timestamp preservation

- Enhanced comments in the createMeiliMongooseModel function to explain the use of the { timestamps: false } option in the updateMany call, ensuring that original conversation/message timestamps are preserved during the indexing process. This change improves code clarity and maintains the integrity of document timestamps.

* test: Enhance Meilisearch sync tests to verify updatedAt timestamp preservation

- Added assertions to ensure that the updatedAt timestamps of documents remain unchanged before and after synchronization with Meilisearch. This update improves the test coverage for the syncWithMeili function, reinforcing data integrity during the indexing process.
This commit is contained in:
Danny Avila 2026-03-05 10:40:43 -05:00 committed by GitHub
parent 9956a72694
commit d74a62ecd5
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
2 changed files with 107 additions and 2 deletions

View file

@@ -1016,6 +1016,105 @@ describe('Meilisearch Mongoose plugin', () => {
});
});
describe('processSyncBatch does not modify updatedAt timestamps', () => {
  /** Asserts that every document in `docs` still carries `expected` as its updatedAt. */
  const expectUpdatedAtEquals = (docs: Array<{ updatedAt?: unknown }>, expected: Date) => {
    docs.forEach((doc) => {
      expect(new Date(doc.updatedAt as Date).getTime()).toBe(expected.getTime());
    });
  };
  test('syncWithMeili preserves original updatedAt on conversations', async () => {
    const conversationModel = createConversationModel(mongoose) as SchemaWithMeiliMethods;
    await conversationModel.deleteMany({});
    mockAddDocumentsInBatches.mockClear();
    const originalTimestamp = new Date('2024-01-15T12:00:00Z');
    /** Builds an unindexed conversation seed carrying the fixed past timestamp. */
    const seedConversation = (title: string) => ({
      conversationId: new mongoose.Types.ObjectId().toString(),
      user: new mongoose.Types.ObjectId(),
      title,
      endpoint: EModelEndpoint.openAI,
      _meiliIndex: false,
      expiredAt: null,
      createdAt: originalTimestamp,
      updatedAt: originalTimestamp,
    });
    // Write through the raw collection so Mongoose timestamp middleware cannot
    // overwrite the seeded createdAt/updatedAt values.
    await conversationModel.collection.insertMany([
      seedConversation('Old Conversation 1'),
      seedConversation('Old Conversation 2'),
    ]);
    // Sanity check: the seeded timestamps are in place before the sync runs.
    expectUpdatedAtEquals(await conversationModel.find({}).lean(), originalTimestamp);
    // syncWithMeili invokes processSyncBatch internally.
    await conversationModel.syncWithMeili();
    // Both documents were flagged as indexed...
    expect(await conversationModel.countDocuments({ _meiliIndex: true })).toBe(2);
    // ...yet their updatedAt values were left untouched.
    expectUpdatedAtEquals(await conversationModel.find({}).lean(), originalTimestamp);
  });
  test('syncWithMeili preserves original updatedAt on messages', async () => {
    const messageModel = createMessageModel(mongoose) as SchemaWithMeiliMethods;
    await messageModel.deleteMany({});
    mockAddDocumentsInBatches.mockClear();
    const originalTimestamp = new Date('2023-06-01T08:30:00Z');
    /** Builds an unindexed message seed carrying the fixed past timestamp. */
    const seedMessage = (isCreatedByUser: boolean) => ({
      messageId: new mongoose.Types.ObjectId().toString(),
      conversationId: new mongoose.Types.ObjectId().toString(),
      user: new mongoose.Types.ObjectId(),
      isCreatedByUser,
      _meiliIndex: false,
      expiredAt: null,
      createdAt: originalTimestamp,
      updatedAt: originalTimestamp,
    });
    // Raw-collection insert bypasses Mongoose timestamps (see conversation test).
    await messageModel.collection.insertMany([seedMessage(true), seedMessage(false)]);
    expectUpdatedAtEquals(await messageModel.find({}).lean(), originalTimestamp);
    await messageModel.syncWithMeili();
    expect(await messageModel.countDocuments({ _meiliIndex: true })).toBe(2);
    expectUpdatedAtEquals(await messageModel.find({}).lean(), originalTimestamp);
  });
});
describe('Missing _meiliIndex property handling in sync process', () => {
test('syncWithMeili includes documents with missing _meiliIndex', async () => {
const conversationModel = createConversationModel(mongoose) as SchemaWithMeiliMethods;

View file

@@ -257,9 +257,15 @@ const createMeiliMongooseModel = ({
// Add documents to MeiliSearch
await index.addDocumentsInBatches(formattedDocs);
// Update MongoDB to mark documents as indexed
// Update MongoDB to mark documents as indexed.
// { timestamps: false } prevents Mongoose from touching updatedAt, preserving
// original conversation/message timestamps (fixes sidebar chronological sort).
const docsIds = documents.map((doc) => doc._id);
await this.updateMany({ _id: { $in: docsIds } }, { $set: { _meiliIndex: true } });
await this.updateMany(
{ _id: { $in: docsIds } },
{ $set: { _meiliIndex: true } },
{ timestamps: false },
);
} catch (error) {
logger.error('[processSyncBatch] Error processing batch:', error);
throw error;