Mirror of https://github.com/danny-avila/LibreChat.git
🔧 fix: Sorting and Pagination logic for Conversations (#11242)
- Changed default sorting from 'createdAt' to 'updatedAt' in both Conversation and Message routes.
- Updated pagination logic to ensure the cursor is created from the last returned item instead of the popped item, preventing skipped items at page boundaries.
- Added comprehensive tests for pagination behavior, ensuring no messages or conversations are skipped and that sorting works as expected.
Parent: a95fccc5f3
Commit: 9434d4a070
5 changed files with 602 additions and 9 deletions
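The core of the pagination fix is an off-by-one in how the cursor was built: the queries fetch limit + 1 items to detect whether another page exists, but the cursor was previously derived from that popped extra item, so the first item of the next page was skipped. A minimal, self-contained sketch of the corrected pattern over an in-memory array (illustrative only; the helper name `paginate` is not from the codebase):

// Items are assumed to be pre-sorted by updatedAt DESC.
function paginate(items, limit, cursor = null) {
  // Apply the cursor the same way the query does: strictly "older than" the cursor value.
  const filtered = cursor ? items.filter((item) => item.updatedAt < cursor) : items;
  // Fetch one extra item purely to detect whether a next page exists.
  const page = filtered.slice(0, limit + 1);

  let nextCursor = null;
  if (page.length > limit) {
    page.pop(); // Remove the extra item used only to detect the next page
    // Build the cursor from the last RETURNED item, not the popped one,
    // so the popped item becomes the first item of the next page.
    nextCursor = page[page.length - 1].updatedAt;
  }
  return { page, nextCursor };
}

Feeding nextCursor back into the next call makes the (limit + 1)-th item the first result of the following page instead of silently disappearing, which is exactly the boundary behavior the new tests below assert.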
@@ -163,7 +163,7 @@ module.exports = {
       isArchived = false,
       tags,
       search,
-      sortBy = 'createdAt',
+      sortBy = 'updatedAt',
       sortDirection = 'desc',
     } = {},
   ) => {
@@ -251,10 +251,12 @@ module.exports = {

     let nextCursor = null;
     if (convos.length > limit) {
-      const lastConvo = convos.pop();
-      const primaryValue = lastConvo[finalSortBy];
+      convos.pop(); // Remove extra item used to detect next page
+      // Create cursor from the last RETURNED item (not the popped one)
+      const lastReturned = convos[convos.length - 1];
+      const primaryValue = lastReturned[finalSortBy];
       const primaryStr = finalSortBy === 'title' ? primaryValue : primaryValue.toISOString();
-      const secondaryStr = lastConvo.updatedAt.toISOString();
+      const secondaryStr = lastReturned.updatedAt.toISOString();
       const composite = { primary: primaryStr, secondary: secondaryStr };
       nextCursor = Buffer.from(JSON.stringify(composite)).toString('base64');
     }
@@ -290,8 +292,9 @@ module.exports = {
     const limited = filtered.slice(0, limit + 1);
     let nextCursor = null;
     if (limited.length > limit) {
-      const lastConvo = limited.pop();
-      nextCursor = lastConvo.updatedAt.toISOString();
+      limited.pop(); // Remove extra item used to detect next page
+      // Create cursor from the last RETURNED item (not the popped one)
+      nextCursor = limited[limited.length - 1].updatedAt.toISOString();
     }

     const convoMap = {};
@@ -567,4 +567,267 @@ describe('Conversation Operations', () => {
       await mongoose.connect(mongoServer.getUri());
     });
   });
+
+  describe('getConvosByCursor pagination', () => {
+    /**
+     * Helper to create conversations with specific timestamps
+     * Uses collection.insertOne to bypass Mongoose timestamps entirely
+     */
+    const createConvoWithTimestamps = async (index, createdAt, updatedAt) => {
+      const conversationId = uuidv4();
+      // Use collection-level insert to bypass Mongoose timestamps
+      await Conversation.collection.insertOne({
+        conversationId,
+        user: 'user123',
+        title: `Conversation ${index}`,
+        endpoint: EModelEndpoint.openAI,
+        expiredAt: null,
+        isArchived: false,
+        createdAt,
+        updatedAt,
+      });
+      return Conversation.findOne({ conversationId }).lean();
+    };
+
+    it('should not skip conversations at page boundaries', async () => {
+      // Create 30 conversations to ensure pagination (limit is 25)
+      const baseTime = new Date('2026-01-01T00:00:00.000Z');
+      const convos = [];
+
+      for (let i = 0; i < 30; i++) {
+        const updatedAt = new Date(baseTime.getTime() - i * 60000); // Each 1 minute apart
+        const convo = await createConvoWithTimestamps(i, updatedAt, updatedAt);
+        convos.push(convo);
+      }
+
+      // Fetch first page
+      const page1 = await getConvosByCursor('user123', { limit: 25 });
+
+      expect(page1.conversations).toHaveLength(25);
+      expect(page1.nextCursor).toBeTruthy();
+
+      // Fetch second page using cursor
+      const page2 = await getConvosByCursor('user123', {
+        limit: 25,
+        cursor: page1.nextCursor,
+      });
+
+      // Should get remaining 5 conversations
+      expect(page2.conversations).toHaveLength(5);
+      expect(page2.nextCursor).toBeNull();
+
+      // Verify no duplicates and no gaps
+      const allIds = [
+        ...page1.conversations.map((c) => c.conversationId),
+        ...page2.conversations.map((c) => c.conversationId),
+      ];
+      const uniqueIds = new Set(allIds);
+
+      expect(uniqueIds.size).toBe(30); // All 30 conversations accounted for
+      expect(allIds.length).toBe(30); // No duplicates
+    });
+
+    it('should include conversation at exact page boundary (item 26 bug fix)', async () => {
+      // This test specifically verifies the fix for the bug where item 26
+      // (the first item that should appear on page 2) was being skipped
+
+      const baseTime = new Date('2026-01-01T12:00:00.000Z');
+
+      // Create exactly 26 conversations
+      const convos = [];
+      for (let i = 0; i < 26; i++) {
+        const updatedAt = new Date(baseTime.getTime() - i * 60000);
+        const convo = await createConvoWithTimestamps(i, updatedAt, updatedAt);
+        convos.push(convo);
+      }
+
+      // The 26th conversation (index 25) should be on page 2
+      const item26 = convos[25];
+
+      // Fetch first page with limit 25
+      const page1 = await getConvosByCursor('user123', { limit: 25 });
+
+      expect(page1.conversations).toHaveLength(25);
+      expect(page1.nextCursor).toBeTruthy();
+
+      // Item 26 should NOT be in page 1
+      const page1Ids = page1.conversations.map((c) => c.conversationId);
+      expect(page1Ids).not.toContain(item26.conversationId);
+
+      // Fetch second page
+      const page2 = await getConvosByCursor('user123', {
+        limit: 25,
+        cursor: page1.nextCursor,
+      });
+
+      // Item 26 MUST be in page 2 (this was the bug - it was being skipped)
+      expect(page2.conversations).toHaveLength(1);
+      expect(page2.conversations[0].conversationId).toBe(item26.conversationId);
+    });
+
+    it('should sort by updatedAt DESC by default', async () => {
+      // Create conversations with different updatedAt times
+      // Note: createdAt is older but updatedAt varies
+      const convo1 = await createConvoWithTimestamps(
+        1,
+        new Date('2026-01-01T00:00:00.000Z'), // oldest created
+        new Date('2026-01-03T00:00:00.000Z'), // most recently updated
+      );
+
+      const convo2 = await createConvoWithTimestamps(
+        2,
+        new Date('2026-01-02T00:00:00.000Z'), // middle created
+        new Date('2026-01-02T00:00:00.000Z'), // middle updated
+      );
+
+      const convo3 = await createConvoWithTimestamps(
+        3,
+        new Date('2026-01-03T00:00:00.000Z'), // newest created
+        new Date('2026-01-01T00:00:00.000Z'), // oldest updated
+      );
+
+      const result = await getConvosByCursor('user123');
+
+      // Should be sorted by updatedAt DESC (most recent first)
+      expect(result.conversations).toHaveLength(3);
+      expect(result.conversations[0].conversationId).toBe(convo1.conversationId); // Jan 3 updatedAt
+      expect(result.conversations[1].conversationId).toBe(convo2.conversationId); // Jan 2 updatedAt
+      expect(result.conversations[2].conversationId).toBe(convo3.conversationId); // Jan 1 updatedAt
+    });
+
+    it('should handle conversations with same updatedAt (tie-breaker)', async () => {
+      const sameTime = new Date('2026-01-01T12:00:00.000Z');
+
+      // Create 3 conversations with exact same updatedAt
+      const convo1 = await createConvoWithTimestamps(1, sameTime, sameTime);
+      const convo2 = await createConvoWithTimestamps(2, sameTime, sameTime);
+      const convo3 = await createConvoWithTimestamps(3, sameTime, sameTime);
+
+      const result = await getConvosByCursor('user123');
+
+      // All 3 should be returned (no skipping due to same timestamps)
+      expect(result.conversations).toHaveLength(3);
+
+      const returnedIds = result.conversations.map((c) => c.conversationId);
+      expect(returnedIds).toContain(convo1.conversationId);
+      expect(returnedIds).toContain(convo2.conversationId);
+      expect(returnedIds).toContain(convo3.conversationId);
+    });
+
+    it('should handle cursor pagination with conversations updated during pagination', async () => {
+      // Simulate the scenario where a conversation is updated between page fetches
+      const baseTime = new Date('2026-01-01T00:00:00.000Z');
+
+      // Create 30 conversations
+      for (let i = 0; i < 30; i++) {
+        const updatedAt = new Date(baseTime.getTime() - i * 60000);
+        await createConvoWithTimestamps(i, updatedAt, updatedAt);
+      }
+
+      // Fetch first page
+      const page1 = await getConvosByCursor('user123', { limit: 25 });
+      expect(page1.conversations).toHaveLength(25);
+
+      // Now update one of the conversations that should be on page 2
+      // to have a newer updatedAt (simulating user activity during pagination)
+      const convosOnPage2 = await Conversation.find({ user: 'user123' })
+        .sort({ updatedAt: -1 })
+        .skip(25)
+        .limit(5);
+
+      if (convosOnPage2.length > 0) {
+        const updatedConvo = convosOnPage2[0];
+        await Conversation.updateOne(
+          { _id: updatedConvo._id },
+          { updatedAt: new Date('2026-01-02T00:00:00.000Z') }, // Much newer
+        );
+      }
+
+      // Fetch second page with original cursor
+      const page2 = await getConvosByCursor('user123', {
+        limit: 25,
+        cursor: page1.nextCursor,
+      });
+
+      // The updated conversation might not be in page 2 anymore
+      // (it moved to the front), but we should still get remaining items
+      // without errors and without infinite loops
+      expect(page2.conversations.length).toBeGreaterThanOrEqual(0);
+    });
+
+    it('should correctly decode and use cursor for pagination', async () => {
+      const baseTime = new Date('2026-01-01T00:00:00.000Z');
+
+      // Create 30 conversations
+      for (let i = 0; i < 30; i++) {
+        const updatedAt = new Date(baseTime.getTime() - i * 60000);
+        await createConvoWithTimestamps(i, updatedAt, updatedAt);
+      }
+
+      // Fetch first page
+      const page1 = await getConvosByCursor('user123', { limit: 25 });
+
+      // Decode the cursor to verify it's based on the last RETURNED item
+      const decodedCursor = JSON.parse(Buffer.from(page1.nextCursor, 'base64').toString());
+
+      // The cursor should match the last item in page1 (item at index 24)
+      const lastReturnedItem = page1.conversations[24];
+
+      expect(new Date(decodedCursor.primary).getTime()).toBe(
+        new Date(lastReturnedItem.updatedAt).getTime(),
+      );
+    });
+
+    it('should support sortBy createdAt when explicitly requested', async () => {
+      // Create conversations with different timestamps
+      const convo1 = await createConvoWithTimestamps(
+        1,
+        new Date('2026-01-03T00:00:00.000Z'), // newest created
+        new Date('2026-01-01T00:00:00.000Z'), // oldest updated
+      );
+
+      const convo2 = await createConvoWithTimestamps(
+        2,
+        new Date('2026-01-01T00:00:00.000Z'), // oldest created
+        new Date('2026-01-03T00:00:00.000Z'), // newest updated
+      );
+
+      // Verify timestamps were set correctly
+      expect(new Date(convo1.createdAt).getTime()).toBe(
+        new Date('2026-01-03T00:00:00.000Z').getTime(),
+      );
+      expect(new Date(convo2.createdAt).getTime()).toBe(
+        new Date('2026-01-01T00:00:00.000Z').getTime(),
+      );
+
+      const result = await getConvosByCursor('user123', { sortBy: 'createdAt' });
+
+      // Should be sorted by createdAt DESC
+      expect(result.conversations).toHaveLength(2);
+      expect(result.conversations[0].conversationId).toBe(convo1.conversationId); // Jan 3 createdAt
+      expect(result.conversations[1].conversationId).toBe(convo2.conversationId); // Jan 1 createdAt
+    });
+
+    it('should handle empty result set gracefully', async () => {
+      const result = await getConvosByCursor('user123');
+
+      expect(result.conversations).toHaveLength(0);
+      expect(result.nextCursor).toBeNull();
+    });
+
+    it('should handle exactly limit number of conversations (no next page)', async () => {
+      const baseTime = new Date('2026-01-01T00:00:00.000Z');
+
+      // Create exactly 25 conversations (equal to default limit)
+      for (let i = 0; i < 25; i++) {
+        const updatedAt = new Date(baseTime.getTime() - i * 60000);
+        await createConvoWithTimestamps(i, updatedAt, updatedAt);
+      }
+
+      const result = await getConvosByCursor('user123', { limit: 25 });
+
+      expect(result.conversations).toHaveLength(25);
+      expect(result.nextCursor).toBeNull(); // No next page
+    });
+  });
 });
@@ -573,4 +573,326 @@ describe('Message Operations', () => {
       expect(bulk2.expiredAt).toBeNull();
     });
   });
+
+  describe('Message cursor pagination', () => {
+    /**
+     * Helper to create messages with specific timestamps
+     * Uses collection.insertOne to bypass Mongoose timestamps
+     */
+    const createMessageWithTimestamp = async (index, conversationId, createdAt) => {
+      const messageId = uuidv4();
+      await Message.collection.insertOne({
+        messageId,
+        conversationId,
+        user: 'user123',
+        text: `Message ${index}`,
+        isCreatedByUser: index % 2 === 0,
+        createdAt,
+        updatedAt: createdAt,
+      });
+      return Message.findOne({ messageId }).lean();
+    };
+
+    /**
+     * Simulates the pagination logic from api/server/routes/messages.js
+     * This tests the exact query pattern used in the route
+     */
+    const getMessagesByCursor = async ({
+      conversationId,
+      user,
+      pageSize = 25,
+      cursor = null,
+      sortBy = 'createdAt',
+      sortDirection = 'desc',
+    }) => {
+      const sortOrder = sortDirection === 'asc' ? 1 : -1;
+      const sortField = ['createdAt', 'updatedAt'].includes(sortBy) ? sortBy : 'createdAt';
+      const cursorOperator = sortDirection === 'asc' ? '$gt' : '$lt';
+
+      const filter = { conversationId, user };
+      if (cursor) {
+        filter[sortField] = { [cursorOperator]: new Date(cursor) };
+      }
+
+      const messages = await Message.find(filter)
+        .sort({ [sortField]: sortOrder })
+        .limit(pageSize + 1)
+        .lean();
+
+      let nextCursor = null;
+      if (messages.length > pageSize) {
+        messages.pop(); // Remove extra item used to detect next page
+        // Create cursor from the last RETURNED item (not the popped one)
+        nextCursor = messages[messages.length - 1][sortField];
+      }
+
+      return { messages, nextCursor };
+    };
+
+    it('should return messages for a conversation with pagination', async () => {
+      const conversationId = uuidv4();
+      const baseTime = new Date('2026-01-01T00:00:00.000Z');
+
+      // Create 30 messages to test pagination
+      for (let i = 0; i < 30; i++) {
+        const createdAt = new Date(baseTime.getTime() - i * 60000); // Each 1 minute apart
+        await createMessageWithTimestamp(i, conversationId, createdAt);
+      }
+
+      // Fetch first page (pageSize 25)
+      const page1 = await getMessagesByCursor({
+        conversationId,
+        user: 'user123',
+        pageSize: 25,
+      });
+
+      expect(page1.messages).toHaveLength(25);
+      expect(page1.nextCursor).toBeTruthy();
+
+      // Fetch second page using cursor
+      const page2 = await getMessagesByCursor({
+        conversationId,
+        user: 'user123',
+        pageSize: 25,
+        cursor: page1.nextCursor,
+      });
+
+      // Should get remaining 5 messages
+      expect(page2.messages).toHaveLength(5);
+      expect(page2.nextCursor).toBeNull();
+
+      // Verify no duplicates and no gaps
+      const allMessageIds = [
+        ...page1.messages.map((m) => m.messageId),
+        ...page2.messages.map((m) => m.messageId),
+      ];
+      const uniqueIds = new Set(allMessageIds);
+
+      expect(uniqueIds.size).toBe(30); // All 30 messages accounted for
+      expect(allMessageIds.length).toBe(30); // No duplicates
+    });
+
+    it('should not skip message at page boundary (item 26 bug fix)', async () => {
+      const conversationId = uuidv4();
+      const baseTime = new Date('2026-01-01T12:00:00.000Z');
+
+      // Create exactly 26 messages
+      const messages = [];
+      for (let i = 0; i < 26; i++) {
+        const createdAt = new Date(baseTime.getTime() - i * 60000);
+        const msg = await createMessageWithTimestamp(i, conversationId, createdAt);
+        messages.push(msg);
+      }
+
+      // The 26th message (index 25) should be on page 2
+      const item26 = messages[25];
+
+      // Fetch first page with pageSize 25
+      const page1 = await getMessagesByCursor({
+        conversationId,
+        user: 'user123',
+        pageSize: 25,
+      });
+
+      expect(page1.messages).toHaveLength(25);
+      expect(page1.nextCursor).toBeTruthy();
+
+      // Item 26 should NOT be in page 1
+      const page1Ids = page1.messages.map((m) => m.messageId);
+      expect(page1Ids).not.toContain(item26.messageId);
+
+      // Fetch second page
+      const page2 = await getMessagesByCursor({
+        conversationId,
+        user: 'user123',
+        pageSize: 25,
+        cursor: page1.nextCursor,
+      });
+
+      // Item 26 MUST be in page 2 (this was the bug - it was being skipped)
+      expect(page2.messages).toHaveLength(1);
+      expect(page2.messages[0].messageId).toBe(item26.messageId);
+    });
+
+    it('should sort by createdAt DESC by default', async () => {
+      const conversationId = uuidv4();
+
+      // Create messages with specific timestamps
+      const msg1 = await createMessageWithTimestamp(
+        1,
+        conversationId,
+        new Date('2026-01-01T00:00:00.000Z'),
+      );
+      const msg2 = await createMessageWithTimestamp(
+        2,
+        conversationId,
+        new Date('2026-01-02T00:00:00.000Z'),
+      );
+      const msg3 = await createMessageWithTimestamp(
+        3,
+        conversationId,
+        new Date('2026-01-03T00:00:00.000Z'),
+      );
+
+      const result = await getMessagesByCursor({
+        conversationId,
+        user: 'user123',
+      });
+
+      // Should be sorted by createdAt DESC (newest first) by default
+      expect(result.messages).toHaveLength(3);
+      expect(result.messages[0].messageId).toBe(msg3.messageId);
+      expect(result.messages[1].messageId).toBe(msg2.messageId);
+      expect(result.messages[2].messageId).toBe(msg1.messageId);
+    });
+
+    it('should support ascending sort direction', async () => {
+      const conversationId = uuidv4();
+
+      const msg1 = await createMessageWithTimestamp(
+        1,
+        conversationId,
+        new Date('2026-01-01T00:00:00.000Z'),
+      );
+      const msg2 = await createMessageWithTimestamp(
+        2,
+        conversationId,
+        new Date('2026-01-02T00:00:00.000Z'),
+      );
+
+      const result = await getMessagesByCursor({
+        conversationId,
+        user: 'user123',
+        sortDirection: 'asc',
+      });
+
+      // Should be sorted by createdAt ASC (oldest first)
+      expect(result.messages).toHaveLength(2);
+      expect(result.messages[0].messageId).toBe(msg1.messageId);
+      expect(result.messages[1].messageId).toBe(msg2.messageId);
+    });
+
+    it('should handle empty conversation', async () => {
+      const conversationId = uuidv4();
+
+      const result = await getMessagesByCursor({
+        conversationId,
+        user: 'user123',
+      });
+
+      expect(result.messages).toHaveLength(0);
+      expect(result.nextCursor).toBeNull();
+    });
+
+    it('should only return messages for the specified user', async () => {
+      const conversationId = uuidv4();
+      const createdAt = new Date();
+
+      // Create a message for user123
+      await Message.collection.insertOne({
+        messageId: uuidv4(),
+        conversationId,
+        user: 'user123',
+        text: 'User message',
+        createdAt,
+        updatedAt: createdAt,
+      });
+
+      // Create a message for a different user
+      await Message.collection.insertOne({
+        messageId: uuidv4(),
+        conversationId,
+        user: 'otherUser',
+        text: 'Other user message',
+        createdAt,
+        updatedAt: createdAt,
+      });
+
+      const result = await getMessagesByCursor({
+        conversationId,
+        user: 'user123',
+      });
+
+      // Should only return user123's message
+      expect(result.messages).toHaveLength(1);
+      expect(result.messages[0].user).toBe('user123');
+    });
+
+    it('should handle exactly pageSize number of messages (no next page)', async () => {
+      const conversationId = uuidv4();
+      const baseTime = new Date('2026-01-01T00:00:00.000Z');
+
+      // Create exactly 25 messages (equal to default pageSize)
+      for (let i = 0; i < 25; i++) {
+        const createdAt = new Date(baseTime.getTime() - i * 60000);
+        await createMessageWithTimestamp(i, conversationId, createdAt);
+      }
+
+      const result = await getMessagesByCursor({
+        conversationId,
+        user: 'user123',
+        pageSize: 25,
+      });
+
+      expect(result.messages).toHaveLength(25);
+      expect(result.nextCursor).toBeNull(); // No next page
+    });
+
+    it('should handle pageSize of 1', async () => {
+      const conversationId = uuidv4();
+      const baseTime = new Date('2026-01-01T00:00:00.000Z');
+
+      // Create 3 messages
+      for (let i = 0; i < 3; i++) {
+        const createdAt = new Date(baseTime.getTime() - i * 60000);
+        await createMessageWithTimestamp(i, conversationId, createdAt);
+      }
+
+      // Fetch with pageSize 1
+      let cursor = null;
+      const allMessages = [];
+
+      for (let page = 0; page < 5; page++) {
+        const result = await getMessagesByCursor({
+          conversationId,
+          user: 'user123',
+          pageSize: 1,
+          cursor,
+        });
+
+        allMessages.push(...result.messages);
+        cursor = result.nextCursor;
+
+        if (!cursor) {
+          break;
+        }
+      }
+
+      // Should get all 3 messages without duplicates
+      expect(allMessages).toHaveLength(3);
+      const uniqueIds = new Set(allMessages.map((m) => m.messageId));
+      expect(uniqueIds.size).toBe(3);
+    });
+
+    it('should handle messages with same createdAt timestamp', async () => {
+      const conversationId = uuidv4();
+      const sameTime = new Date('2026-01-01T12:00:00.000Z');
+
+      // Create multiple messages with the exact same timestamp
+      const messages = [];
+      for (let i = 0; i < 5; i++) {
+        const msg = await createMessageWithTimestamp(i, conversationId, sameTime);
+        messages.push(msg);
+      }
+
+      const result = await getMessagesByCursor({
+        conversationId,
+        user: 'user123',
+        pageSize: 10,
+      });
+
+      // All messages should be returned
+      expect(result.messages).toHaveLength(5);
+    });
+  });
 });
@@ -32,7 +32,7 @@ router.get('/', async (req, res) => {
   const cursor = req.query.cursor;
   const isArchived = isEnabled(req.query.isArchived);
   const search = req.query.search ? decodeURIComponent(req.query.search) : undefined;
-  const sortBy = req.query.sortBy || 'createdAt';
+  const sortBy = req.query.sortBy || 'updatedAt';
   const sortDirection = req.query.sortDirection || 'desc';

   let tags;
@@ -24,7 +24,7 @@ router.get('/', async (req, res) => {
     const user = req.user.id ?? '';
     const {
       cursor = null,
-      sortBy = 'createdAt',
+      sortBy = 'updatedAt',
       sortDirection = 'desc',
       pageSize: pageSizeRaw,
       conversationId,
@@ -55,7 +55,12 @@ router.get('/', async (req, res) => {
         .sort({ [sortField]: sortOrder })
         .limit(pageSize + 1)
         .lean();
-      const nextCursor = messages.length > pageSize ? messages.pop()[sortField] : null;
+      let nextCursor = null;
+      if (messages.length > pageSize) {
+        messages.pop(); // Remove extra item used to detect next page
+        // Create cursor from the last RETURNED item (not the popped one)
+        nextCursor = messages[messages.length - 1][sortField];
+      }
       response = { messages, nextCursor };
     } else if (search) {
       const searchResults = await Message.meiliSearch(search, { filter: `user = "${user}"` }, true);