🚑 fix: resolve missing data in infinite queries (#2852)

* Fixes an issue where infinite queries (InfiniteQuery) could be missing data.
* After add/delete operations, the client-side cache could fall out of sync with the database, leaving items missing or duplicated.
* To address this, client data is now normalized after add/delete operations.
* Where necessary, the last page is refetched to keep the cache consistent (see the sketch below).
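A minimal sketch of the approach (illustrative only, not code from this commit; `rebalance` is a hypothetical stand-in for the `normalizeData` helper introduced below, and the types are simplified):

    // Simplified shape of React Query's infinite-query cache.
    type InfiniteData<T> = { pages: T[]; pageParams: unknown[] };
    type Page = { conversations: { conversationId: string }[]; pageSize: number };

    // After a client-side delete, a page can underflow (e.g. [4, 5, 5] items with
    // pageSize 5). Flatten all cached items and re-chunk them so that every page
    // except possibly the last holds exactly pageSize items; drop pages left empty.
    const rebalance = (data: InfiniteData<Page>, pageSize: number): InfiniteData<Page> => {
      const items = data.pages.flatMap((p) => p.conversations);
      const pages = data.pages
        .map((p, i) => ({ ...p, conversations: items.slice(i * pageSize, (i + 1) * pageSize) }))
        .filter((p) => p.conversations.length > 0);
      return { pages, pageParams: data.pageParams.slice(0, pages.length) };
    };

    // The last cached page may now be one item short of what the database holds,
    // so only that page is refetched (React Query v3/v4's `refetchPage` option):
    //   refetch({ refetchPage: (_page, index) => index === pages.length - 1 });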
Yuichi Oneda 2024-05-24 09:38:38 -07:00 committed by GitHub
parent 35ba4ba1a4
commit 4369e75ca7
3 changed files with 294 additions and 28 deletions

client/src/data-provider/mutations.ts

@@ -17,6 +17,8 @@ import {
import { dataService, MutationKeys, QueryKeys, defaultOrderQuery } from 'librechat-data-provider';
import { useSetRecoilState } from 'recoil';
import store from '~/store';
import { normalizeData } from '~/utils/collection';
import { useConversationsInfiniteQuery, useSharedLinksInfiniteQuery } from './queries';
/** Conversations */
export const useGenTitleMutation = (): UseMutationResult<
@@ -85,6 +87,11 @@ export const useArchiveConversationMutation = (
unknown
> => {
const queryClient = useQueryClient();
const { refetch } = useConversationsInfiniteQuery();
const { refetch: archiveRefetch } = useConversationsInfiniteQuery({
pageNumber: '1', // dummy value; not actually used when refetching
isArchived: true,
});
return useMutation(
(payload: t.TArchiveConversationRequest) => dataService.archiveConversation(payload),
{
@@ -99,25 +106,47 @@ export const useArchiveConversationMutation = (
if (!convoData) {
return convoData;
}
- if (vars.isArchived) {
-   return deleteConversation(convoData, id as string);
- } else {
-   return addConversation(convoData, _data);
- }
+ const pageSize = convoData.pages[0].pageSize as number;
+ return normalizeData(
+   vars.isArchived ? deleteConversation(convoData, id) : addConversation(convoData, _data),
+   'conversations',
+   pageSize,
+ );
});
if (vars.isArchived) {
const current = queryClient.getQueryData<t.ConversationData>([
QueryKeys.allConversations,
]);
refetch({ refetchPage: (page, index) => index === (current?.pages.length || 1) - 1 });
}
queryClient.setQueryData<t.ConversationData>(
[QueryKeys.archivedConversations],
(convoData) => {
if (!convoData) {
return convoData;
}
- if (vars.isArchived) {
-   return addConversation(convoData, _data);
- } else {
-   return deleteConversation(convoData, id as string);
- }
+ const pageSize = convoData.pages[0].pageSize as number;
+ return normalizeData(
+   vars.isArchived
+     ? addConversation(convoData, _data)
+     : deleteConversation(convoData, id),
+   'conversations',
+   pageSize,
+ );
},
);
if (!vars.isArchived) {
const currentArchive = queryClient.getQueryData<t.ConversationData>([
QueryKeys.archivedConversations,
]);
archiveRefetch({
refetchPage: (page, index) => index === (currentArchive?.pages.length || 1) - 1,
});
}
},
},
);
@@ -127,6 +156,7 @@ export const useCreateSharedLinkMutation = (
options?: t.CreateSharedLinkOptions,
): UseMutationResult<t.TSharedLinkResponse, unknown, t.TSharedLinkRequest, unknown> => {
const queryClient = useQueryClient();
const { refetch } = useSharedLinksInfiniteQuery();
const { onSuccess, ..._options } = options || {};
return useMutation((payload: t.TSharedLinkRequest) => dataService.createSharedLink(payload), {
onSuccess: (_data, vars, context) => {
@@ -138,17 +168,24 @@ export const useCreateSharedLinkMutation = (
if (!sharedLink) {
return sharedLink;
}
- // If the shared link is public, add it to the shared links cache list
- if (vars.isPublic) {
-   return addSharedLink(sharedLink, _data);
- } else {
-   return deleteSharedLink(sharedLink, _data.shareId);
- }
+ const pageSize = sharedLink.pages[0].pageSize as number;
+ return normalizeData(
+   vars.isPublic
+     ? addSharedLink(sharedLink, _data)
+     : deleteSharedLink(sharedLink, _data.shareId),
+   'sharedLinks',
+   pageSize,
+ );
});
queryClient.setQueryData([QueryKeys.sharedLinks, _data.shareId], _data);
if (!vars.isPublic) {
const current = queryClient.getQueryData<t.ConversationData>([QueryKeys.sharedLinks]);
refetch({
refetchPage: (page, index) => index === (current?.pages.length || 1) - 1,
});
}
onSuccess?.(_data, vars, context);
},
...(_options || {}),
@@ -159,6 +196,7 @@ export const useUpdateSharedLinkMutation = (
options?: t.UpdateSharedLinkOptions,
): UseMutationResult<t.TSharedLinkResponse, unknown, t.TSharedLinkRequest, unknown> => {
const queryClient = useQueryClient();
const { refetch } = useSharedLinksInfiniteQuery();
const { onSuccess, ..._options } = options || {};
return useMutation((payload: t.TSharedLinkRequest) => dataService.updateSharedLink(payload), {
onSuccess: (_data, vars, context) => {
@@ -171,17 +209,25 @@ export const useUpdateSharedLinkMutation = (
return sharedLink;
}
- // If the shared link is public, add it to the shared links cache list.
- if (vars.isPublic) {
-   // Even if the SharedLink data exists in the database, it is not registered in the cache when isPublic is false.
-   // Therefore, when isPublic is true, use addSharedLink instead of updateSharedLink.
-   return addSharedLink(sharedLink, _data);
- } else {
-   return deleteSharedLink(sharedLink, _data.shareId);
- }
+ return normalizeData(
+   vars.isPublic
+     ? // Even if the SharedLink data exists in the database, it is not registered in the cache when isPublic is false.
+       // Therefore, when isPublic is true, use addSharedLink instead of updateSharedLink.
+       addSharedLink(sharedLink, _data)
+     : deleteSharedLink(sharedLink, _data.shareId),
+   'sharedLinks',
+   sharedLink.pages[0].pageSize as number,
+ );
});
queryClient.setQueryData([QueryKeys.sharedLinks, _data.shareId], _data);
if (!vars.isPublic) {
const current = queryClient.getQueryData<t.ConversationData>([QueryKeys.sharedLinks]);
refetch({
refetchPage: (page, index) => index === (current?.pages.length || 1) - 1,
});
}
onSuccess?.(_data, vars, context);
},
@@ -193,6 +239,7 @@ export const useDeleteSharedLinkMutation = (
options?: t.DeleteSharedLinkOptions,
): UseMutationResult<t.TDeleteSharedLinkResponse, unknown, { shareId: string }, unknown> => {
const queryClient = useQueryClient();
const { refetch } = useSharedLinksInfiniteQuery();
const { onSuccess, ..._options } = options || {};
return useMutation(({ shareId }) => dataService.deleteSharedLink(shareId), {
onSuccess: (_data, vars, context) => {
@@ -205,7 +252,15 @@ export const useDeleteSharedLinkMutation = (
if (!data) {
return data;
}
- return deleteSharedLink(data, vars.shareId);
+ return normalizeData(
+   deleteSharedLink(data, vars.shareId),
+   'sharedLinks',
+   data.pages[0].pageSize as number,
+ );
});
const current = queryClient.getQueryData<t.ConversationData>([QueryKeys.sharedLinks]);
refetch({
refetchPage: (page, index) => index === (current?.pages.length || 1) - 1,
});
onSuccess?.(_data, vars, context);
},
@@ -222,6 +277,7 @@ export const useDeleteConversationMutation = (
unknown
> => {
const queryClient = useQueryClient();
const { refetch } = useConversationsInfiniteQuery();
const { onSuccess, ..._options } = options || {};
return useMutation(
(payload: t.TDeleteConversationRequest) => dataService.deleteConversation(payload),
@@ -235,7 +291,11 @@ export const useDeleteConversationMutation = (
if (!convoData) {
return convoData;
}
- return deleteConversation(convoData, vars.conversationId as string);
+ return normalizeData(
+   deleteConversation(convoData, vars.conversationId as string),
+   'conversations',
+   convoData.pages[0].pageSize,
+ );
};
queryClient.setQueryData([QueryKeys.conversation, vars.conversationId], null);
@@ -244,6 +304,8 @@ export const useDeleteConversationMutation = (
[QueryKeys.archivedConversations],
handleDelete,
);
const current = queryClient.getQueryData<t.ConversationData>([QueryKeys.allConversations]);
refetch({ refetchPage: (page, index) => index === (current?.pages.length || 1) - 1 });
onSuccess?.(_data, vars, context);
},
...(_options || {}),

client/src/utils/collection.ts

@@ -65,6 +65,40 @@ export const deleteData = <TCollection, TData>(
// Delete the data from its current page
newData.pages[pageIndex][collectionName].splice(index, 1);
}
return newData;
};
/**
* Normalize the data so that each page holds at most pageSize items
*/
export const normalizeData = <TCollection, TData>(
data: InfiniteData<TCollection>,
collectionName: string,
pageSize: number,
): InfiniteData<TCollection> => {
const infiniteData = JSON.parse(JSON.stringify(data)) as InfiniteData<TCollection>;
const pageCount = infiniteData.pages.length;
if (pageCount === 0) {
return infiniteData;
}
const pageParams = infiniteData.pageParams;
// Combine the items from all pages into one array
const collection = infiniteData.pages.flatMap((page) => page[collectionName]);
if (collection.length === 0) {
return infiniteData;
}
// Create the restructured pages
const restructuredPages = Array.from({ length: pageCount }, (_, i) => ({
...infiniteData.pages[i],
[collectionName]: collection.slice(i * pageSize, (i + 1) * pageSize),
})).filter((page) => page[collectionName].length > 0); // Remove empty pages
return {
pageParams: pageParams.slice(0, restructuredPages.length),
pages: restructuredPages,
};
};
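// Worked example (illustrative only, not part of the commit): with pageSize 3,
// a cache whose pages hold [2, 3] conversations is rebalanced to [3, 2]:
//
//   const data = {
//     pageParams: [null, 2],
//     pages: [
//       { conversations: ['a', 'b'], pageSize: 3 },
//       { conversations: ['c', 'd', 'e'], pageSize: 3 },
//     ],
//   };
//   normalizeData(data, 'conversations', 3);
//   // → pages[0].conversations === ['a', 'b', 'c']
//   // → pages[1].conversations === ['d', 'e']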

client/src/utils/convos.spec.ts

@@ -9,6 +9,7 @@ import {
groupConversationsByDate,
} from './convos';
import type { TConversation, ConversationData } from 'librechat-data-provider';
import { normalizeData } from './collection';
describe('Conversation Utilities', () => {
describe('groupConversationsByDate', () => {
@@ -226,4 +227,173 @@ describe('Conversation Utilities with Fake Data', () => {
expect(index).toBeGreaterThanOrEqual(0);
});
});
describe('normalizeConversationData', () => {
it('normalizes the number of items on each page after data removal', () => {
// Create test data:
// Generates 15 conversation items, each with a unique conversationId and an updatedAt timestamp set to a different day.
// { conversationId: '1', updatedAt: new Date(Date.now() - 86400000 * i).toISOString() }
const conversations = Array.from({ length: 15 }, (_, i) => ({
conversationId: (i + 1).toString(),
updatedAt: new Date(Date.now() - 86400000 * i).toISOString(),
}));
// Paginate Data:
// Divides the 15 conversation items into pages, with each page containing up to 5 items (pageSize is set to 5). This results in 3 pages.
const pageSize = 5;
const totalPageNumber = Math.ceil(conversations.length / pageSize);
const paginatedData = Array.from({ length: totalPageNumber }, (_, index) => ({
conversations: conversations.slice(index * pageSize, (index + 1) * pageSize),
pages: totalPageNumber,
pageNumber: index + 1,
pageSize,
}));
const testData = { pages: paginatedData, pageParams: [null, 2, 3] };
// Removes one item from the first page, resulting in the first page having 4 items, while the second and third pages still have 5 items each.
testData.pages[0].conversations.splice(1, 1);
expect(testData.pages[0].conversations).toHaveLength(4);
expect(testData.pages[1].conversations).toHaveLength(5);
expect(testData.pages[2].conversations).toHaveLength(5);
// Normalize Data:
// Calls the normalizeData function to ensure that each page contains exactly 5 items, redistributing the items across the pages as needed.
const normalizedData = normalizeData(testData, 'conversations', pageSize);
// Verify Results:
// Asserts that every page except the last contains 5 conversations,
// that the last page contains 4,
// and that the conversation ids are in the expected order.
expect(normalizedData.pages[0].conversations).toHaveLength(5);
expect(normalizedData.pages[0].conversations[0].conversationId).toBe('1');
expect(normalizedData.pages[0].conversations[4].conversationId).toBe('6');
expect(normalizedData.pages[1].conversations).toHaveLength(5);
expect(normalizedData.pages[1].conversations[0].conversationId).toBe('7');
expect(normalizedData.pages[1].conversations[4].conversationId).toBe('11');
expect(normalizedData.pages[2].conversations).toHaveLength(4);
expect(normalizedData.pages[2].conversations[0].conversationId).toBe('12');
expect(normalizedData.pages[2].conversations[3].conversationId).toBe('15');
});
it('normalizes the number of items on each page after data addition', () => {
// Create test data:
// Generates 15 conversation items, each with a unique conversationId and an updatedAt timestamp set to a different day.
// { conversationId: '1', updatedAt: new Date(Date.now() - 86400000 * i).toISOString() }
const conversations = Array.from({ length: 15 }, (_, i) => ({
conversationId: (i + 1).toString(),
updatedAt: new Date(Date.now() - 86400000 * i).toISOString(),
}));
// Paginate Data:
// Divides the 15 conversation items into pages,
// with each page containing up to 5 items (pageSize is set to 5). This results in 3 pages.
const pageSize = 5;
const totalPageNumber = Math.ceil(conversations.length / pageSize);
const paginatedData = Array.from({ length: totalPageNumber }, (_, index) => ({
conversations: conversations.slice(index * pageSize, (index + 1) * pageSize),
pages: totalPageNumber,
pageNumber: index + 1,
pageSize,
}));
const testData = { pages: paginatedData, pageParams: [null, 2, 3] };
// Inserts a new conversation item at the beginning of the first page,
testData.pages[0].conversations.unshift({
conversationId: '16',
updatedAt: new Date().toISOString(),
});
// resulting in the first page having 6 items,
// while the second and third pages still have 5 items each.
expect(testData.pages[0].conversations).toHaveLength(6);
expect(testData.pages[1].conversations).toHaveLength(5);
expect(testData.pages[2].conversations).toHaveLength(5);
expect(testData.pages[2].conversations[4].conversationId).toBe('15');
expect(testData.pages).toHaveLength(3);
const normalizedData = normalizeData(testData, 'conversations', pageSize);
// Verify Results:
// Asserts that after normalization, each page contains the correct number of items (5 items per page).
expect(normalizedData.pages[0].conversations).toHaveLength(5);
expect(normalizedData.pages[1].conversations).toHaveLength(5);
expect(normalizedData.pages[2].conversations).toHaveLength(5);
expect(normalizedData.pages).toHaveLength(3);
// Checks that the items are in the expected order, ensuring that the conversationId values are correctly distributed across the pages.
expect(normalizedData.pages[0].conversations[0].conversationId).toBe('16');
expect(normalizedData.pages[0].conversations[4].conversationId).toBe('4');
expect(normalizedData.pages[1].conversations[0].conversationId).toBe('5');
expect(normalizedData.pages[1].conversations[4].conversationId).toBe('9');
expect(normalizedData.pages[2].conversations[0].conversationId).toBe('10');
expect(normalizedData.pages[2].conversations[4].conversationId).toBe('14');
expect(normalizedData.pageParams).toHaveLength(3);
});
it('returns empty data when there is no data', () => {
const normalizedData = normalizeData(
{ pages: [{ conversations: [], pageNumber: 1, pageSize: 5, pages: 1 }], pageParams: [] },
'conversations',
5,
);
expect(normalizedData.pages[0].conversations).toHaveLength(0);
});
it('does not normalize data when not needed', () => {
const normalizedData = normalizeData(
{ pages: [{ conversations: ['1'], pageNumber: 1, pageSize: 5, pages: 1 }], pageParams: [] },
'conversations',
5,
);
expect(normalizedData.pages[0].conversations).toHaveLength(1);
});
it('deletes pages that have no data as a result of normalization', () => {
const conversations = Array.from({ length: 15 }, (_, i) => ({
conversationId: (i + 1).toString(),
updatedAt: new Date(Date.now() - 86400000 * i).toISOString(),
}));
const pageSize = 5;
const totalPageNumber = Math.ceil(conversations.length / pageSize);
const paginatedData = Array.from({ length: totalPageNumber }, (_, index) => ({
conversations: conversations.slice(index * pageSize, (index + 1) * pageSize),
pages: totalPageNumber,
pageNumber: index + 1,
pageSize,
}));
const testData = { pages: paginatedData, pageParams: [null, 2, 3] };
// Removes all data from the last page, resulting in the last page having 0 items.
testData.pages[2].conversations = [];
expect(testData.pages[0].conversations).toHaveLength(5);
expect(testData.pages[1].conversations).toHaveLength(5);
expect(testData.pages[2].conversations).toHaveLength(0);
expect(testData.pageParams).toHaveLength(3);
const normalizedData = normalizeData(testData, 'conversations', pageSize);
// Verify Results:
// Asserts that the last page is removed after normalization, leaving only the first and second pages.
expect(normalizedData.pages).toHaveLength(2);
expect(normalizedData.pages[0].conversations).toHaveLength(5);
expect(normalizedData.pages[1].conversations).toHaveLength(5);
expect(normalizedData.pageParams).toHaveLength(2);
});
});
});