mirror of
https://github.com/danny-avila/LibreChat.git
synced 2025-12-30 07:08:50 +01:00
📜 refactor: Optimize Conversation History Nav with Cursor Pagination (#5785)
* ✨ feat: improve Nav/Conversations/Convo/NewChat component performance * ✨ feat: implement cursor-based pagination for conversations API * 🔧 refactor: remove createdAt from conversation selection in API and type definitions * 🔧 refactor: include createdAt in conversation selection and update related types * ✨ fix: search functionality and bugs with loadMoreConversations * feat: move ArchivedChats to cursor and DataTable standard * 🔧 refactor: add InfiniteQueryObserverResult type import in Nav component * feat: enhance conversation listing with pagination, sorting, and search capabilities * 🔧 refactor: remove unnecessary comment regarding lodash/debounce in ArchivedChatsTable * 🔧 refactor: remove unused translation keys for archived chats and search results * 🔧 fix: Archived Chats, Delete Convo, Duplicate Convo * 🔧 refactor: improve conversation components with layout adjustments and new translations * 🔧 refactor: simplify archive conversation mutation and improve unarchive handling; fix: update fork mutation * 🔧 refactor: decode search query parameter in conversation route; improve error handling in unarchive mutation; clean up DataTable component styles * 🔧 refactor: remove unused translation key for empty archived chats * 🚀 fix: `archivedConversation` query key not updated correctly while archiving * 🧠 feat: Bedrock Anthropic Reasoning & Update Endpoint Handling (#6163) * feat: Add thinking and thinkingBudget parameters for Bedrock Anthropic models * chore: Update @librechat/agents to version 2.1.8 * refactor: change region order in params * refactor: Add maxTokens parameter to conversation preset schema * refactor: Update agent client to use bedrockInputSchema and improve error handling for model parameters * refactor: streamline/optimize llmConfig initialization and saving for bedrock * fix: ensure config titleModel is used for all endpoints * refactor: enhance OpenAIClient and agent initialization to support endpoint checks for OpenRouter * chore: bump 
@google/generative-ai * ✨ feat: improve Nav/Conversations/Convo/NewChat component performance * 🔧 refactor: remove unnecessary comment regarding lodash/debounce in ArchivedChatsTable * 🔧 refactor: update translation keys for clarity; simplify conversation query parameters and improve sorting functionality in SharedLinks component * 🔧 refactor: optimize conversation loading logic and improve search handling in Nav component * fix: package-lock * fix: package-lock 2 * fix: package lock 3 * refactor: remove unused utility files and exports to clean up the codebase * refactor: remove i18n and useAuthRedirect modules to streamline codebase * refactor: optimize Conversations component and remove unused ToggleContext * refactor(Convo): add RenameForm and ConvoLink components; enhance Conversations component with responsive design * fix: add missing @azure/storage-blob dependency in package.json * refactor(Search): add error handling with toast notification for search errors * refactor: make createdAt and updatedAt fields of tConvoUpdateSchema less restrictive if timestamps are missing * chore: update @azure/storage-blob dependency to version 12.27.0, ensure package-lock is correct * refactor(Search): improve conversation handling server side * fix: eslint warning and errors * refactor(Search): improved search loading state and overall UX * Refactors conversation cache management Centralizes conversation mutation logic into dedicated utility functions for adding, updating, and removing conversations from query caches. 
Improves reliability and maintainability by: - Consolidating duplicate cache manipulation code - Adding type safety for infinite query data structures - Implementing consistent cache update patterns across all conversation operations - Removing obsolete conversation helper functions in favor of standardized utilities * fix: conversation handling and SSE event processing - Optimizes conversation state management with useMemo and proper hook ordering - Improves SSE event handler documentation and error handling - Adds reset guard flag for conversation changes - Removes redundant navigation call - Cleans up cursor handling logic and document structure Improves code maintainability and prevents potential race conditions in conversation state updates * refactor: add type for SearchBar `onChange` * fix: type tags * style: rounded to xl all Header buttons * fix: activeConvo in Convo not working * style(Bookmarks): improved UI * a11y(AccountSettings): fixed hover style not visible when using light theme * style(SettingsTabs): improved tab switchers and dropdowns * feat: add translations keys for Speech * chore: fix package-lock * fix(mutations): legacy import after rebase * feat: refactor conversation navigation for accessibility * fix(search): convo and message create/update date not returned * fix(search): show correct iconURL and endpoint for searched messages * fix: small UI improvements * chore: console.log cleanup * chore: fix tests * fix(ChatForm): improve conversation ID handling and clean up useMemo dependencies * chore: improve typing * chore: improve typing * fix(useSSE): clear conversation ID on submission to prevent draft restoration * refactor(OpenAIClient): clean up abort handler * refactor(abortMiddleware): change handleAbort to use function expression * feat: add PENDING_CONVO constant and update conversation ID checks * fix: final event handling on abort * fix: improve title sync and query cache sync on final event * fix: prevent overwriting cached 
conversation data if it already exists --------- Co-authored-by: Danny Avila <danny@librechat.ai>
This commit is contained in:
parent
77a21719fd
commit
650e9b4f6c
69 changed files with 3434 additions and 2139 deletions
|
|
@@ -1,13 +1,18 @@
|
|||
import { Constants } from 'librechat-data-provider';
|
||||
import type { TConversation, ConversationData } from 'librechat-data-provider';
|
||||
import { QueryClient } from '@tanstack/react-query';
|
||||
import type { TConversation, InfiniteData } from 'librechat-data-provider';
|
||||
import {
|
||||
dateKeys,
|
||||
addConversation,
|
||||
updateConvoFields,
|
||||
updateConversation,
|
||||
deleteConversation,
|
||||
findPageForConversation,
|
||||
storeEndpointSettings,
|
||||
addConversationToInfinitePages,
|
||||
updateInfiniteConvoPage,
|
||||
findConversationInInfinite,
|
||||
removeConvoFromInfinitePages,
|
||||
groupConversationsByDate,
|
||||
updateConvoFieldsInfinite,
|
||||
addConvoToAllQueries,
|
||||
updateConvoInAllQueries,
|
||||
removeConvoFromAllQueries,
|
||||
addConversationToAllConversationsQueries,
|
||||
} from './convos';
|
||||
import { convoData } from './convos.fakeData';
|
||||
import { normalizeData } from './collection';
|
||||
|
|
@@ -26,9 +31,9 @@ describe('Conversation Utilities', () => {
|
|||
const conversations = [
|
||||
{ conversationId: '1', updatedAt: '2023-04-01T12:00:00Z' },
|
||||
{ conversationId: '2', updatedAt: new Date().toISOString() },
|
||||
{ conversationId: '3', updatedAt: new Date(Date.now() - 86400000).toISOString() }, // 86400 seconds ago = yesterday
|
||||
{ conversationId: '4', updatedAt: new Date(Date.now() - 86400000 * 2).toISOString() }, // 2 days ago (previous 7 days)
|
||||
{ conversationId: '5', updatedAt: new Date(Date.now() - 86400000 * 8).toISOString() }, // 8 days ago (previous 30 days)
|
||||
{ conversationId: '3', updatedAt: new Date(Date.now() - 86400000).toISOString() },
|
||||
{ conversationId: '4', updatedAt: new Date(Date.now() - 86400000 * 2).toISOString() },
|
||||
{ conversationId: '5', updatedAt: new Date(Date.now() - 86400000 * 8).toISOString() },
|
||||
];
|
||||
const grouped = groupConversationsByDate(conversations as TConversation[]);
|
||||
expect(grouped[0][0]).toBe(dateKeys.today);
|
||||
|
|
@@ -43,84 +48,20 @@ describe('Conversation Utilities', () => {
|
|||
expect(grouped[4][1]).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('groups conversations correctly across multiple years', () => {
|
||||
const fixedDate = new Date('2023-07-15T12:00:00Z');
|
||||
const conversations = [
|
||||
{ conversationId: '1', updatedAt: '2023-07-15T10:00:00Z' }, // Today
|
||||
{ conversationId: '2', updatedAt: '2023-07-14T12:00:00Z' }, // Yesterday
|
||||
{ conversationId: '3', updatedAt: '2023-07-08T12:00:00Z' }, // This week
|
||||
{ conversationId: '4', updatedAt: '2023-07-01T12:00:00Z' }, // This month (within last 30 days)
|
||||
{ conversationId: '5', updatedAt: '2023-06-01T12:00:00Z' }, // Last month
|
||||
{ conversationId: '6', updatedAt: '2023-01-01T12:00:00Z' }, // This year, January
|
||||
{ conversationId: '7', updatedAt: '2022-12-01T12:00:00Z' }, // Last year, December
|
||||
{ conversationId: '8', updatedAt: '2022-06-01T12:00:00Z' }, // Last year, June
|
||||
{ conversationId: '9', updatedAt: '2021-12-01T12:00:00Z' }, // Two years ago
|
||||
{ conversationId: '10', updatedAt: '2020-06-01T12:00:00Z' }, // Three years ago
|
||||
];
|
||||
|
||||
// Mock Date.now
|
||||
const originalDateNow = Date.now;
|
||||
Date.now = jest.fn(() => fixedDate.getTime());
|
||||
|
||||
const grouped = groupConversationsByDate(conversations as TConversation[]);
|
||||
|
||||
// Restore Date.now
|
||||
Date.now = originalDateNow;
|
||||
|
||||
const expectedGroups = [
|
||||
dateKeys.today,
|
||||
dateKeys.yesterday,
|
||||
dateKeys.previous7Days,
|
||||
dateKeys.previous30Days,
|
||||
dateKeys.june,
|
||||
dateKeys.january,
|
||||
' 2022',
|
||||
' 2021',
|
||||
' 2020',
|
||||
];
|
||||
|
||||
expect(grouped.map(([key]) => key)).toEqual(expectedGroups);
|
||||
|
||||
// Helper function to safely get group length
|
||||
const getGroupLength = (key: string) => grouped.find(([k]) => k === key)?.[1]?.length ?? 0;
|
||||
|
||||
// Check specific group contents
|
||||
expect(getGroupLength(dateKeys.today)).toBe(1);
|
||||
expect(getGroupLength(dateKeys.yesterday)).toBe(1);
|
||||
expect(getGroupLength(dateKeys.previous7Days)).toBe(1);
|
||||
expect(getGroupLength(dateKeys.previous30Days)).toBe(1);
|
||||
expect(getGroupLength(dateKeys.june)).toBe(1);
|
||||
expect(getGroupLength(dateKeys.january)).toBe(1);
|
||||
expect(getGroupLength(' 2022')).toBe(2); // December and June 2022
|
||||
expect(getGroupLength(' 2021')).toBe(1);
|
||||
expect(getGroupLength(' 2020')).toBe(1);
|
||||
|
||||
// Check that all conversations are accounted for
|
||||
const totalGroupedConversations = grouped.reduce(
|
||||
(total, [, convos]) => total + convos.length,
|
||||
0,
|
||||
);
|
||||
expect(totalGroupedConversations).toBe(conversations.length);
|
||||
});
|
||||
|
||||
it('returns an empty array for no conversations', () => {
|
||||
expect(groupConversationsByDate([])).toEqual([]);
|
||||
});
|
||||
|
||||
it('skips conversations with duplicate conversationIds', () => {
|
||||
const conversations = [
|
||||
{ conversationId: '1', updatedAt: '2023-12-01T12:00:00Z' }, // " 2023"
|
||||
{ conversationId: '2', updatedAt: '2023-11-25T12:00:00Z' }, // " 2023"
|
||||
{ conversationId: '1', updatedAt: '2023-11-20T12:00:00Z' }, // Should be skipped because of duplicate ID
|
||||
{ conversationId: '3', updatedAt: '2022-12-01T12:00:00Z' }, // " 2022"
|
||||
{ conversationId: '1', updatedAt: '2023-12-01T12:00:00Z' },
|
||||
{ conversationId: '2', updatedAt: '2023-11-25T12:00:00Z' },
|
||||
{ conversationId: '1', updatedAt: '2023-11-20T12:00:00Z' },
|
||||
{ conversationId: '3', updatedAt: '2022-12-01T12:00:00Z' },
|
||||
];
|
||||
|
||||
const grouped = groupConversationsByDate(conversations as TConversation[]);
|
||||
|
||||
expect(grouped).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.arrayContaining([' 2023', expect.arrayContaining(conversations.slice(0, 2))]),
|
||||
expect.arrayContaining([' 2022', expect.arrayContaining([conversations[3]])]),
|
||||
[' 2023', expect.arrayContaining([conversations[0], conversations[1]])],
|
||||
[' 2022', expect.arrayContaining([conversations[3]])],
|
||||
]),
|
||||
);
|
||||
|
||||
|
|
@@ -132,22 +73,25 @@ describe('Conversation Utilities', () => {
|
|||
|
||||
it('sorts conversations by month correctly', () => {
|
||||
const conversations = [
|
||||
{ conversationId: '1', updatedAt: '2023-01-01T12:00:00Z' }, // January 2023
|
||||
{ conversationId: '2', updatedAt: '2023-12-01T12:00:00Z' }, // December 2023
|
||||
{ conversationId: '3', updatedAt: '2023-02-01T12:00:00Z' }, // February 2023
|
||||
{ conversationId: '4', updatedAt: '2023-11-01T12:00:00Z' }, // November 2023
|
||||
{ conversationId: '5', updatedAt: '2022-12-01T12:00:00Z' }, // December 2022
|
||||
{ conversationId: '1', updatedAt: '2023-01-01T12:00:00Z' },
|
||||
{ conversationId: '2', updatedAt: '2023-12-01T12:00:00Z' },
|
||||
{ conversationId: '3', updatedAt: '2023-02-01T12:00:00Z' },
|
||||
{ conversationId: '4', updatedAt: '2023-11-01T12:00:00Z' },
|
||||
{ conversationId: '5', updatedAt: '2022-12-01T12:00:00Z' },
|
||||
];
|
||||
|
||||
const grouped = groupConversationsByDate(conversations as TConversation[]);
|
||||
|
||||
// Check if the years are in the correct order (most recent first)
|
||||
expect(grouped.map(([key]) => key)).toEqual([' 2023', ' 2022']);
|
||||
// Now expect grouping by year for 2023 and 2022
|
||||
const expectedGroups = [' 2023', ' 2022'];
|
||||
expect(grouped.map(([key]) => key)).toEqual(expectedGroups);
|
||||
|
||||
// Check if conversations within 2023 are sorted correctly by month
|
||||
// Check if conversations within 2023 are sorted correctly by updatedAt descending
|
||||
const conversationsIn2023 = grouped[0][1];
|
||||
const monthsIn2023 = conversationsIn2023.map((c) => new Date(c.updatedAt).getMonth());
|
||||
expect(monthsIn2023).toEqual([11, 10, 1, 0]); // December (11), November (10), February (1), January (0)
|
||||
const sorted = [...conversationsIn2023].sort(
|
||||
(a, b) => new Date(b.updatedAt).getTime() - new Date(a.updatedAt).getTime(),
|
||||
);
|
||||
expect(conversationsIn2023).toEqual(sorted);
|
||||
|
||||
// Check if the conversation from 2022 is in its own group
|
||||
expect(grouped[1][1].length).toBe(1);
|
||||
|
|
@@ -156,19 +100,19 @@ describe('Conversation Utilities', () => {
|
|||
|
||||
it('handles conversations from multiple years correctly', () => {
|
||||
const conversations = [
|
||||
{ conversationId: '1', updatedAt: '2023-01-01T12:00:00Z' }, // January 2023
|
||||
{ conversationId: '2', updatedAt: '2022-12-01T12:00:00Z' }, // December 2022
|
||||
{ conversationId: '3', updatedAt: '2021-06-01T12:00:00Z' }, // June 2021
|
||||
{ conversationId: '4', updatedAt: '2023-06-01T12:00:00Z' }, // June 2023
|
||||
{ conversationId: '5', updatedAt: '2021-12-01T12:00:00Z' }, // December 2021
|
||||
{ conversationId: '1', updatedAt: '2023-01-01T12:00:00Z' },
|
||||
{ conversationId: '2', updatedAt: '2022-12-01T12:00:00Z' },
|
||||
{ conversationId: '3', updatedAt: '2021-06-01T12:00:00Z' },
|
||||
{ conversationId: '4', updatedAt: '2023-06-01T12:00:00Z' },
|
||||
{ conversationId: '5', updatedAt: '2021-12-01T12:00:00Z' },
|
||||
];
|
||||
|
||||
const grouped = groupConversationsByDate(conversations as TConversation[]);
|
||||
|
||||
expect(grouped.map(([key]) => key)).toEqual([' 2023', ' 2022', ' 2021']);
|
||||
expect(grouped[0][1].map((c) => new Date(c.updatedAt).getMonth())).toEqual([5, 0]); // June, January
|
||||
expect(grouped[1][1].map((c) => new Date(c.updatedAt).getMonth())).toEqual([11]); // December
|
||||
expect(grouped[2][1].map((c) => new Date(c.updatedAt).getMonth())).toEqual([11, 5]); // December, June
|
||||
expect(grouped[0][1].map((c) => new Date(c.updatedAt).getFullYear())).toEqual([2023, 2023]);
|
||||
expect(grouped[1][1].map((c) => new Date(c.updatedAt).getFullYear())).toEqual([2022]);
|
||||
expect(grouped[2][1].map((c) => new Date(c.updatedAt).getFullYear())).toEqual([2021, 2021]);
|
||||
});
|
||||
|
||||
it('handles conversations from the same month correctly', () => {
|
||||
|
|
@@ -185,28 +129,6 @@ describe('Conversation Utilities', () => {
|
|||
expect(grouped[0][1].map((c) => c.conversationId)).toEqual(['3', '2', '1']);
|
||||
});
|
||||
|
||||
it('handles conversations from today, yesterday, and previous days correctly', () => {
|
||||
const today = new Date();
|
||||
const yesterday = new Date(today);
|
||||
yesterday.setDate(yesterday.getDate() - 1);
|
||||
const twoDaysAgo = new Date(today);
|
||||
twoDaysAgo.setDate(twoDaysAgo.getDate() - 2);
|
||||
|
||||
const conversations = [
|
||||
{ conversationId: '1', updatedAt: today.toISOString() },
|
||||
{ conversationId: '2', updatedAt: yesterday.toISOString() },
|
||||
{ conversationId: '3', updatedAt: twoDaysAgo.toISOString() },
|
||||
];
|
||||
|
||||
const grouped = groupConversationsByDate(conversations as TConversation[]);
|
||||
|
||||
expect(grouped.map(([key]) => key)).toEqual([
|
||||
dateKeys.today,
|
||||
dateKeys.yesterday,
|
||||
dateKeys.previous7Days,
|
||||
]);
|
||||
});
|
||||
|
||||
it('handles conversations with null or undefined updatedAt correctly', () => {
|
||||
const conversations = [
|
||||
{ conversationId: '1', updatedAt: '2023-06-01T12:00:00Z' },
|
||||
|
|
@@ -216,17 +138,11 @@ describe('Conversation Utilities', () => {
|
|||
|
||||
const grouped = groupConversationsByDate(conversations as TConversation[]);
|
||||
|
||||
expect(grouped.length).toBe(2); // One group for 2023 and one for today (null/undefined dates)
|
||||
expect(grouped.length).toBe(2);
|
||||
expect(grouped[0][0]).toBe(dateKeys.today);
|
||||
expect(grouped[0][1].length).toBe(2); // Two conversations with null/undefined dates
|
||||
expect(grouped[0][1].length).toBe(2);
|
||||
expect(grouped[1][0]).toBe(' 2023');
|
||||
expect(grouped[1][1].length).toBe(1); // One conversation from 2023
|
||||
});
|
||||
|
||||
it('handles an empty array of conversations', () => {
|
||||
const grouped = groupConversationsByDate([]);
|
||||
|
||||
expect(grouped).toEqual([]);
|
||||
expect(grouped[1][1].length).toBe(1);
|
||||
});
|
||||
|
||||
it('correctly groups and sorts conversations for every month of the year', () => {
|
||||
|
|
@@ -259,205 +175,22 @@ describe('Conversation Utilities', () => {
|
|||
|
||||
const grouped = groupConversationsByDate(conversations as TConversation[]);
|
||||
|
||||
// Check that we have two year groups
|
||||
expect(grouped.length).toBe(2);
|
||||
|
||||
// Check 2023 months
|
||||
const group2023 = grouped.find(([key]) => key === ' 2023') ?? [];
|
||||
// All 2023 conversations should be in a single group
|
||||
const group2023 = grouped.find(([key]) => key === ' 2023');
|
||||
expect(group2023).toBeDefined();
|
||||
const grouped2023 = group2023[1];
|
||||
expect(grouped2023?.length).toBe(12);
|
||||
expect(grouped2023?.map((c) => new Date(c.updatedAt).getMonth())).toEqual([
|
||||
11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0,
|
||||
]);
|
||||
expect(group2023![1].length).toBe(12);
|
||||
|
||||
// Check 2022 months
|
||||
const group2022 = grouped.find(([key]) => key === ' 2022') ?? [];
|
||||
// All 2022 conversations should be in a single group
|
||||
const group2022 = grouped.find(([key]) => key === ' 2022');
|
||||
expect(group2022).toBeDefined();
|
||||
const grouped2022 = group2022[1];
|
||||
expect(grouped2022?.length).toBe(12);
|
||||
expect(grouped2022?.map((c) => new Date(c.updatedAt).getMonth())).toEqual([
|
||||
11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0,
|
||||
]);
|
||||
expect(group2022![1].length).toBe(12);
|
||||
|
||||
// Check that all conversations are accounted for
|
||||
const totalGroupedConversations =
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
grouped.reduce((total, [_, convos]) => total + convos.length, 0);
|
||||
const totalGroupedConversations = grouped.reduce(
|
||||
(total, [_, convos]) => total + convos.length,
|
||||
0,
|
||||
);
|
||||
expect(totalGroupedConversations).toBe(conversations.length);
|
||||
|
||||
// Check that the years are in the correct order
|
||||
const yearOrder = grouped.map(([key]) => key);
|
||||
expect(yearOrder).toEqual([' 2023', ' 2022']);
|
||||
});
|
||||
});
|
||||
|
||||
describe('addConversation', () => {
|
||||
it('adds a new conversation to the top of the list', () => {
|
||||
const data = { pages: [{ conversations: [] }] };
|
||||
const newConversation = {
|
||||
conversationId: Constants.NEW_CONVO,
|
||||
updatedAt: '2023-04-02T12:00:00Z',
|
||||
};
|
||||
const newData = addConversation(
|
||||
data as unknown as ConversationData,
|
||||
newConversation as TConversation,
|
||||
);
|
||||
expect(newData.pages[0].conversations).toHaveLength(1);
|
||||
expect(newData.pages[0].conversations[0].conversationId).toBe(Constants.NEW_CONVO);
|
||||
});
|
||||
});
|
||||
|
||||
describe('updateConversation', () => {
|
||||
it('updates an existing conversation and moves it to the top', () => {
|
||||
const initialData = {
|
||||
pages: [
|
||||
{
|
||||
conversations: [
|
||||
{ conversationId: '1', updatedAt: '2023-04-01T12:00:00Z' },
|
||||
{ conversationId: '2', updatedAt: '2023-04-01T13:00:00Z' },
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
const updatedConversation = { conversationId: '1', updatedAt: '2023-04-02T12:00:00Z' };
|
||||
const newData = updateConversation(
|
||||
initialData as unknown as ConversationData,
|
||||
updatedConversation as TConversation,
|
||||
);
|
||||
expect(newData.pages[0].conversations).toHaveLength(2);
|
||||
expect(newData.pages[0].conversations[0].conversationId).toBe('1');
|
||||
});
|
||||
});
|
||||
|
||||
describe('updateConvoFields', () => {
|
||||
it('updates specific fields of a conversation', () => {
|
||||
const initialData = {
|
||||
pages: [
|
||||
{
|
||||
conversations: [
|
||||
{ conversationId: '1', title: 'Old Title', updatedAt: '2023-04-01T12:00:00Z' },
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
const updatedFields = { conversationId: '1', title: 'New Title' };
|
||||
const newData = updateConvoFields(
|
||||
initialData as ConversationData,
|
||||
updatedFields as TConversation,
|
||||
);
|
||||
expect(newData.pages[0].conversations[0].title).toBe('New Title');
|
||||
});
|
||||
});
|
||||
|
||||
describe('deleteConversation', () => {
|
||||
it('removes a conversation by id', () => {
|
||||
const initialData = {
|
||||
pages: [
|
||||
{
|
||||
conversations: [
|
||||
{ conversationId: '1', updatedAt: '2023-04-01T12:00:00Z' },
|
||||
{ conversationId: '2', updatedAt: '2023-04-01T13:00:00Z' },
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
const newData = deleteConversation(initialData as ConversationData, '1');
|
||||
expect(newData.pages[0].conversations).toHaveLength(1);
|
||||
expect(newData.pages[0].conversations[0].conversationId).not.toBe('1');
|
||||
});
|
||||
});
|
||||
|
||||
describe('findPageForConversation', () => {
|
||||
it('finds the correct page and index for a given conversation', () => {
|
||||
const data = {
|
||||
pages: [
|
||||
{
|
||||
conversations: [
|
||||
{ conversationId: '1', updatedAt: '2023-04-01T12:00:00Z' },
|
||||
{ conversationId: '2', updatedAt: '2023-04-02T13:00:00Z' },
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
const { pageIndex, index } = findPageForConversation(data as ConversationData, {
|
||||
conversationId: '2',
|
||||
});
|
||||
expect(pageIndex).toBe(0);
|
||||
expect(index).toBe(1);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Conversation Utilities with Fake Data', () => {
|
||||
describe('groupConversationsByDate', () => {
|
||||
it('correctly groups conversations from fake data by date', () => {
|
||||
const { pages } = convoData;
|
||||
const allConversations = pages.flatMap((p) => p.conversations);
|
||||
const grouped = groupConversationsByDate(allConversations);
|
||||
|
||||
expect(grouped).toHaveLength(1);
|
||||
expect(grouped[0][1]).toBeInstanceOf(Array);
|
||||
});
|
||||
});
|
||||
|
||||
describe('addConversation', () => {
|
||||
it('adds a new conversation to the existing fake data', () => {
|
||||
const newConversation = {
|
||||
conversationId: Constants.NEW_CONVO,
|
||||
updatedAt: new Date().toISOString(),
|
||||
} as TConversation;
|
||||
const initialLength = convoData.pages[0].conversations.length;
|
||||
const newData = addConversation(convoData, newConversation);
|
||||
expect(newData.pages[0].conversations.length).toBe(initialLength + 1);
|
||||
expect(newData.pages[0].conversations[0].conversationId).toBe(Constants.NEW_CONVO);
|
||||
});
|
||||
});
|
||||
|
||||
describe('updateConversation', () => {
|
||||
it('updates an existing conversation within fake data', () => {
|
||||
const updatedConversation = {
|
||||
...convoData.pages[0].conversations[0],
|
||||
title: 'Updated Title',
|
||||
};
|
||||
const newData = updateConversation(convoData, updatedConversation);
|
||||
expect(newData.pages[0].conversations[0].title).toBe('Updated Title');
|
||||
});
|
||||
});
|
||||
|
||||
describe('updateConvoFields', () => {
|
||||
it('updates specific fields of a conversation in fake data', () => {
|
||||
const updatedFields = {
|
||||
conversationId: convoData.pages[0].conversations[0].conversationId,
|
||||
title: 'Partially Updated Title',
|
||||
};
|
||||
const newData = updateConvoFields(convoData, updatedFields as TConversation);
|
||||
const updatedConversation = newData.pages[0].conversations.find(
|
||||
(c) => c.conversationId === updatedFields.conversationId,
|
||||
);
|
||||
expect(updatedConversation?.title).toBe('Partially Updated Title');
|
||||
});
|
||||
});
|
||||
|
||||
describe('deleteConversation', () => {
|
||||
it('removes a conversation by id from fake data', () => {
|
||||
const conversationIdToDelete = convoData.pages[0].conversations[0].conversationId as string;
|
||||
const newData = deleteConversation(convoData, conversationIdToDelete);
|
||||
const deletedConvoExists = newData.pages[0].conversations.some(
|
||||
(c) => c.conversationId === conversationIdToDelete,
|
||||
);
|
||||
expect(deletedConvoExists).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('findPageForConversation', () => {
|
||||
it('finds the correct page and index for a given conversation in fake data', () => {
|
||||
const targetConversation = convoData.pages[0].conversations[0];
|
||||
const { pageIndex, index } = findPageForConversation(convoData, {
|
||||
conversationId: targetConversation.conversationId as string,
|
||||
});
|
||||
expect(pageIndex).toBeGreaterThanOrEqual(0);
|
||||
expect(index).toBeGreaterThanOrEqual(0);
|
||||
});
|
||||
});
|
||||
|
||||
|
|
@@ -627,4 +360,277 @@ describe('Conversation Utilities with Fake Data', () => {
|
|||
expect(normalizedData.pageParams).toHaveLength(2);
|
||||
});
|
||||
});
|
||||
|
||||
describe('InfiniteData helpers', () => {
|
||||
const makeConversation = (id: string, updatedAt?: string) => ({
|
||||
conversationId: id,
|
||||
updatedAt: updatedAt || new Date().toISOString(),
|
||||
});
|
||||
|
||||
const makePage = (conversations: any[], nextCursor: string | null = null) => ({
|
||||
conversations,
|
||||
nextCursor,
|
||||
});
|
||||
|
||||
describe('findConversationInInfinite', () => {
|
||||
it('finds a conversation by id in InfiniteData', () => {
|
||||
const data = {
|
||||
pages: [
|
||||
makePage([makeConversation('1'), makeConversation('2')]),
|
||||
makePage([makeConversation('3')]),
|
||||
],
|
||||
pageParams: [],
|
||||
};
|
||||
const found = findConversationInInfinite(data, '2');
|
||||
expect(found).toBeDefined();
|
||||
expect(found?.conversationId).toBe('2');
|
||||
});
|
||||
|
||||
it('returns undefined if conversation not found', () => {
|
||||
const data = {
|
||||
pages: [makePage([makeConversation('1')])],
|
||||
pageParams: [],
|
||||
};
|
||||
expect(findConversationInInfinite(data, 'notfound')).toBeUndefined();
|
||||
});
|
||||
|
||||
it('returns undefined if data is undefined', () => {
|
||||
expect(findConversationInInfinite(undefined, '1')).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('updateInfiniteConvoPage', () => {
|
||||
it('updates a conversation in InfiniteData', () => {
|
||||
const data = {
|
||||
pages: [makePage([makeConversation('1', '2023-01-01T00:00:00Z'), makeConversation('2')])],
|
||||
pageParams: [],
|
||||
};
|
||||
const updater = (c: any) => ({ ...c, updatedAt: '2024-01-01T00:00:00Z' });
|
||||
const updated = updateInfiniteConvoPage(data, '1', updater);
|
||||
expect(updated?.pages[0].conversations[0].updatedAt).toBe('2024-01-01T00:00:00Z');
|
||||
});
|
||||
|
||||
it('returns original data if conversation not found', () => {
|
||||
const data = {
|
||||
pages: [makePage([makeConversation('1')])],
|
||||
pageParams: [],
|
||||
};
|
||||
const updater = (c: any) => ({ ...c, foo: 'bar' });
|
||||
const updated = updateInfiniteConvoPage(data, 'notfound', updater);
|
||||
expect(updated).toEqual(data);
|
||||
});
|
||||
|
||||
it('returns undefined if data is undefined', () => {
|
||||
expect(updateInfiniteConvoPage(undefined, '1', (c) => c)).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('addConversationToInfinitePages', () => {
|
||||
it('adds a conversation to the first page', () => {
|
||||
const data = {
|
||||
pages: [makePage([makeConversation('1')])],
|
||||
pageParams: [],
|
||||
};
|
||||
const newConvo = makeConversation('new');
|
||||
const updated = addConversationToInfinitePages(data, newConvo);
|
||||
expect(updated.pages[0].conversations[0].conversationId).toBe('new');
|
||||
expect(updated.pages[0].conversations[1].conversationId).toBe('1');
|
||||
});
|
||||
|
||||
it('creates new InfiniteData if data is undefined', () => {
|
||||
const newConvo = makeConversation('new');
|
||||
const updated = addConversationToInfinitePages(undefined, newConvo);
|
||||
expect(updated.pages[0].conversations[0].conversationId).toBe('new');
|
||||
expect(updated.pageParams).toEqual([undefined]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('removeConvoFromInfinitePages', () => {
|
||||
it('removes a conversation by id', () => {
|
||||
const data = {
|
||||
pages: [
|
||||
makePage([makeConversation('1'), makeConversation('2')]),
|
||||
makePage([makeConversation('3')]),
|
||||
],
|
||||
pageParams: [],
|
||||
};
|
||||
const updated = removeConvoFromInfinitePages(data, '2');
|
||||
expect(updated?.pages[0].conversations.map((c) => c.conversationId)).toEqual(['1']);
|
||||
});
|
||||
|
||||
it('removes empty pages after deletion', () => {
|
||||
const data = {
|
||||
pages: [makePage([makeConversation('1')]), makePage([makeConversation('2')])],
|
||||
pageParams: [],
|
||||
};
|
||||
const updated = removeConvoFromInfinitePages(data, '2');
|
||||
expect(updated?.pages.length).toBe(1);
|
||||
expect(updated?.pages[0].conversations[0].conversationId).toBe('1');
|
||||
});
|
||||
|
||||
it('returns original data if data is undefined', () => {
|
||||
expect(removeConvoFromInfinitePages(undefined, '1')).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('updateConvoFieldsInfinite', () => {
|
||||
it('updates fields and bumps to front if keepPosition is false', () => {
|
||||
const data = {
|
||||
pages: [
|
||||
makePage([makeConversation('1'), makeConversation('2')]),
|
||||
makePage([makeConversation('3')]),
|
||||
],
|
||||
pageParams: [],
|
||||
};
|
||||
const updated = updateConvoFieldsInfinite(
|
||||
data,
|
||||
{ conversationId: '2', title: 'new' },
|
||||
false,
|
||||
);
|
||||
expect(updated?.pages[0].conversations[0].conversationId).toBe('2');
|
||||
expect(updated?.pages[0].conversations[0].title).toBe('new');
|
||||
});
|
||||
|
||||
it('updates fields and keeps position if keepPosition is true', () => {
|
||||
const data = {
|
||||
pages: [makePage([makeConversation('1'), makeConversation('2')])],
|
||||
pageParams: [],
|
||||
};
|
||||
const updated = updateConvoFieldsInfinite(
|
||||
data,
|
||||
{ conversationId: '2', title: 'stay' },
|
||||
true,
|
||||
);
|
||||
expect(updated?.pages[0].conversations[1].title).toBe('stay');
|
||||
});
|
||||
|
||||
it('returns original data if conversation not found', () => {
|
||||
const data = {
|
||||
pages: [makePage([makeConversation('1')])],
|
||||
pageParams: [],
|
||||
};
|
||||
const updated = updateConvoFieldsInfinite(
|
||||
data,
|
||||
{ conversationId: 'notfound', title: 'x' },
|
||||
false,
|
||||
);
|
||||
expect(updated).toEqual(data);
|
||||
});
|
||||
|
||||
it('returns original data if data is undefined', () => {
|
||||
expect(
|
||||
updateConvoFieldsInfinite(undefined, { conversationId: '1', title: 'x' }, false),
|
||||
).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('storeEndpointSettings', () => {
|
||||
beforeEach(() => {
|
||||
localStorage.clear();
|
||||
});
|
||||
|
||||
it('stores model for endpoint', () => {
|
||||
const conversation = {
|
||||
conversationId: '1',
|
||||
endpoint: 'openai',
|
||||
model: 'gpt-3',
|
||||
};
|
||||
storeEndpointSettings(conversation as any);
|
||||
const stored = JSON.parse(localStorage.getItem('lastModel') || '{}');
|
||||
expect([undefined, 'gpt-3']).toContain(stored.openai);
|
||||
});
|
||||
|
||||
it('stores secondaryModel for gptPlugins endpoint', () => {
|
||||
const conversation = {
|
||||
conversationId: '1',
|
||||
endpoint: 'gptPlugins',
|
||||
model: 'gpt-4',
|
||||
agentOptions: { model: 'plugin-model' },
|
||||
};
|
||||
storeEndpointSettings(conversation as any);
|
||||
const stored = JSON.parse(localStorage.getItem('lastModel') || '{}');
|
||||
expect([undefined, 'gpt-4']).toContain(stored.gptPlugins);
|
||||
expect([undefined, 'plugin-model']).toContain(stored.secondaryModel);
|
||||
});
|
||||
|
||||
it('does nothing if conversation is null', () => {
|
||||
storeEndpointSettings(null);
|
||||
expect(localStorage.getItem('lastModel')).toBeNull();
|
||||
});
|
||||
|
||||
it('does nothing if endpoint is missing', () => {
|
||||
storeEndpointSettings({ conversationId: '1', model: 'x' } as any);
|
||||
expect(localStorage.getItem('lastModel')).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe('QueryClient helpers', () => {
|
||||
let queryClient: QueryClient;
|
||||
let convoA: TConversation;
|
||||
let convoB: TConversation;
|
||||
|
||||
beforeEach(() => {
|
||||
queryClient = new QueryClient();
|
||||
convoA = {
|
||||
conversationId: 'a',
|
||||
updatedAt: '2024-01-01T12:00:00Z',
|
||||
createdAt: '2024-01-01T10:00:00Z',
|
||||
endpoint: 'openai',
|
||||
model: 'gpt-3',
|
||||
title: 'Conversation A',
|
||||
} as TConversation;
|
||||
convoB = {
|
||||
conversationId: 'b',
|
||||
updatedAt: '2024-01-02T12:00:00Z',
|
||||
endpoint: 'openai',
|
||||
model: 'gpt-3',
|
||||
} as TConversation;
|
||||
queryClient.setQueryData(['allConversations'], {
|
||||
pages: [{ conversations: [convoA], nextCursor: null }],
|
||||
pageParams: [],
|
||||
});
|
||||
});
|
||||
|
||||
it('addConvoToAllQueries adds new on top if not present', () => {
|
||||
addConvoToAllQueries(queryClient, convoB);
|
||||
const data = queryClient.getQueryData<InfiniteData<any>>(['allConversations']);
|
||||
expect(data!.pages[0].conversations[0].conversationId).toBe('b');
|
||||
expect(data!.pages[0].conversations.length).toBe(2);
|
||||
});
|
||||
|
||||
it('addConvoToAllQueries does not duplicate', () => {
|
||||
addConvoToAllQueries(queryClient, convoA);
|
||||
const data = queryClient.getQueryData<InfiniteData<any>>(['allConversations']);
|
||||
expect(data!.pages[0].conversations.filter((c) => c.conversationId === 'a').length).toBe(1);
|
||||
});
|
||||
|
||||
it('updateConvoInAllQueries updates correct convo', () => {
|
||||
updateConvoInAllQueries(queryClient, 'a', (c) => ({ ...c, model: 'gpt-4' }));
|
||||
const data = queryClient.getQueryData<InfiniteData<any>>(['allConversations']);
|
||||
expect(data!.pages[0].conversations[0].model).toBe('gpt-4');
|
||||
});
|
||||
|
||||
it('removeConvoFromAllQueries deletes conversation', () => {
|
||||
removeConvoFromAllQueries(queryClient, 'a');
|
||||
const data = queryClient.getQueryData<InfiniteData<any>>(['allConversations']);
|
||||
expect(data!.pages.length).toBe(0);
|
||||
});
|
||||
|
||||
it('addConversationToAllConversationsQueries works with multiple pages', () => {
|
||||
queryClient.setQueryData(['allConversations', 'other'], {
|
||||
pages: [{ conversations: [], nextCursor: null }],
|
||||
pageParams: [],
|
||||
});
|
||||
addConversationToAllConversationsQueries(queryClient, convoB);
|
||||
|
||||
const mainData = queryClient.getQueryData<InfiniteData<any>>(['allConversations']);
|
||||
const otherData = queryClient.getQueryData<InfiniteData<any>>([
|
||||
'allConversations',
|
||||
'other',
|
||||
]);
|
||||
expect(mainData!.pages[0].conversations[0].conversationId).toBe('b');
|
||||
expect(otherData!.pages[0].conversations[0].conversationId).toBe('b');
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -8,17 +8,12 @@ import {
|
|||
startOfYear,
|
||||
isWithinInterval,
|
||||
} from 'date-fns';
|
||||
import { EModelEndpoint, LocalStorageKeys } from 'librechat-data-provider';
|
||||
import type {
|
||||
TConversation,
|
||||
ConversationData,
|
||||
GroupedConversations,
|
||||
ConversationListResponse,
|
||||
} from 'librechat-data-provider';
|
||||
|
||||
import { addData, deleteData, updateData, findPage } from './collection';
|
||||
import { InfiniteData } from '@tanstack/react-query';
|
||||
import { QueryClient } from '@tanstack/react-query';
|
||||
import { EModelEndpoint, LocalStorageKeys, QueryKeys } from 'librechat-data-provider';
|
||||
import type { TConversation, GroupedConversations } from 'librechat-data-provider';
|
||||
import type { InfiniteData } from '@tanstack/react-query';
|
||||
|
||||
// Date group helpers
|
||||
export const dateKeys = {
|
||||
today: 'com_ui_date_today',
|
||||
yesterday: 'com_ui_date_yesterday',
|
||||
|
|
@ -73,11 +68,7 @@ const monthOrderMap = new Map([
|
|||
['february', 1],
|
||||
['january', 0],
|
||||
]);
|
||||
|
||||
const dateKeysReverse = Object.fromEntries(
|
||||
Object.entries(dateKeys).map(([key, value]) => [value, key]),
|
||||
);
|
||||
|
||||
const dateKeysReverse = Object.fromEntries(Object.entries(dateKeys).map(([k, v]) => [v, k]));
|
||||
const dateGroupsSet = new Set([
|
||||
dateKeys.today,
|
||||
dateKeys.yesterday,
|
||||
|
|
@ -91,7 +82,6 @@ export const groupConversationsByDate = (
|
|||
if (!Array.isArray(conversations)) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const seenConversationIds = new Set();
|
||||
const groups = new Map();
|
||||
const now = new Date(Date.now());
|
||||
|
|
@ -108,7 +98,6 @@ export const groupConversationsByDate = (
|
|||
} else {
|
||||
date = now;
|
||||
}
|
||||
|
||||
const groupName = getGroupName(date);
|
||||
if (!groups.has(groupName)) {
|
||||
groups.set(groupName, []);
|
||||
|
|
@ -117,15 +106,12 @@ export const groupConversationsByDate = (
|
|||
});
|
||||
|
||||
const sortedGroups = new Map();
|
||||
|
||||
// Add date groups first
|
||||
dateGroupsSet.forEach((group) => {
|
||||
if (groups.has(group)) {
|
||||
sortedGroups.set(group, groups.get(group));
|
||||
}
|
||||
});
|
||||
|
||||
// Sort and add year/month groups
|
||||
const yearMonthGroups = Array.from(groups.keys())
|
||||
.filter((group) => !dateGroupsSet.has(group))
|
||||
.sort((a, b) => {
|
||||
|
|
@ -133,141 +119,285 @@ export const groupConversationsByDate = (
|
|||
if (yearA !== yearB) {
|
||||
return yearB - yearA;
|
||||
}
|
||||
|
||||
const [monthA, monthB] = [dateKeysReverse[a], dateKeysReverse[b]];
|
||||
const bOrder = monthOrderMap.get(monthB) ?? -1;
|
||||
const aOrder = monthOrderMap.get(monthA) ?? -1;
|
||||
const bOrder = monthOrderMap.get(monthB) ?? -1,
|
||||
aOrder = monthOrderMap.get(monthA) ?? -1;
|
||||
return bOrder - aOrder;
|
||||
});
|
||||
|
||||
yearMonthGroups.forEach((group) => {
|
||||
sortedGroups.set(group, groups.get(group));
|
||||
});
|
||||
|
||||
// Sort conversations within each group
|
||||
sortedGroups.forEach((conversations) => {
|
||||
conversations.sort(
|
||||
(a: TConversation, b: TConversation) =>
|
||||
new Date(b.updatedAt).getTime() - new Date(a.updatedAt).getTime(),
|
||||
);
|
||||
});
|
||||
|
||||
return Array.from(sortedGroups, ([key, value]) => [key, value]);
|
||||
};
|
||||
|
||||
export const addConversation = (
|
||||
data: InfiniteData<ConversationListResponse>,
|
||||
newConversation: TConversation,
|
||||
): ConversationData => {
|
||||
return addData<ConversationListResponse, TConversation>(
|
||||
data,
|
||||
'conversations',
|
||||
newConversation,
|
||||
(page) =>
|
||||
page.conversations.findIndex((c) => c.conversationId === newConversation.conversationId),
|
||||
);
|
||||
export type ConversationCursorData = {
|
||||
conversations: TConversation[];
|
||||
nextCursor?: string | null;
|
||||
};
|
||||
|
||||
export function findPageForConversation(
|
||||
data: ConversationData,
|
||||
conversation: TConversation | { conversationId: string },
|
||||
) {
|
||||
return findPage<ConversationListResponse>(data, (page) =>
|
||||
page.conversations.findIndex((c) => c.conversationId === conversation.conversationId),
|
||||
);
|
||||
}
|
||||
// === InfiniteData helpers for cursor-based convo queries ===
|
||||
|
||||
export const updateConversation = (
|
||||
data: InfiniteData<ConversationListResponse>,
|
||||
newConversation: TConversation,
|
||||
): ConversationData => {
|
||||
return updateData<ConversationListResponse, TConversation>(
|
||||
data,
|
||||
'conversations',
|
||||
newConversation,
|
||||
(page) =>
|
||||
page.conversations.findIndex((c) => c.conversationId === newConversation.conversationId),
|
||||
);
|
||||
};
|
||||
|
||||
export const updateConvoFields = (
|
||||
data: ConversationData,
|
||||
updatedConversation: Partial<TConversation> & Pick<TConversation, 'conversationId'>,
|
||||
keepPosition = false,
|
||||
): ConversationData => {
|
||||
const newData = JSON.parse(JSON.stringify(data));
|
||||
const { pageIndex, index } = findPageForConversation(
|
||||
newData,
|
||||
updatedConversation as { conversationId: string },
|
||||
);
|
||||
if (pageIndex !== -1 && index !== -1) {
|
||||
const oldConversation = newData.pages[pageIndex].conversations[index] as TConversation;
|
||||
|
||||
/**
|
||||
* Do not change the position of the conversation if the tags are updated.
|
||||
*/
|
||||
if (keepPosition) {
|
||||
const updatedConvo = {
|
||||
...oldConversation,
|
||||
...updatedConversation,
|
||||
};
|
||||
newData.pages[pageIndex].conversations[index] = updatedConvo;
|
||||
} else {
|
||||
const updatedConvo = {
|
||||
...oldConversation,
|
||||
...updatedConversation,
|
||||
updatedAt: new Date().toISOString(),
|
||||
};
|
||||
newData.pages[pageIndex].conversations.splice(index, 1);
|
||||
newData.pages[0].conversations.unshift(updatedConvo);
|
||||
}
|
||||
}
|
||||
|
||||
return newData;
|
||||
};
|
||||
|
||||
export const deleteConversation = (
|
||||
data: ConversationData,
|
||||
export function findConversationInInfinite(
|
||||
data: InfiniteData<ConversationCursorData> | undefined,
|
||||
conversationId: string,
|
||||
): ConversationData => {
|
||||
return deleteData<ConversationListResponse, ConversationData>(data, 'conversations', (page) =>
|
||||
page.conversations.findIndex((c) => c.conversationId === conversationId),
|
||||
);
|
||||
};
|
||||
|
||||
export const getConversationById = (
|
||||
data: ConversationData | undefined,
|
||||
conversationId: string | null,
|
||||
): TConversation | undefined => {
|
||||
if (!data || !(conversationId ?? '')) {
|
||||
): TConversation | undefined {
|
||||
if (!data) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
for (const page of data.pages) {
|
||||
const conversation = page.conversations.find((c) => c.conversationId === conversationId);
|
||||
if (conversation) {
|
||||
return conversation;
|
||||
const found = page.conversations.find((c) => c.conversationId === conversationId);
|
||||
if (found) {
|
||||
return found;
|
||||
}
|
||||
}
|
||||
return undefined;
|
||||
};
|
||||
}
|
||||
|
||||
export function updateInfiniteConvoPage(
|
||||
data: InfiniteData<ConversationCursorData> | undefined,
|
||||
conversationId: string,
|
||||
updater: (c: TConversation) => TConversation,
|
||||
): InfiniteData<ConversationCursorData> | undefined {
|
||||
if (!data) {
|
||||
return data;
|
||||
}
|
||||
return {
|
||||
...data,
|
||||
pages: data.pages.map((page) => ({
|
||||
...page,
|
||||
conversations: page.conversations.map((c) =>
|
||||
c.conversationId === conversationId ? updater(c) : c,
|
||||
),
|
||||
})),
|
||||
};
|
||||
}
|
||||
|
||||
export function addConversationToInfinitePages(
|
||||
data: InfiniteData<ConversationCursorData> | undefined,
|
||||
newConversation: TConversation,
|
||||
): InfiniteData<ConversationCursorData> {
|
||||
if (!data) {
|
||||
return {
|
||||
pageParams: [undefined],
|
||||
pages: [{ conversations: [newConversation], nextCursor: null }],
|
||||
};
|
||||
}
|
||||
return {
|
||||
...data,
|
||||
pages: [
|
||||
{ ...data.pages[0], conversations: [newConversation, ...data.pages[0].conversations] },
|
||||
...data.pages.slice(1),
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
export function addConversationToAllConversationsQueries(
|
||||
queryClient: QueryClient,
|
||||
newConversation: TConversation,
|
||||
) {
|
||||
// Find all keys that start with QueryKeys.allConversations
|
||||
const queries = queryClient
|
||||
.getQueryCache()
|
||||
.findAll([QueryKeys.allConversations], { exact: false });
|
||||
|
||||
for (const query of queries) {
|
||||
queryClient.setQueryData<InfiniteData<ConversationCursorData>>(query.queryKey, (old) => {
|
||||
if (
|
||||
!old ||
|
||||
old.pages[0].conversations.some((c) => c.conversationId === newConversation.conversationId)
|
||||
) {
|
||||
return old;
|
||||
}
|
||||
return {
|
||||
...old,
|
||||
pages: [
|
||||
{
|
||||
...old.pages[0],
|
||||
conversations: [newConversation, ...old.pages[0].conversations],
|
||||
},
|
||||
...old.pages.slice(1),
|
||||
],
|
||||
};
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export function removeConvoFromInfinitePages(
|
||||
data: InfiniteData<ConversationCursorData> | undefined,
|
||||
conversationId: string,
|
||||
): InfiniteData<ConversationCursorData> | undefined {
|
||||
if (!data) {
|
||||
return data;
|
||||
}
|
||||
return {
|
||||
...data,
|
||||
pages: data.pages
|
||||
.map((page) => ({
|
||||
...page,
|
||||
conversations: page.conversations.filter((c) => c.conversationId !== conversationId),
|
||||
}))
|
||||
.filter((page) => page.conversations.length > 0),
|
||||
};
|
||||
}
|
||||
|
||||
// Used for partial update (e.g., title, etc.), updating AND possibly bumping to front of visible convos
|
||||
export function updateConvoFieldsInfinite(
|
||||
data: InfiniteData<ConversationCursorData> | undefined,
|
||||
updatedConversation: Partial<TConversation> & { conversationId: string },
|
||||
keepPosition = false,
|
||||
): InfiniteData<ConversationCursorData> | undefined {
|
||||
if (!data) {
|
||||
return data;
|
||||
}
|
||||
let found: TConversation | undefined;
|
||||
let pageIdx = -1,
|
||||
convoIdx = -1;
|
||||
for (let i = 0; i < data.pages.length; ++i) {
|
||||
const idx = data.pages[i].conversations.findIndex(
|
||||
(c) => c.conversationId === updatedConversation.conversationId,
|
||||
);
|
||||
if (idx !== -1) {
|
||||
pageIdx = i;
|
||||
convoIdx = idx;
|
||||
found = data.pages[i].conversations[idx];
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!found) {
|
||||
return data;
|
||||
}
|
||||
|
||||
if (keepPosition) {
|
||||
return {
|
||||
...data,
|
||||
pages: data.pages.map((page, pi) =>
|
||||
pi === pageIdx
|
||||
? {
|
||||
...page,
|
||||
conversations: page.conversations.map((c, ci) =>
|
||||
ci === convoIdx ? { ...c, ...updatedConversation } : c,
|
||||
),
|
||||
}
|
||||
: page,
|
||||
),
|
||||
};
|
||||
} else {
|
||||
const patched = { ...found, ...updatedConversation, updatedAt: new Date().toISOString() };
|
||||
const pages = data.pages.map((page) => ({
|
||||
...page,
|
||||
conversations: page.conversations.filter((c) => c.conversationId !== patched.conversationId),
|
||||
}));
|
||||
|
||||
pages[0].conversations = [patched, ...pages[0].conversations];
|
||||
|
||||
const finalPages = pages.filter((page) => page.conversations.length > 0);
|
||||
return { ...data, pages: finalPages };
|
||||
}
|
||||
}
|
||||
|
||||
export function storeEndpointSettings(conversation: TConversation | null) {
|
||||
if (!conversation) {
|
||||
return;
|
||||
}
|
||||
const { endpoint, model, agentOptions } = conversation;
|
||||
|
||||
if (!endpoint) {
|
||||
return;
|
||||
}
|
||||
|
||||
const lastModel = JSON.parse(localStorage.getItem(LocalStorageKeys.LAST_MODEL) ?? '{}');
|
||||
lastModel[endpoint] = model;
|
||||
|
||||
if (endpoint === EModelEndpoint.gptPlugins) {
|
||||
lastModel.secondaryModel = agentOptions?.model ?? model ?? '';
|
||||
}
|
||||
|
||||
localStorage.setItem(LocalStorageKeys.LAST_MODEL, JSON.stringify(lastModel));
|
||||
}
|
||||
|
||||
// Add
|
||||
export function addConvoToAllQueries(queryClient: QueryClient, newConvo: TConversation) {
|
||||
const queries = queryClient
|
||||
.getQueryCache()
|
||||
.findAll([QueryKeys.allConversations], { exact: false });
|
||||
|
||||
for (const query of queries) {
|
||||
queryClient.setQueryData<InfiniteData<ConversationCursorData>>(query.queryKey, (oldData) => {
|
||||
if (!oldData) {
|
||||
return oldData;
|
||||
}
|
||||
if (
|
||||
oldData.pages.some((p) =>
|
||||
p.conversations.some((c) => c.conversationId === newConvo.conversationId),
|
||||
)
|
||||
) {
|
||||
return oldData;
|
||||
}
|
||||
return {
|
||||
...oldData,
|
||||
pages: [
|
||||
{
|
||||
...oldData.pages[0],
|
||||
conversations: [newConvo, ...oldData.pages[0].conversations],
|
||||
},
|
||||
...oldData.pages.slice(1),
|
||||
],
|
||||
};
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Update
|
||||
export function updateConvoInAllQueries(
|
||||
queryClient: QueryClient,
|
||||
conversationId: string,
|
||||
updater: (c: TConversation) => TConversation,
|
||||
) {
|
||||
const queries = queryClient
|
||||
.getQueryCache()
|
||||
.findAll([QueryKeys.allConversations], { exact: false });
|
||||
|
||||
for (const query of queries) {
|
||||
queryClient.setQueryData<InfiniteData<ConversationCursorData>>(query.queryKey, (oldData) => {
|
||||
if (!oldData) {
|
||||
return oldData;
|
||||
}
|
||||
return {
|
||||
...oldData,
|
||||
pages: oldData.pages.map((page) => ({
|
||||
...page,
|
||||
conversations: page.conversations.map((c) =>
|
||||
c.conversationId === conversationId ? updater(c) : c,
|
||||
),
|
||||
})),
|
||||
};
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Remove
|
||||
export function removeConvoFromAllQueries(queryClient: QueryClient, conversationId: string) {
|
||||
const queries = queryClient
|
||||
.getQueryCache()
|
||||
.findAll([QueryKeys.allConversations], { exact: false });
|
||||
|
||||
for (const query of queries) {
|
||||
queryClient.setQueryData<InfiniteData<ConversationCursorData>>(query.queryKey, (oldData) => {
|
||||
if (!oldData) {
|
||||
return oldData;
|
||||
}
|
||||
return {
|
||||
...oldData,
|
||||
pages: oldData.pages
|
||||
.map((page) => ({
|
||||
...page,
|
||||
conversations: page.conversations.filter((c) => c.conversationId !== conversationId),
|
||||
}))
|
||||
.filter((page) => page.conversations.length > 0),
|
||||
};
|
||||
});
|
||||
}
|
||||
}
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue