refactor: Optimize Rendering Performance for Icons, Conversations (#5234)

* refactor: HoverButtons and Fork components to use explicit props

* refactor: improve typing for Fork Component

* fix: memoize SpecIcon to avoid unnecessary re-renders

* feat: introduce URLIcon component and update SpecIcon for improved icon handling

* WIP: optimizing icons

* refactor: simplify modelLabel assignment in Message components

* refactor: memoize ConvoOptions component to optimize rendering performance
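
The memoization work above follows the usual React pattern: wrap pure presentational components (SpecIcon, URLIcon, ConvoOptions) in React.memo so they skip re-rendering while a parent, such as a streaming message, updates. A minimal sketch of that pattern, using hypothetical props rather than the actual SpecIcon/URLIcon implementation:

import React, { memo } from 'react';

// Hypothetical props for illustration; the real icon components in the client
// code define their own prop types.
type IconProps = {
  iconURL?: string;
  alt?: string;
  size?: number;
};

// React.memo performs a shallow prop comparison, so this component only
// re-renders when iconURL, alt, or size actually change.
const URLIcon = memo(function URLIcon({ iconURL, alt = 'icon', size = 20 }: IconProps) {
  if (!iconURL) {
    return null;
  }
  return <img src={iconURL} alt={alt} width={size} height={size} />;
});

export default URLIcon;

Memoization only pays off when props keep a stable identity, which is the likely motivation for the explicit-props refactor of HoverButtons and Fork noted above.
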
Author: Danny Avila, 2025-01-09 15:40:10 -05:00 (committed by GitHub)
Commit: 0f95604a67 (parent 687ab32bd3)
19 changed files with 206 additions and 171 deletions


@@ -220,6 +220,7 @@ export default function useChatFunctions({
isCreatedByUser: false,
isEdited: isEditOrContinue,
iconURL: convo.iconURL,
model: convo.model,
error: false,
};
@@ -254,6 +255,7 @@
currentMessages = currentMessages.filter((msg) => msg.messageId !== responseMessageId);
}
logger.log('message_state', initialResponse);
const submission: TSubmission = {
conversation: {
...conversation,

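The first hunk above (@@ -220,6 +220,7 @@) grows by one line, and the model: convo.model assignment is presumably the addition: it records the conversation's model on the initial placeholder response message so the UI can read a model label from the message itself. A hypothetical helper illustrating that lookup order, assuming both TMessage and TConversation expose a model field; this is not the actual Message component code:

import type { TConversation, TMessage } from 'librechat-data-provider';

// Illustrative only: prefer the model recorded on the message, then fall back
// to the active conversation, so older messages keep the label they were
// generated with.
function getModelLabel(message?: TMessage | null, conversation?: TConversation | null): string {
  return message?.model ?? conversation?.model ?? '';
}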

@@ -54,11 +54,11 @@ export default function useMessageProcess({ message }: { message?: TMessage | nu
latestText.current &&
convoId !== latestText.current.split(Constants.COMMON_DIVIDER)[2])
) {
logger.log('[useMessageProcess] Setting latest message: ', logInfo);
logger.log('latest_message', '[useMessageProcess] Setting latest message; logInfo:', logInfo);
latestText.current = textKey;
setLatestMessage({ ...message });
} else {
logger.log('No change in latest message', logInfo);
logger.log('latest_message', 'No change in latest message; logInfo', logInfo);
}
}, [hasNoChildren, message, setLatestMessage, conversation?.conversationId]);

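The logger calls in this hunk gain a leading tag ('message_state' earlier, 'latest_message' here), which points to a tag- or namespace-filtered debug logger. A hypothetical sketch of that pattern — the assumed localStorage key and filtering logic are illustrative, not LibreChat's actual logger implementation:

// Illustrative tag-filtered logger: only tags enabled via a comma-separated
// localStorage key (assumed name 'debug:tags') are printed.
const enabledTags = new Set(
  (typeof localStorage !== 'undefined' ? localStorage.getItem('debug:tags') ?? '' : '')
    .split(',')
    .map((tag) => tag.trim())
    .filter(Boolean),
);

export const logger = {
  log(tag: string, ...args: unknown[]) {
    if (enabledTags.has(tag) || enabledTags.has('*')) {
      console.log(`[${tag}]`, ...args);
    }
  },
};

Usage mirroring the diff: logger.log('latest_message', '[useMessageProcess] Setting latest message; logInfo:', logInfo);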

@@ -1,28 +1,28 @@
import type { TMessage } from 'librechat-data-provider';
import { EModelEndpoint, isAssistantsEndpoint } from 'librechat-data-provider';
type TUseGenerations = {
error?: boolean;
endpoint?: string;
message?: TMessage;
isSubmitting: boolean;
messageId?: string;
isEditing?: boolean;
latestMessage: TMessage | null;
isSubmitting: boolean;
searchResult?: boolean;
finish_reason?: string;
latestMessageId?: string;
isCreatedByUser?: boolean;
};
export default function useGenerationsByLatest({
error = false,
endpoint,
message,
isSubmitting,
messageId,
isEditing = false,
latestMessage,
isSubmitting,
searchResult = false,
finish_reason = '',
latestMessageId,
isCreatedByUser = false,
}: TUseGenerations) {
const {
messageId,
searchResult = false,
error = false,
finish_reason = '',
isCreatedByUser = false,
} = message ?? {};
const isEditableEndpoint = Boolean(
[
EModelEndpoint.openAI,
@@ -37,7 +37,7 @@ export default function useGenerationsByLatest({
);
const continueSupported =
latestMessage?.messageId === messageId &&
latestMessageId === messageId &&
finish_reason &&
finish_reason !== 'stop' &&
!isEditing &&