💽 refactor(client): Optimize ModelsConfig Query Cache (#2330)

* refactor(client): remove double caching of models via recoil to rely exclusively on react-query

* chore(useConversation): add modelsQuery.data dep to callback
Danny Avila authored on 2024-04-05 17:08:37 -04:00, committed by GitHub
parent fb80af05be
commit f6a84887e1
11 changed files with 76 additions and 94 deletions
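
The shape of the change: the client previously fetched the model list with React Query and then mirrored the result into a Recoil atom (store.modelsConfig), leaving two copies to keep in sync. After this commit, the React Query cache is the single source of truth and components read it directly through useGetModelsQuery(). A minimal sketch of the consumer pattern, with an illustrative component name that is not taken from the diff:

import { useGetModelsQuery } from 'librechat-data-provider/react-query';

// Illustrative consumer: reads the model list for one endpoint straight from
// the query cache; no Recoil atom is involved anymore.
export default function ExampleModelList({ endpoint }: { endpoint: string }) {
  const modelsQuery = useGetModelsQuery();
  // TModelsConfig maps an endpoint key to its array of model names.
  const models = modelsQuery.data?.[endpoint] ?? [];
  return (
    <select>
      {models.map((model) => (
        <option key={model} value={model}>
          {model}
        </option>
      ))}
    </select>
  );
}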

View file

@@ -1,5 +1,6 @@
 import { useRecoilValue } from 'recoil';
 import { SettingsViews } from 'librechat-data-provider';
+import { useGetModelsQuery } from 'librechat-data-provider/react-query';
 import type { TSettingsProps } from '~/common';
 import { getSettings } from './Settings';
 import { cn } from '~/utils';
@@ -12,7 +13,7 @@ export default function Settings({
   className = '',
   isMultiChat = false,
 }: TSettingsProps & { isMultiChat?: boolean }) {
-  const modelsConfig = useRecoilValue(store.modelsConfig);
+  const modelsQuery = useGetModelsQuery();
   const currentSettingsView = useRecoilValue(store.currentSettingsView);
   if (!conversation?.endpoint || currentSettingsView !== SettingsViews.default) {
     return null;
@@ -20,7 +21,7 @@ export default function Settings({
   const { settings, multiViewSettings } = getSettings(isMultiChat);
   const { endpoint: _endpoint, endpointType } = conversation;
-  const models = modelsConfig?.[_endpoint] ?? [];
+  const models = modelsQuery?.data?.[_endpoint] ?? [];
   const endpoint = endpointType ?? _endpoint;
   const OptionComponent = settings[endpoint];

View file

@@ -1,8 +1,7 @@
-import { useRecoilValue } from 'recoil';
 import type { TConversation } from 'librechat-data-provider';
 import type { TSetOption } from '~/common';
 import { options, multiChatOptions } from './options';
-import store from '~/store';
+import { useGetModelsQuery } from 'librechat-data-provider/react-query';
 
 type TGoogleProps = {
   showExamples: boolean;
@@ -23,13 +22,14 @@ export default function ModelSelect({
   isMultiChat = false,
   showAbove = true,
 }: TSelectProps) {
-  const modelsConfig = useRecoilValue(store.modelsConfig);
+  const modelsQuery = useGetModelsQuery();
   if (!conversation?.endpoint) {
     return null;
   }
   const { endpoint: _endpoint, endpointType } = conversation;
-  const models = modelsConfig?.[_endpoint] ?? [];
+  const models = modelsQuery?.data?.[_endpoint] ?? [];
   const endpoint = endpointType ?? _endpoint;
   const OptionComponent = isMultiChat ? multiChatOptions[endpoint] : options[endpoint];

View file

@@ -14,7 +14,6 @@ type TempOverrideType = Record<string, unknown> & {
 };
 
 export default function useConfigOverride() {
-  const setModelsConfig = useSetRecoilState(store.modelsConfig);
   const setEndpointsQueryEnabled = useSetRecoilState(store.endpointsQueryEnabled);
   const overrideQuery = useGetEndpointsConfigOverride({
     staleTime: Infinity,
@@ -33,10 +32,9 @@ export default function useConfigOverride() {
       if (modelsConfig) {
         await queryClient.cancelQueries([QueryKeys.models]);
         queryClient.setQueryData([QueryKeys.models], modelsConfig);
-        setModelsConfig(modelsConfig);
       }
     },
-    [queryClient, setEndpointsQueryEnabled, setModelsConfig],
+    [queryClient, setEndpointsQueryEnabled],
   );
 
   useEffect(() => {
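
With the Recoil mirror gone, applying a models override only needs to seed the React Query cache; every useGetModelsQuery() subscriber re-renders from that one write. A minimal sketch of the seeding step, assuming QueryKeys and TModelsConfig are exported from the package root as they are elsewhere in the client:

import { QueryClient } from '@tanstack/react-query';
import { QueryKeys } from 'librechat-data-provider';
import type { TModelsConfig } from 'librechat-data-provider';

// Illustrative helper mirroring the cache write above.
export async function seedModelsCache(queryClient: QueryClient, modelsConfig: TModelsConfig) {
  // Cancel any in-flight /api/models request so it cannot overwrite the override.
  await queryClient.cancelQueries([QueryKeys.models]);
  // Write the override into the cache; useGetModelsQuery() consumers pick it up immediately.
  queryClient.setQueryData([QueryKeys.models], modelsConfig);
}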

View file

@@ -1,6 +1,6 @@
 import { useCallback } from 'react';
 import { useSetRecoilState, useResetRecoilState, useRecoilCallback } from 'recoil';
-import { useGetEndpointsQuery } from 'librechat-data-provider/react-query';
+import { useGetEndpointsQuery, useGetModelsQuery } from 'librechat-data-provider/react-query';
 import type {
   TConversation,
   TMessagesAtom,
@@ -16,20 +16,21 @@ import store from '~/store';
 const useConversation = () => {
   const navigate = useOriginNavigate();
   const setConversation = useSetRecoilState(store.conversation);
+  const resetLatestMessage = useResetRecoilState(store.latestMessage);
   const setMessages = useSetRecoilState<TMessagesAtom>(store.messages);
   const setSubmission = useSetRecoilState<TSubmission | null>(store.submission);
-  const resetLatestMessage = useResetRecoilState(store.latestMessage);
   const { data: endpointsConfig = {} as TEndpointsConfig } = useGetEndpointsQuery();
+  const modelsQuery = useGetModelsQuery();
 
   const switchToConversation = useRecoilCallback(
-    ({ snapshot }) =>
+    () =>
       async (
         conversation: TConversation,
         messages: TMessagesAtom = null,
         preset: TPreset | null = null,
         modelsData?: TModelsConfig,
       ) => {
-        const modelsConfig = modelsData ?? snapshot.getLoadable(store.modelsConfig).contents;
+        const modelsConfig = modelsData ?? modelsQuery.data;
         const { endpoint = null } = conversation;
 
         if (endpoint === null) {
@@ -61,7 +62,7 @@ const useConversation = () => {
         navigate('new');
       }
     },
-    [endpointsConfig],
+    [endpointsConfig, modelsQuery.data],
   );
 
   const newConversation = useCallback(
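
Reading modelsQuery.data inside the callback replaces the snapshot.getLoadable(store.modelsConfig).contents escape hatch, which is also why it joins the dependency array: without it, the memoized callback would keep closing over the data from its first render. A minimal sketch of the pattern, with hypothetical names:

import { useCallback } from 'react';
import { useGetModelsQuery } from 'librechat-data-provider/react-query';
import type { TModelsConfig } from 'librechat-data-provider';

// Hypothetical hook: returns the first available model for an endpoint,
// preferring explicitly passed data over the cached query result.
export function useDefaultModel(endpoint: string) {
  const modelsQuery = useGetModelsQuery();

  return useCallback(
    (modelsData?: TModelsConfig) => {
      const modelsConfig = modelsData ?? modelsQuery.data;
      return modelsConfig?.[endpoint]?.[0];
    },
    // Omitting modelsQuery.data here would pin the callback to stale data.
    [endpoint, modelsQuery.data],
  );
}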

View file

@@ -1,6 +1,6 @@
 import { useCallback } from 'react';
 import { EModelEndpoint, FileSources, defaultOrderQuery } from 'librechat-data-provider';
-import { useGetEndpointsQuery } from 'librechat-data-provider/react-query';
+import { useGetEndpointsQuery, useGetModelsQuery } from 'librechat-data-provider/react-query';
 import {
   useSetRecoilState,
   useResetRecoilState,
@@ -35,6 +35,7 @@ const useNewConvo = (index = 0) => {
   const setSubmission = useSetRecoilState<TSubmission | null>(store.submissionByIndex(index));
   const resetLatestMessage = useResetRecoilState(store.latestMessageFamily(index));
   const { data: endpointsConfig = {} as TEndpointsConfig } = useGetEndpointsQuery();
+  const modelsQuery = useGetModelsQuery();
 
   const { data: assistants = [] } = useListAssistantsQuery(defaultOrderQuery, {
     select: (res) =>
@@ -51,7 +52,7 @@
   });
 
   const switchToConversation = useRecoilCallback(
-    ({ snapshot }) =>
+    () =>
       async (
         conversation: TConversation,
         preset: Partial<TPreset> | null = null,
@@ -59,7 +60,7 @@
         buildDefault?: boolean,
         keepLatestMessage?: boolean,
       ) => {
-        const modelsConfig = modelsData ?? snapshot.getLoadable(store.modelsConfig).contents;
+        const modelsConfig = modelsData ?? modelsQuery.data;
         const { endpoint = null } = conversation;
         const buildDefaultConversation = endpoint === null || buildDefault;
         const activePreset =
@@ -137,7 +138,7 @@
         navigate('new');
       }
     },
-    [endpointsConfig, defaultPreset, assistants],
+    [endpointsConfig, defaultPreset, assistants, modelsQuery.data],
   );
 
   const newConversation = useCallback(

View file

@@ -1,4 +1,3 @@
-import { useRecoilValue } from 'recoil';
 import { useEffect, useRef } from 'react';
 import { useParams } from 'react-router-dom';
 import {
@@ -23,16 +22,18 @@ export default function ChatRoute() {
   const { data: startupConfig } = useGetStartupConfig();
   const { conversation } = store.useCreateConversationAtom(index);
-  const modelsQueryEnabled = useRecoilValue(store.modelsQueryEnabled);
   const { isAuthenticated } = useAuthRedirect();
   const { newConversation } = useNewConvo();
   const hasSetConversation = useRef(false);
-  const modelsQuery = useGetModelsQuery({ enabled: isAuthenticated && modelsQueryEnabled });
+  const modelsQuery = useGetModelsQuery({
+    enabled: isAuthenticated,
+    refetchOnMount: 'always',
+  });
   const initialConvoQuery = useGetConvoIdQuery(conversationId ?? '', {
     enabled: isAuthenticated && conversationId !== 'new',
   });
-  const endpointsQuery = useGetEndpointsQuery({ enabled: isAuthenticated && modelsQueryEnabled });
+  const endpointsQuery = useGetEndpointsQuery({ enabled: isAuthenticated });
   const { data: assistants = null } = useListAssistantsQuery(defaultOrderQuery, {
     select: (res) =>
       res.data.map(({ id, name, metadata, model }) => ({ id, name, metadata, model })),
@@ -50,6 +51,7 @@
       conversationId === 'new' &&
       endpointsQuery.data &&
       modelsQuery.data &&
+      !modelsQuery.data?.initial &&
       !hasSetConversation.current
     ) {
       newConversation({ modelsData: modelsQuery.data });
@@ -58,6 +60,7 @@
       initialConvoQuery.data &&
       endpointsQuery.data &&
       modelsQuery.data &&
+      !modelsQuery.data?.initial &&
      !hasSetConversation.current
     ) {
       newConversation({
@@ -68,10 +71,15 @@
         keepLatestMessage: true,
       });
       hasSetConversation.current = !!assistants;
-    } else if (!hasSetConversation.current && conversationId === 'new' && assistants) {
+    } else if (
+      !hasSetConversation.current &&
+      !modelsQuery.data?.initial &&
+      conversationId === 'new' &&
+      assistants
+    ) {
       newConversation({ modelsData: modelsQuery.data });
       hasSetConversation.current = true;
-    } else if (!hasSetConversation.current && assistants) {
+    } else if (!hasSetConversation.current && !modelsQuery.data?.initial && assistants) {
       newConversation({
         template: initialConvoQuery.data,
         preset: initialConvoQuery.data as TPreset,
@@ -80,8 +88,15 @@
       });
       hasSetConversation.current = true;
     }
+    /* Creates infinite render if all dependencies included */
     // eslint-disable-next-line react-hooks/exhaustive-deps
-  }, [initialConvoQuery.data, modelsQuery.data, endpointsQuery.data, assistants]);
+  }, [
+    initialConvoQuery.data,
+    modelsQuery.data,
+    endpointsQuery.data,
+    assistants,
+    conversation?.model,
+  ]);
 
   if (endpointsQuery.isLoading || modelsQuery.isLoading) {
     return <Spinner className="m-auto text-black dark:text-white" />;
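
The repeated !modelsQuery.data?.initial guards rely on the query now being seeded with initialModelsConfig (see the data-provider changes below), which carries an initial: [] marker key; once the real /api/models response replaces the placeholder, that key is gone and conversation setup proceeds. A minimal sketch of the guard, assuming the marker key is the only way placeholder data is distinguished:

import type { TModelsConfig } from 'librechat-data-provider';

// Illustrative predicate: seeded placeholder data is flagged by the `initial`
// key, so only unflagged data counts as a real server response.
export const isRealModelsData = (data?: TModelsConfig): boolean =>
  Boolean(data) && !data?.initial;

// Usage sketch inside the effect above:
//   if (isRealModelsData(modelsQuery.data)) {
//     newConversation({ modelsData: modelsQuery.data });
//   }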

View file

@@ -1,23 +1,14 @@
-/* eslint-disable react-hooks/exhaustive-deps */
 import { useEffect, useState } from 'react';
-import { Outlet, useLocation } from 'react-router-dom';
+import { Outlet } from 'react-router-dom';
 import { useRecoilValue, useSetRecoilState } from 'recoil';
-import { useGetModelsQuery, useGetSearchEnabledQuery } from 'librechat-data-provider/react-query';
+import { useGetSearchEnabledQuery } from 'librechat-data-provider/react-query';
 import type { ContextType } from '~/common';
-import {
-  useAuthContext,
-  useServerStream,
-  useConversation,
-  useAssistantsMap,
-  useFileMap,
-} from '~/hooks';
+import { useAuthContext, useServerStream, useAssistantsMap, useFileMap } from '~/hooks';
 import { AssistantsMapContext, FileMapContext } from '~/Providers';
 import { Nav, MobileNav } from '~/components/Nav';
 import store from '~/store';
 
 export default function Root() {
-  const location = useLocation();
-  const { newConversation } = useConversation();
   const { isAuthenticated } = useAuthContext();
   const [navVisible, setNavVisible] = useState(() => {
     const savedNavVisible = localStorage.getItem('navVisible');
@@ -27,26 +18,11 @@ export default function Root() {
   const submission = useRecoilValue(store.submission);
   useServerStream(submission ?? null);
 
-  const modelsQueryEnabled = useRecoilValue(store.modelsQueryEnabled);
   const setIsSearchEnabled = useSetRecoilState(store.isSearchEnabled);
-  const setModelsConfig = useSetRecoilState(store.modelsConfig);
   const fileMap = useFileMap({ isAuthenticated });
   const assistantsMap = useAssistantsMap({ isAuthenticated });
 
   const searchEnabledQuery = useGetSearchEnabledQuery({ enabled: isAuthenticated });
-  const modelsQuery = useGetModelsQuery({ enabled: isAuthenticated && modelsQueryEnabled });
-
-  useEffect(() => {
-    if (modelsQuery.data && location.state?.from?.pathname.includes('/chat')) {
-      setModelsConfig(modelsQuery.data);
-      // Note: passing modelsQuery.data prevents navigation
-      newConversation({}, undefined, modelsQuery.data);
-    } else if (modelsQuery.data) {
-      setModelsConfig(modelsQuery.data);
-    } else if (modelsQuery.isError) {
-      console.error('Failed to get models', modelsQuery.error);
-    }
-  }, [modelsQuery.data, modelsQuery.isError]);
 
   useEffect(() => {
     if (searchEnabledQuery.data) {
@@ -54,7 +30,12 @@
     } else if (searchEnabledQuery.isError) {
       console.error('Failed to get search enabled', searchEnabledQuery.error);
     }
-  }, [searchEnabledQuery.data, searchEnabledQuery.isError]);
+  }, [
+    searchEnabledQuery.data,
+    searchEnabledQuery.error,
+    searchEnabledQuery.isError,
+    setIsSearchEnabled,
+  ]);
 
   if (!isAuthenticated) {
     return null;

View file

@@ -2,7 +2,6 @@ import conversation from './conversation';
 import conversations from './conversations';
 import families from './families';
 import endpoints from './endpoints';
-import models from './models';
 import user from './user';
 import text from './text';
 import toast from './toast';
@@ -17,7 +16,6 @@ export default {
   ...conversation,
   ...conversations,
   ...endpoints,
-  ...models,
   ...user,
   ...text,
   ...toast,

View file

@@ -1,33 +0,0 @@
-import { atom } from 'recoil';
-import { EModelEndpoint, defaultModels } from 'librechat-data-provider';
-import type { TModelsConfig } from 'librechat-data-provider';
-
-const fitlerAssistantModels = (str: string) => {
-  return /gpt-4|gpt-3\.5/i.test(str) && !/vision|instruct/i.test(str);
-};
-
-const openAIModels = defaultModels[EModelEndpoint.openAI];
-
-const modelsConfig = atom<TModelsConfig>({
-  key: 'models',
-  default: {
-    [EModelEndpoint.openAI]: openAIModels,
-    [EModelEndpoint.assistants]: openAIModels.filter(fitlerAssistantModels),
-    [EModelEndpoint.gptPlugins]: openAIModels,
-    [EModelEndpoint.azureOpenAI]: openAIModels,
-    [EModelEndpoint.bingAI]: ['BingAI', 'Sydney'],
-    [EModelEndpoint.chatGPTBrowser]: ['text-davinci-002-render-sha'],
-    [EModelEndpoint.google]: defaultModels[EModelEndpoint.google],
-    [EModelEndpoint.anthropic]: defaultModels[EModelEndpoint.anthropic],
-  },
-});
-
-const modelsQueryEnabled = atom<boolean>({
-  key: 'modelsQueryEnabled',
-  default: true,
-});
-
-export default {
-  modelsConfig,
-  modelsQueryEnabled,
-};

View file

@@ -3,6 +3,7 @@ import { z } from 'zod';
 import { EModelEndpoint, eModelEndpointSchema } from './schemas';
 import { fileConfigSchema } from './file-config';
 import { FileSources } from './types/files';
+import { TModelsConfig } from './types';
 
 export const defaultSocialLogins = ['google', 'facebook', 'openid', 'github', 'discord'];
@@ -332,6 +333,24 @@ export const defaultModels = {
   ],
 };
 
+const fitlerAssistantModels = (str: string) => {
+  return /gpt-4|gpt-3\.5/i.test(str) && !/vision|instruct/i.test(str);
+};
+
+const openAIModels = defaultModels[EModelEndpoint.openAI];
+
+export const initialModelsConfig: TModelsConfig = {
+  initial: [],
+  [EModelEndpoint.openAI]: openAIModels,
+  [EModelEndpoint.assistants]: openAIModels.filter(fitlerAssistantModels),
+  [EModelEndpoint.gptPlugins]: openAIModels,
+  [EModelEndpoint.azureOpenAI]: openAIModels,
+  [EModelEndpoint.bingAI]: ['BingAI', 'Sydney'],
+  [EModelEndpoint.chatGPTBrowser]: ['text-davinci-002-render-sha'],
+  [EModelEndpoint.google]: defaultModels[EModelEndpoint.google],
+  [EModelEndpoint.anthropic]: defaultModels[EModelEndpoint.anthropic],
+};
+
 export const EndpointURLs: { [key in EModelEndpoint]: string } = {
   [EModelEndpoint.openAI]: `/api/ask/${EModelEndpoint.openAI}`,
   [EModelEndpoint.bingAI]: `/api/ask/${EModelEndpoint.bingAI}`,

View file

@@ -6,13 +6,13 @@ import {
   UseMutationResult,
   QueryObserverResult,
 } from '@tanstack/react-query';
-import * as t from '../types';
-import * as s from '../schemas';
-import * as m from '../types/mutations';
-import { defaultOrderQuery } from '../config';
+import { defaultOrderQuery, initialModelsConfig } from '../config';
 import * as dataService from '../data-service';
-import request from '../request';
+import * as m from '../types/mutations';
 import { QueryKeys } from '../keys';
+import request from '../request';
+import * as s from '../schemas';
+import * as t from '../types';
 
 export const useAbortRequestWithMessage = (): UseMutationResult<
   void,
@@ -211,10 +211,11 @@ export const useGetModelsQuery = (
   config?: UseQueryOptions<t.TModelsConfig>,
 ): QueryObserverResult<t.TModelsConfig> => {
   return useQuery<t.TModelsConfig>([QueryKeys.models], () => dataService.getModels(), {
-    staleTime: Infinity,
+    initialData: initialModelsConfig,
     refetchOnWindowFocus: false,
     refetchOnReconnect: false,
     refetchOnMount: false,
+    staleTime: Infinity,
     ...config,
   });
 };
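
Read together with the ChatRoute change above, the caching contract appears to be: initialData: initialModelsConfig means consumers never see undefined, staleTime: Infinity keeps background refetches from clobbering the cache, and the route-level refetchOnMount: 'always' override is what actually fetches fresh data from /api/models. A minimal usage sketch with an illustrative component:

import { useGetModelsQuery } from 'librechat-data-provider/react-query';

// Illustrative consumer: data is always defined thanks to initialData, so the
// placeholder vs. server-data distinction rides entirely on the `initial` key.
export default function ExampleEndpointModels({ endpoint }: { endpoint: string }) {
  const modelsQuery = useGetModelsQuery({ refetchOnMount: 'always' });

  if (modelsQuery.data?.initial) {
    return null; // still showing the seeded defaults, not the server response
  }
  return <span>{(modelsQuery.data?.[endpoint] ?? []).join(', ')}</span>;
}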