Mirror of https://github.com/danny-avila/LibreChat.git
💽 refactor(client): Optimize ModelsConfig Query Cache (#2330)
* refactor(client): remove double caching of models via recoil to rely exclusively on react-query
* chore(useConversation): add modelsQuery.data dep to callback
Parent: fb80af05be
Commit: f6a84887e1
11 changed files with 76 additions and 94 deletions
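In short: instead of mirroring the models payload into a Recoil atom (store.modelsConfig) and reading it with useRecoilValue, consumers now read the react-query cache directly through useGetModelsQuery, which is seeded with initialModelsConfig and kept fresh with staleTime: Infinity. A minimal consumer-side sketch of that pattern follows; the ModelList component and its endpoint prop are illustrative only and not part of this commit:

import { useGetModelsQuery } from 'librechat-data-provider/react-query';

// Hypothetical consumer showing the pattern the diff applies to Settings and
// ModelSelect: read the model list straight from the react-query cache.
export default function ModelList({ endpoint }: { endpoint: string }) {
  const modelsQuery = useGetModelsQuery();
  // modelsQuery.data is a TModelsConfig keyed by endpoint; each entry is a
  // string[] of model names. Fall back to an empty list while data is absent.
  const models = modelsQuery.data?.[endpoint] ?? [];
  return (
    <ul>
      {models.map((model) => (
        <li key={model}>{model}</li>
      ))}
    </ul>
  );
}
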
@@ -1,5 +1,6 @@
 import { useRecoilValue } from 'recoil';
 import { SettingsViews } from 'librechat-data-provider';
+import { useGetModelsQuery } from 'librechat-data-provider/react-query';
 import type { TSettingsProps } from '~/common';
 import { getSettings } from './Settings';
 import { cn } from '~/utils';
@@ -12,7 +13,7 @@ export default function Settings({
   className = '',
   isMultiChat = false,
 }: TSettingsProps & { isMultiChat?: boolean }) {
-  const modelsConfig = useRecoilValue(store.modelsConfig);
+  const modelsQuery = useGetModelsQuery();
   const currentSettingsView = useRecoilValue(store.currentSettingsView);
   if (!conversation?.endpoint || currentSettingsView !== SettingsViews.default) {
     return null;
@@ -20,7 +21,7 @@ export default function Settings({
   const { settings, multiViewSettings } = getSettings(isMultiChat);
   const { endpoint: _endpoint, endpointType } = conversation;
-  const models = modelsConfig?.[_endpoint] ?? [];
+  const models = modelsQuery?.data?.[_endpoint] ?? [];
   const endpoint = endpointType ?? _endpoint;
   const OptionComponent = settings[endpoint];

@@ -1,8 +1,7 @@
-import { useRecoilValue } from 'recoil';
 import type { TConversation } from 'librechat-data-provider';
 import type { TSetOption } from '~/common';
 import { options, multiChatOptions } from './options';
-import store from '~/store';
+import { useGetModelsQuery } from 'librechat-data-provider/react-query';

 type TGoogleProps = {
   showExamples: boolean;
@@ -23,13 +22,14 @@ export default function ModelSelect({
   isMultiChat = false,
   showAbove = true,
 }: TSelectProps) {
-  const modelsConfig = useRecoilValue(store.modelsConfig);
+  const modelsQuery = useGetModelsQuery();
+
   if (!conversation?.endpoint) {
     return null;
   }

   const { endpoint: _endpoint, endpointType } = conversation;
-  const models = modelsConfig?.[_endpoint] ?? [];
+  const models = modelsQuery?.data?.[_endpoint] ?? [];
   const endpoint = endpointType ?? _endpoint;

   const OptionComponent = isMultiChat ? multiChatOptions[endpoint] : options[endpoint];

@@ -14,7 +14,6 @@ type TempOverrideType = Record<string, unknown> & {
 };

 export default function useConfigOverride() {
-  const setModelsConfig = useSetRecoilState(store.modelsConfig);
   const setEndpointsQueryEnabled = useSetRecoilState(store.endpointsQueryEnabled);
   const overrideQuery = useGetEndpointsConfigOverride({
     staleTime: Infinity,
@@ -33,10 +32,9 @@ export default function useConfigOverride() {
       if (modelsConfig) {
         await queryClient.cancelQueries([QueryKeys.models]);
         queryClient.setQueryData([QueryKeys.models], modelsConfig);
-        setModelsConfig(modelsConfig);
       }
     },
-    [queryClient, setEndpointsQueryEnabled, setModelsConfig],
+    [queryClient, setEndpointsQueryEnabled],
   );

   useEffect(() => {

@@ -1,6 +1,6 @@
 import { useCallback } from 'react';
 import { useSetRecoilState, useResetRecoilState, useRecoilCallback } from 'recoil';
-import { useGetEndpointsQuery } from 'librechat-data-provider/react-query';
+import { useGetEndpointsQuery, useGetModelsQuery } from 'librechat-data-provider/react-query';
 import type {
   TConversation,
   TMessagesAtom,
@@ -16,20 +16,21 @@ import store from '~/store';
 const useConversation = () => {
   const navigate = useOriginNavigate();
   const setConversation = useSetRecoilState(store.conversation);
-  const resetLatestMessage = useResetRecoilState(store.latestMessage);
   const setMessages = useSetRecoilState<TMessagesAtom>(store.messages);
   const setSubmission = useSetRecoilState<TSubmission | null>(store.submission);
+  const resetLatestMessage = useResetRecoilState(store.latestMessage);
   const { data: endpointsConfig = {} as TEndpointsConfig } = useGetEndpointsQuery();
+  const modelsQuery = useGetModelsQuery();

   const switchToConversation = useRecoilCallback(
-    ({ snapshot }) =>
+    () =>
       async (
         conversation: TConversation,
         messages: TMessagesAtom = null,
         preset: TPreset | null = null,
         modelsData?: TModelsConfig,
       ) => {
-        const modelsConfig = modelsData ?? snapshot.getLoadable(store.modelsConfig).contents;
+        const modelsConfig = modelsData ?? modelsQuery.data;
         const { endpoint = null } = conversation;

         if (endpoint === null) {
@@ -61,7 +62,7 @@ const useConversation = () => {
         navigate('new');
       }
     },
-    [endpointsConfig],
+    [endpointsConfig, modelsQuery.data],
   );

   const newConversation = useCallback(

@@ -1,6 +1,6 @@
 import { useCallback } from 'react';
 import { EModelEndpoint, FileSources, defaultOrderQuery } from 'librechat-data-provider';
-import { useGetEndpointsQuery } from 'librechat-data-provider/react-query';
+import { useGetEndpointsQuery, useGetModelsQuery } from 'librechat-data-provider/react-query';
 import {
   useSetRecoilState,
   useResetRecoilState,
@@ -35,6 +35,7 @@ const useNewConvo = (index = 0) => {
   const setSubmission = useSetRecoilState<TSubmission | null>(store.submissionByIndex(index));
   const resetLatestMessage = useResetRecoilState(store.latestMessageFamily(index));
   const { data: endpointsConfig = {} as TEndpointsConfig } = useGetEndpointsQuery();
+  const modelsQuery = useGetModelsQuery();

   const { data: assistants = [] } = useListAssistantsQuery(defaultOrderQuery, {
     select: (res) =>
@@ -51,7 +52,7 @@ const useNewConvo = (index = 0) => {
   });

   const switchToConversation = useRecoilCallback(
-    ({ snapshot }) =>
+    () =>
       async (
         conversation: TConversation,
         preset: Partial<TPreset> | null = null,
@@ -59,7 +60,7 @@ const useNewConvo = (index = 0) => {
         buildDefault?: boolean,
         keepLatestMessage?: boolean,
       ) => {
-        const modelsConfig = modelsData ?? snapshot.getLoadable(store.modelsConfig).contents;
+        const modelsConfig = modelsData ?? modelsQuery.data;
         const { endpoint = null } = conversation;
         const buildDefaultConversation = endpoint === null || buildDefault;
         const activePreset =
@@ -137,7 +138,7 @@ const useNewConvo = (index = 0) => {
         navigate('new');
       }
     },
-    [endpointsConfig, defaultPreset, assistants],
+    [endpointsConfig, defaultPreset, assistants, modelsQuery.data],
   );

   const newConversation = useCallback(

@@ -1,4 +1,3 @@
-import { useRecoilValue } from 'recoil';
 import { useEffect, useRef } from 'react';
 import { useParams } from 'react-router-dom';
 import {
@@ -23,16 +22,18 @@ export default function ChatRoute() {
   const { data: startupConfig } = useGetStartupConfig();

   const { conversation } = store.useCreateConversationAtom(index);
-  const modelsQueryEnabled = useRecoilValue(store.modelsQueryEnabled);
   const { isAuthenticated } = useAuthRedirect();
   const { newConversation } = useNewConvo();
   const hasSetConversation = useRef(false);

-  const modelsQuery = useGetModelsQuery({ enabled: isAuthenticated && modelsQueryEnabled });
+  const modelsQuery = useGetModelsQuery({
+    enabled: isAuthenticated,
+    refetchOnMount: 'always',
+  });
   const initialConvoQuery = useGetConvoIdQuery(conversationId ?? '', {
     enabled: isAuthenticated && conversationId !== 'new',
   });
-  const endpointsQuery = useGetEndpointsQuery({ enabled: isAuthenticated && modelsQueryEnabled });
+  const endpointsQuery = useGetEndpointsQuery({ enabled: isAuthenticated });
   const { data: assistants = null } = useListAssistantsQuery(defaultOrderQuery, {
     select: (res) =>
       res.data.map(({ id, name, metadata, model }) => ({ id, name, metadata, model })),
@@ -50,6 +51,7 @@
       conversationId === 'new' &&
       endpointsQuery.data &&
       modelsQuery.data &&
+      !modelsQuery.data?.initial &&
       !hasSetConversation.current
     ) {
       newConversation({ modelsData: modelsQuery.data });
@@ -58,6 +60,7 @@
       initialConvoQuery.data &&
       endpointsQuery.data &&
       modelsQuery.data &&
+      !modelsQuery.data?.initial &&
       !hasSetConversation.current
     ) {
       newConversation({
@@ -68,10 +71,15 @@
         keepLatestMessage: true,
       });
       hasSetConversation.current = !!assistants;
-    } else if (!hasSetConversation.current && conversationId === 'new' && assistants) {
+    } else if (
+      !hasSetConversation.current &&
+      !modelsQuery.data?.initial &&
+      conversationId === 'new' &&
+      assistants
+    ) {
       newConversation({ modelsData: modelsQuery.data });
       hasSetConversation.current = true;
-    } else if (!hasSetConversation.current && assistants) {
+    } else if (!hasSetConversation.current && !modelsQuery.data?.initial && assistants) {
       newConversation({
         template: initialConvoQuery.data,
         preset: initialConvoQuery.data as TPreset,
@@ -80,8 +88,15 @@
       });
       hasSetConversation.current = true;
     }
+    /* Creates infinite render if all dependencies included */
     // eslint-disable-next-line react-hooks/exhaustive-deps
-  }, [initialConvoQuery.data, modelsQuery.data, endpointsQuery.data, assistants]);
+  }, [
+    initialConvoQuery.data,
+    modelsQuery.data,
+    endpointsQuery.data,
+    assistants,
+    conversation?.model,
+  ]);

   if (endpointsQuery.isLoading || modelsQuery.isLoading) {
     return <Spinner className="m-auto text-black dark:text-white" />;

@@ -1,23 +1,14 @@
-/* eslint-disable react-hooks/exhaustive-deps */
 import { useEffect, useState } from 'react';
-import { Outlet, useLocation } from 'react-router-dom';
+import { Outlet } from 'react-router-dom';
 import { useRecoilValue, useSetRecoilState } from 'recoil';
-import { useGetModelsQuery, useGetSearchEnabledQuery } from 'librechat-data-provider/react-query';
+import { useGetSearchEnabledQuery } from 'librechat-data-provider/react-query';
 import type { ContextType } from '~/common';
-import {
-  useAuthContext,
-  useServerStream,
-  useConversation,
-  useAssistantsMap,
-  useFileMap,
-} from '~/hooks';
+import { useAuthContext, useServerStream, useAssistantsMap, useFileMap } from '~/hooks';
 import { AssistantsMapContext, FileMapContext } from '~/Providers';
 import { Nav, MobileNav } from '~/components/Nav';
 import store from '~/store';

 export default function Root() {
-  const location = useLocation();
-  const { newConversation } = useConversation();
   const { isAuthenticated } = useAuthContext();
   const [navVisible, setNavVisible] = useState(() => {
     const savedNavVisible = localStorage.getItem('navVisible');
@@ -27,26 +18,11 @@ export default function Root() {
   const submission = useRecoilValue(store.submission);
   useServerStream(submission ?? null);

-  const modelsQueryEnabled = useRecoilValue(store.modelsQueryEnabled);
   const setIsSearchEnabled = useSetRecoilState(store.isSearchEnabled);
-  const setModelsConfig = useSetRecoilState(store.modelsConfig);

   const fileMap = useFileMap({ isAuthenticated });
   const assistantsMap = useAssistantsMap({ isAuthenticated });
   const searchEnabledQuery = useGetSearchEnabledQuery({ enabled: isAuthenticated });
-  const modelsQuery = useGetModelsQuery({ enabled: isAuthenticated && modelsQueryEnabled });
-
-  useEffect(() => {
-    if (modelsQuery.data && location.state?.from?.pathname.includes('/chat')) {
-      setModelsConfig(modelsQuery.data);
-      // Note: passing modelsQuery.data prevents navigation
-      newConversation({}, undefined, modelsQuery.data);
-    } else if (modelsQuery.data) {
-      setModelsConfig(modelsQuery.data);
-    } else if (modelsQuery.isError) {
-      console.error('Failed to get models', modelsQuery.error);
-    }
-  }, [modelsQuery.data, modelsQuery.isError]);

   useEffect(() => {
     if (searchEnabledQuery.data) {
@@ -54,7 +30,12 @@
     } else if (searchEnabledQuery.isError) {
       console.error('Failed to get search enabled', searchEnabledQuery.error);
     }
-  }, [searchEnabledQuery.data, searchEnabledQuery.isError]);
+  }, [
+    searchEnabledQuery.data,
+    searchEnabledQuery.error,
+    searchEnabledQuery.isError,
+    setIsSearchEnabled,
+  ]);

   if (!isAuthenticated) {
     return null;

@@ -2,7 +2,6 @@ import conversation from './conversation';
 import conversations from './conversations';
 import families from './families';
 import endpoints from './endpoints';
-import models from './models';
 import user from './user';
 import text from './text';
 import toast from './toast';
@@ -17,7 +16,6 @@ export default {
   ...conversation,
   ...conversations,
   ...endpoints,
-  ...models,
   ...user,
   ...text,
   ...toast,

@@ -1,33 +0,0 @@
-import { atom } from 'recoil';
-import { EModelEndpoint, defaultModels } from 'librechat-data-provider';
-import type { TModelsConfig } from 'librechat-data-provider';
-
-const fitlerAssistantModels = (str: string) => {
-  return /gpt-4|gpt-3\.5/i.test(str) && !/vision|instruct/i.test(str);
-};
-
-const openAIModels = defaultModels[EModelEndpoint.openAI];
-
-const modelsConfig = atom<TModelsConfig>({
-  key: 'models',
-  default: {
-    [EModelEndpoint.openAI]: openAIModels,
-    [EModelEndpoint.assistants]: openAIModels.filter(fitlerAssistantModels),
-    [EModelEndpoint.gptPlugins]: openAIModels,
-    [EModelEndpoint.azureOpenAI]: openAIModels,
-    [EModelEndpoint.bingAI]: ['BingAI', 'Sydney'],
-    [EModelEndpoint.chatGPTBrowser]: ['text-davinci-002-render-sha'],
-    [EModelEndpoint.google]: defaultModels[EModelEndpoint.google],
-    [EModelEndpoint.anthropic]: defaultModels[EModelEndpoint.anthropic],
-  },
-});
-
-const modelsQueryEnabled = atom<boolean>({
-  key: 'modelsQueryEnabled',
-  default: true,
-});
-
-export default {
-  modelsConfig,
-  modelsQueryEnabled,
-};

@@ -3,6 +3,7 @@ import { z } from 'zod';
 import { EModelEndpoint, eModelEndpointSchema } from './schemas';
 import { fileConfigSchema } from './file-config';
 import { FileSources } from './types/files';
+import { TModelsConfig } from './types';

 export const defaultSocialLogins = ['google', 'facebook', 'openid', 'github', 'discord'];
@@ -332,6 +333,24 @@ export const defaultModels = {
   ],
 };

+const fitlerAssistantModels = (str: string) => {
+  return /gpt-4|gpt-3\.5/i.test(str) && !/vision|instruct/i.test(str);
+};
+
+const openAIModels = defaultModels[EModelEndpoint.openAI];
+
+export const initialModelsConfig: TModelsConfig = {
+  initial: [],
+  [EModelEndpoint.openAI]: openAIModels,
+  [EModelEndpoint.assistants]: openAIModels.filter(fitlerAssistantModels),
+  [EModelEndpoint.gptPlugins]: openAIModels,
+  [EModelEndpoint.azureOpenAI]: openAIModels,
+  [EModelEndpoint.bingAI]: ['BingAI', 'Sydney'],
+  [EModelEndpoint.chatGPTBrowser]: ['text-davinci-002-render-sha'],
+  [EModelEndpoint.google]: defaultModels[EModelEndpoint.google],
+  [EModelEndpoint.anthropic]: defaultModels[EModelEndpoint.anthropic],
+};
+
 export const EndpointURLs: { [key in EModelEndpoint]: string } = {
   [EModelEndpoint.openAI]: `/api/ask/${EModelEndpoint.openAI}`,
   [EModelEndpoint.bingAI]: `/api/ask/${EModelEndpoint.bingAI}`,

@@ -6,13 +6,13 @@ import {
   UseMutationResult,
   QueryObserverResult,
 } from '@tanstack/react-query';
-import * as t from '../types';
-import * as s from '../schemas';
-import * as m from '../types/mutations';
-import { defaultOrderQuery } from '../config';
+import { defaultOrderQuery, initialModelsConfig } from '../config';
 import * as dataService from '../data-service';
-import request from '../request';
+import * as m from '../types/mutations';
 import { QueryKeys } from '../keys';
+import request from '../request';
+import * as s from '../schemas';
+import * as t from '../types';

 export const useAbortRequestWithMessage = (): UseMutationResult<
   void,
@@ -211,10 +211,11 @@ export const useGetModelsQuery = (
   config?: UseQueryOptions<t.TModelsConfig>,
 ): QueryObserverResult<t.TModelsConfig> => {
   return useQuery<t.TModelsConfig>([QueryKeys.models], () => dataService.getModels(), {
-    staleTime: Infinity,
+    initialData: initialModelsConfig,
     refetchOnWindowFocus: false,
     refetchOnReconnect: false,
     refetchOnMount: false,
+    staleTime: Infinity,
     ...config,
   });
 };