Mirror of https://github.com/danny-avila/LibreChat.git (synced 2025-12-20 02:10:15 +01:00)
🤖 feat: OpenAI Assistants v2 (initial support) (#2781)
* 🤖 Assistants V2 Support: Part 1

- Separated Azure Assistants to its own endpoint
- File Search / Vector Store integration is incomplete, but can toggle and use storage from playground
- Code Interpreter resource files can be added but not deleted
- GPT-4o is supported
- Many improvements to the Assistants Endpoint overall

data-provider v2 changes
copy existing route as v1
chore: rename new endpoint to reduce comparison operations and add new azure filesource
api: add azureAssistants part 1
force use of version for assistants/assistantsAzure
chore: switch name back to azureAssistants
refactor type version: string | number
Ensure assistants endpoints have version set
fix: isArchived type issue in ConversationListParams
refactor: update assistants mutations/queries with endpoint/version definitions, update Assistants Map structure
chore: FilePreview component ExtendedFile type assertion
feat: isAssistantsEndpoint helper
chore: remove unused useGenerations
chore(buildTree): type issue
chore(Advanced): type issue (unused component, maybe in future)
first pass for multi-assistant endpoint rewrite
fix(listAssistants): pass params correctly
feat: list separate assistants by endpoint
fix(useTextarea): access assistantMap correctly
fix: assistant endpoint switching, resetting ID
fix: broken during rewrite, selecting assistant mention
fix: set/invalidate assistants endpoint query data correctly
feat: fix issue with assistant ID not being reset correctly
getOpenAIClient helper function
feat: add toast for assistant deletion
fix: assistants delete right after create issue for azure
fix: assistant patching
refactor: actions to use getOpenAIClient
refactor: consolidate logic into helpers file
fix: issue where conversation data was not initially available
v1 chat support
refactor(spendTokens): only early return if completionTokens isNaN
fix(OpenAIClient): ensure spendTokens has all necessary params
refactor: route/controller logic
fix(assistants/initializeClient): use defaultHeaders field
fix: sanitize default operation id
chore: bump openai package
first pass v2 action service
feat: retroactive domain parsing for actions added via v1
feat: delete db records of actions/assistants on openai assistant deletion
chore: remove vision tools from v2 assistants
feat: v2 upload and delete assistant vision images
WIP first pass, thread attachments
fix: show assistant vision files (save local/firebase copy)
v2 image continue
fix: annotations
fix: refine annotations; show analyze as error if no longer submitting before progress reaches 1, and show file_search as retrieval tool
fix: abort run, undefined endpoint issue
refactor: consolidate capabilities logic and anticipate versioning
frontend version 2 changes
fix: query selection and filter
add endpoint to unknown filepath
add file ids to resource, deleting in progress
enable/disable file search
remove version log

* 🤖 Assistants V2 Support: Part 2

🎹 fix: Autocompletion Chrome Bug on Action API Key Input
chore: remove `useOriginNavigate`
chore: set correct OpenAI Storage Source
fix: azure file deletions, instantiate clients by source for deletion
update code interpret files info
feat: deleteResourceFileId
chore: increase poll interval as azure easily rate limits
fix: openai file deletions; TODO: evaluate rejected deletion settled promises to determine which to delete from db records
file source icons
update table file filters
chore: file search info and versioning
fix: retrieval update with necessary tool_resources if specified
fix(useMentions): add optional chaining in case listMap value is undefined
fix: force assistant avatar roundedness
fix: azure assistants, check correct flag
chore: bump data-provider

* fix: merge conflict
* ci: fix backend tests due to new updates
* chore: update .env.example
* meilisearch improvements
* localization updates
* chore: update comparisons
* feat: add additional metadata: endpoint, author ID
* chore: azureAssistants ENDPOINTS exclusion warning
This commit is contained in:
parent af8bcb08d6
commit 1a452121fa

158 changed files with 4184 additions and 1204 deletions
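A recurring theme in the diff below is replacing `endpoint === EModelEndpoint.assistants` comparisons with the new `isAssistantsEndpoint` helper, so that the Azure assistants endpoint is treated the same way. The helper itself lives in librechat-data-provider and is not shown in this commit page; the following is a minimal illustrative sketch of the idea only, not the package's actual implementation:

enum EModelEndpoint {
  assistants = 'assistants',
  azureAssistants = 'azureAssistants',
  openAI = 'openAI',
  // ...other endpoints omitted
}

type AssistantsEndpoint = EModelEndpoint.assistants | EModelEndpoint.azureAssistants;

// One predicate instead of repeated equality checks against a single endpoint.
function isAssistantsEndpoint(endpoint?: string | null): endpoint is AssistantsEndpoint {
  return endpoint === EModelEndpoint.assistants || endpoint === EModelEndpoint.azureAssistants;
}

console.log(isAssistantsEndpoint('azureAssistants')); // true
console.log(isAssistantsEndpoint('openAI')); // false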
@@ -1,2 +1,3 @@
 export { default as useAssistantsMap } from './useAssistantsMap';
 export { default as useSelectAssistant } from './useSelectAssistant';
+export { default as useAssistantListMap } from './useAssistantListMap';

client/src/hooks/Assistants/useAssistantListMap.ts (new file, 44 lines)
@@ -0,0 +1,44 @@
+import { useMemo } from 'react';
+import { EModelEndpoint } from 'librechat-data-provider';
+import type { AssistantListResponse, AssistantsEndpoint } from 'librechat-data-provider';
+import type { AssistantListItem } from '~/common';
+import { useListAssistantsQuery } from '~/data-provider';
+
+const selectAssistantsResponse = (res: AssistantListResponse): AssistantListItem[] =>
+  res.data.map(({ id, name, metadata, model }) => ({
+    id,
+    name: name ?? '',
+    metadata,
+    model,
+  }));
+
+export default function useAssistantListMap<T = AssistantListItem[] | null>(
+  selector: (res: AssistantListResponse) => T = selectAssistantsResponse as (
+    res: AssistantListResponse,
+  ) => T,
+): Record<AssistantsEndpoint, T> {
+  const { data: assistantsList = null } = useListAssistantsQuery(
+    EModelEndpoint.assistants,
+    undefined,
+    {
+      select: selector,
+    },
+  );
+
+  const { data: azureAssistants = null } = useListAssistantsQuery(
+    EModelEndpoint.azureAssistants,
+    undefined,
+    {
+      select: selector,
+    },
+  );
+
+  const assistantListMap = useMemo(() => {
+    return {
+      [EModelEndpoint.assistants]: assistantsList as T,
+      [EModelEndpoint.azureAssistants]: azureAssistants as T,
+    };
+  }, [assistantsList, azureAssistants]);
+
+  return assistantListMap;
+}
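Illustrative usage of the new hook (the consumer below is hypothetical, not part of this commit; names follow the file above): the return value is keyed by assistants endpoint, and each entry holds whatever the selector produced, or null while that list has not loaded.

import { EModelEndpoint } from 'librechat-data-provider';
import useAssistantListMap from '~/hooks/Assistants/useAssistantListMap';

// Hypothetical consumer: collect display names of the Azure-hosted assistants.
export default function useAzureAssistantNames(): string[] {
  const listMap = useAssistantListMap();
  // With the default selector, each entry is AssistantListItem[] | null.
  return (listMap[EModelEndpoint.azureAssistants] ?? []).map((assistant) => assistant.name);
}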

@@ -1,12 +1,28 @@
-import { defaultOrderQuery } from 'librechat-data-provider';
+import { EModelEndpoint } from 'librechat-data-provider';
+import type { TAssistantsMap } from 'librechat-data-provider';
 import { useListAssistantsQuery } from '~/data-provider';
 import { mapAssistants } from '~/utils';
 
-export default function useAssistantsMap({ isAuthenticated }: { isAuthenticated: boolean }) {
-  const { data: assistantMap = {} } = useListAssistantsQuery(defaultOrderQuery, {
+export default function useAssistantsMap({
+  isAuthenticated,
+}: {
+  isAuthenticated: boolean;
+}): TAssistantsMap {
+  const { data: assistants = {} } = useListAssistantsQuery(EModelEndpoint.assistants, undefined, {
     select: (res) => mapAssistants(res.data),
     enabled: isAuthenticated,
   });
+  const { data: azureAssistants = {} } = useListAssistantsQuery(
+    EModelEndpoint.azureAssistants,
+    undefined,
+    {
+      select: (res) => mapAssistants(res.data),
+      enabled: isAuthenticated,
+    },
+  );
 
-  return assistantMap;
+  return {
+    [EModelEndpoint.assistants]: assistants,
+    [EModelEndpoint.azureAssistants]: azureAssistants,
+  };
 }
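A standalone sketch of the two-level map this hook now returns (simplified types, illustrative only): downstream hooks in this commit index it first by endpoint and then by assistant id, i.e. assistantMap?.[endpoint]?.[assistant_id].

type Assistant = { id: string; model: string; name?: string | null };
type AssistantsEndpoint = 'assistants' | 'azureAssistants';
type TAssistantsMap = Record<AssistantsEndpoint, Record<string, Assistant>>;

// Hypothetical helper mirroring the lookup pattern used across the updated hooks.
function findAssistant(
  map: TAssistantsMap,
  endpoint: AssistantsEndpoint,
  assistantId?: string,
): Assistant | undefined {
  return assistantId ? map[endpoint]?.[assistantId] : undefined;
}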

@@ -1,32 +1,30 @@
 import { useCallback } from 'react';
-import { EModelEndpoint, defaultOrderQuery } from 'librechat-data-provider';
-import type { TConversation, TPreset } from 'librechat-data-provider';
+import { isAssistantsEndpoint } from 'librechat-data-provider';
+import type { AssistantsEndpoint, TConversation, TPreset } from 'librechat-data-provider';
 import useDefaultConvo from '~/hooks/Conversations/useDefaultConvo';
-import { useListAssistantsQuery } from '~/data-provider';
 import { useChatContext } from '~/Providers/ChatContext';
+import useAssistantListMap from './useAssistantListMap';
 import { mapAssistants } from '~/utils';
 
-export default function useSelectAssistant() {
+export default function useSelectAssistant(endpoint: AssistantsEndpoint) {
   const getDefaultConversation = useDefaultConvo();
   const { conversation, newConversation } = useChatContext();
-  const { data: assistantMap = {} } = useListAssistantsQuery(defaultOrderQuery, {
-    select: (res) => mapAssistants(res.data),
-  });
+  const assistantMap = useAssistantListMap((res) => mapAssistants(res.data));
 
   const onSelect = useCallback(
     (value: string) => {
-      const assistant = assistantMap?.[value];
+      const assistant = assistantMap?.[endpoint]?.[value];
       if (!assistant) {
        return;
       }
      const template: Partial<TPreset | TConversation> = {
-        endpoint: EModelEndpoint.assistants,
+        endpoint,
        assistant_id: assistant.id,
        model: assistant.model,
        conversationId: 'new',
      };
 
-      if (conversation?.endpoint === EModelEndpoint.assistants) {
+      if (isAssistantsEndpoint(conversation?.endpoint)) {
        const currentConvo = getDefaultConversation({
          conversation: { ...(conversation ?? {}) },
          preset: template,

@@ -44,7 +42,7 @@ export default function useSelectAssistant() {
        preset: template as Partial<TPreset>,
      });
     },
-    [assistantMap, conversation, getDefaultConversation, newConversation],
+    [endpoint, assistantMap, conversation, getDefaultConversation, newConversation],
   );
 
   return { onSelect };
@@ -1,4 +1,5 @@
 import { useCallback } from 'react';
+import { useNavigate } from 'react-router-dom';
 import { useSetRecoilState, useResetRecoilState, useRecoilCallback } from 'recoil';
 import { useGetEndpointsQuery, useGetModelsQuery } from 'librechat-data-provider/react-query';
 import type {

@@ -10,11 +11,10 @@ import type {
   TEndpointsConfig,
 } from 'librechat-data-provider';
 import { buildDefaultConvo, getDefaultEndpoint, getEndpointField } from '~/utils';
-import useOriginNavigate from '../useOriginNavigate';
 import store from '~/store';
 
 const useConversation = () => {
-  const navigate = useOriginNavigate();
+  const navigate = useNavigate();
   const setConversation = useSetRecoilState(store.conversation);
   const resetLatestMessage = useResetRecoilState(store.latestMessage);
   const setMessages = useSetRecoilState<TMessagesAtom>(store.messages);

@@ -59,7 +59,7 @@ const useConversation = () => {
      resetLatestMessage();
 
      if (conversation.conversationId === 'new' && !modelsData) {
-        navigate('new');
+        navigate('/c/new');
      }
    },
    [endpointsConfig, modelsQuery.data],
@@ -1,14 +1,14 @@
+import { useNavigate } from 'react-router-dom';
 import { useQueryClient } from '@tanstack/react-query';
 import { useSetRecoilState, useResetRecoilState } from 'recoil';
 import { QueryKeys, EModelEndpoint, LocalStorageKeys } from 'librechat-data-provider';
 import type { TConversation, TEndpointsConfig, TModelsConfig } from 'librechat-data-provider';
 import { buildDefaultConvo, getDefaultEndpoint, getEndpointField } from '~/utils';
-import useOriginNavigate from '../useOriginNavigate';
 import store from '~/store';
 
 const useNavigateToConvo = (index = 0) => {
+  const navigate = useNavigate();
   const queryClient = useQueryClient();
-  const navigate = useOriginNavigate();
   const { setConversation } = store.useCreateConversationAtom(index);
   const setSubmission = useSetRecoilState(store.submissionByIndex(index));
   const resetLatestMessage = useResetRecoilState(store.latestMessageFamily(index));

@@ -48,7 +48,7 @@ const useNavigateToConvo = (index = 0) => {
      });
    }
    setConversation(convo);
-    navigate(convo?.conversationId);
+    navigate(`/c/${convo.conversationId ?? 'new'}`);
  };
 
  const navigateWithLastTools = (conversation: TConversation) => {
@@ -3,7 +3,7 @@ import exportFromJSON from 'export-from-json';
 import { useCallback, useEffect, useRef } from 'react';
 import { useQueryClient } from '@tanstack/react-query';
 import { useRecoilState, useSetRecoilState, useRecoilValue } from 'recoil';
-import { QueryKeys, modularEndpoints, EModelEndpoint } from 'librechat-data-provider';
+import { QueryKeys, modularEndpoints, isAssistantsEndpoint } from 'librechat-data-provider';
 import { useCreatePresetMutation, useGetModelsQuery } from 'librechat-data-provider/react-query';
 import type { TPreset, TEndpointsConfig } from 'librechat-data-provider';
 import {

@@ -174,8 +174,8 @@ export default function usePresets() {
    const currentEndpointType = getEndpointField(endpointsConfig, endpoint, 'type');
    const endpointType = getEndpointField(endpointsConfig, newPreset.endpoint, 'type');
    const isAssistantSwitch =
-      newPreset.endpoint === EModelEndpoint.assistants &&
-      conversation?.endpoint === EModelEndpoint.assistants &&
+      isAssistantsEndpoint(newPreset.endpoint) &&
+      isAssistantsEndpoint(conversation?.endpoint) &&
      conversation?.endpoint === newPreset.endpoint;
 
    if (
@@ -1,5 +1,5 @@
 import debounce from 'lodash/debounce';
-import { FileSources } from 'librechat-data-provider';
+import { FileSources, EToolResources } from 'librechat-data-provider';
 import { useCallback, useState, useEffect } from 'react';
 import type {
   BatchFile,

@@ -16,18 +16,20 @@ type FileMapSetter = GenericSetter<Map<string, ExtendedFile>>;
 const useFileDeletion = ({
   mutateAsync,
   assistant_id,
+  tool_resource,
 }: {
   mutateAsync: UseMutateAsyncFunction<DeleteFilesResponse, unknown, DeleteFilesBody, unknown>;
   assistant_id?: string;
+  tool_resource?: EToolResources;
 }) => {
   // eslint-disable-next-line @typescript-eslint/no-unused-vars
   const [_batch, setFileDeleteBatch] = useState<BatchFile[]>([]);
   const setFilesToDelete = useSetFilesToDelete();
 
   const executeBatchDelete = useCallback(
-    (filesToDelete: BatchFile[], assistant_id?: string) => {
-      console.log('Deleting files:', filesToDelete, assistant_id);
-      mutateAsync({ files: filesToDelete, assistant_id });
+    (filesToDelete: BatchFile[], assistant_id?: string, tool_resource?: EToolResources) => {
+      console.log('Deleting files:', filesToDelete, assistant_id, tool_resource);
+      mutateAsync({ files: filesToDelete, assistant_id, tool_resource });
      setFileDeleteBatch([]);
     },
     [mutateAsync],

@@ -81,11 +83,11 @@ const useFileDeletion = ({
 
      setFileDeleteBatch((prevBatch) => {
        const newBatch = [...prevBatch, file];
-        debouncedDelete(newBatch, assistant_id);
+        debouncedDelete(newBatch, assistant_id, tool_resource);
        return newBatch;
      });
     },
-    [debouncedDelete, setFilesToDelete, assistant_id],
+    [debouncedDelete, setFilesToDelete, assistant_id, tool_resource],
   );
 
   const deleteFiles = useCallback(
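The hunks above thread an optional tool_resource through the debounced batch delete. A standalone sketch of that batching pattern with illustrative names and a made-up send callback (not the project's API):

import debounce from 'lodash/debounce';

type BatchFile = { file_id: string; filepath?: string };

// Collect deletions and flush them in one call; assistant_id and tool_resource
// ride along so the server can also detach the files from a tool resource.
function createBatchedDelete(
  send: (files: BatchFile[], assistant_id?: string, tool_resource?: string) => void,
  waitMs = 1000,
) {
  let batch: BatchFile[] = [];
  const flush = debounce((assistant_id?: string, tool_resource?: string) => {
    send(batch, assistant_id, tool_resource);
    batch = [];
  }, waitMs);

  return (file: BatchFile, assistant_id?: string, tool_resource?: string) => {
    batch = [...batch, file];
    flush(assistant_id, tool_resource);
  };
}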

@@ -1,13 +1,18 @@
 import { v4 } from 'uuid';
 import debounce from 'lodash/debounce';
 import { useQueryClient } from '@tanstack/react-query';
 import { useState, useEffect, useCallback } from 'react';
 import {
   megabyte,
+  QueryKeys,
+  EModelEndpoint,
   codeTypeMapping,
   mergeFileConfig,
+  isAssistantsEndpoint,
+  defaultAssistantsVersion,
   fileConfig as defaultFileConfig,
 } from 'librechat-data-provider';
+import type { TEndpointsConfig } from 'librechat-data-provider';
 import type { ExtendedFile, FileSetter } from '~/common';
 import { useUploadFileMutation, useGetFileConfig } from '~/data-provider';
 import { useDelayedUploadToast } from './useDelayedUploadToast';

@@ -20,10 +25,12 @@ const { checkType } = defaultFileConfig;
 type UseFileHandling = {
   overrideEndpoint?: EModelEndpoint;
   fileSetter?: FileSetter;
-  additionalMetadata?: Record<string, string>;
+  fileFilter?: (file: File) => boolean;
+  additionalMetadata?: Record<string, string | undefined>;
 };
 
 const useFileHandling = (params?: UseFileHandling) => {
+  const queryClient = useQueryClient();
   const { showToast } = useToastContext();
   const [errors, setErrors] = useState<string[]>([]);
   const { startUploadTimer, clearUploadTimer } = useDelayedUploadToast();

@@ -141,15 +148,20 @@ const useFileHandling = (params?: UseFileHandling) => {
 
    if (params?.additionalMetadata) {
      for (const [key, value] of Object.entries(params.additionalMetadata)) {
-        formData.append(key, value);
+        if (value) {
+          formData.append(key, value);
+        }
      }
    }
 
    if (
-      endpoint === EModelEndpoint.assistants &&
+      isAssistantsEndpoint(endpoint) &&
      !formData.get('assistant_id') &&
      conversation?.assistant_id
    ) {
+      const endpointsConfig = queryClient.getQueryData<TEndpointsConfig>([QueryKeys.endpoints]);
+      const version = endpointsConfig?.[endpoint]?.version ?? defaultAssistantsVersion[endpoint];
+      formData.append('version', version);
      formData.append('assistant_id', conversation.assistant_id);
      formData.append('model', conversation?.model ?? '');
      formData.append('message_file', 'true');
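The upload path above now stamps an API version onto the request. A minimal sketch of that lookup with simplified types; the default values here are assumptions for illustration, the real ones come from defaultAssistantsVersion in librechat-data-provider:

type AssistantsEndpoint = 'assistants' | 'azureAssistants';

// Assumed defaults, for illustration only.
const defaultAssistantsVersion: Record<AssistantsEndpoint, string | number> = {
  assistants: 2,
  azureAssistants: 1,
};

// Prefer the version the server advertises for the endpoint, else fall back to the default.
function resolveAssistantsVersion(
  endpoint: AssistantsEndpoint,
  endpointsConfig?: Partial<Record<AssistantsEndpoint, { version?: string | number }>>,
): string {
  return String(endpointsConfig?.[endpoint]?.version ?? defaultAssistantsVersion[endpoint]);
}

// e.g. formData.append('version', resolveAssistantsVersion(endpoint, endpointsConfig));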

@@ -5,15 +5,42 @@ import {
   useGetEndpointsQuery,
 } from 'librechat-data-provider/react-query';
 import { getConfigDefaults, EModelEndpoint, alternateName } from 'librechat-data-provider';
-import type { Assistant } from 'librechat-data-provider';
-import { useGetPresetsQuery, useListAssistantsQuery } from '~/data-provider';
+import type { AssistantsEndpoint, TAssistantsMap, TEndpointsConfig } from 'librechat-data-provider';
 import type { MentionOption } from '~/common';
+import useAssistantListMap from '~/hooks/Assistants/useAssistantListMap';
 import { mapEndpoints, getPresetTitle } from '~/utils';
 import { EndpointIcon } from '~/components/Endpoints';
+import { useGetPresetsQuery } from '~/data-provider';
 import useSelectMention from './useSelectMention';
 
 const defaultInterface = getConfigDefaults().interface;
 
-export default function useMentions({ assistantMap }: { assistantMap: Record<string, Assistant> }) {
+const assistantMapFn =
+  ({
+    endpoint,
+    assistantMap,
+    endpointsConfig,
+  }: {
+    endpoint: AssistantsEndpoint;
+    assistantMap: TAssistantsMap;
+    endpointsConfig: TEndpointsConfig;
+  }) =>
+  ({ id, name, description }) => ({
+    type: endpoint,
+    label: name ?? '',
+    value: id,
+    description: description ?? '',
+    icon: EndpointIcon({
+      conversation: { assistant_id: id, endpoint },
+      containerClassName: 'shadow-stroke overflow-hidden rounded-full',
+      endpointsConfig: endpointsConfig,
+      context: 'menu-item',
+      assistantMap,
+      size: 20,
+    }),
+  });
+
+export default function useMentions({ assistantMap }: { assistantMap: TAssistantsMap }) {
   const { data: presets } = useGetPresetsQuery();
   const { data: modelsConfig } = useGetModelsQuery();
   const { data: startupConfig } = useGetStartupConfig();
@@ -21,30 +48,43 @@ export default function useMentions({ assistantMap }: { assistantMap: Record<string, Assistant> }) {
   const { data: endpoints = [] } = useGetEndpointsQuery({
     select: mapEndpoints,
   });
-  const { data: assistants = [] } = useListAssistantsQuery(undefined, {
-    select: (res) =>
-      res.data
-        .map(({ id, name, description }) => ({
-          type: 'assistant',
-          label: name ?? '',
-          value: id,
-          description: description ?? '',
-          icon: EndpointIcon({
-            conversation: { assistant_id: id, endpoint: EModelEndpoint.assistants },
-            containerClassName: 'shadow-stroke overflow-hidden rounded-full',
-            endpointsConfig: endpointsConfig,
-            context: 'menu-item',
-            size: 20,
-          }),
-        }))
-        .filter(Boolean),
-  });
+  const listMap = useAssistantListMap((res) =>
+    res.data.map(({ id, name, description }) => ({
+      id,
+      name,
+      description,
+    })),
+  );
+  const assistantListMap = useMemo(
+    () => ({
+      [EModelEndpoint.assistants]: listMap[EModelEndpoint.assistants]
+        ?.map(
+          assistantMapFn({
+            endpoint: EModelEndpoint.assistants,
+            assistantMap,
+            endpointsConfig,
+          }),
+        )
+        ?.filter(Boolean),
+      [EModelEndpoint.azureAssistants]: listMap[EModelEndpoint.azureAssistants]
+        ?.map(
+          assistantMapFn({
+            endpoint: EModelEndpoint.azureAssistants,
+            assistantMap,
+            endpointsConfig,
+          }),
+        )
+        ?.filter(Boolean),
+    }),
+    [listMap, assistantMap, endpointsConfig],
+  );
 
   const modelSpecs = useMemo(() => startupConfig?.modelSpecs?.list ?? [], [startupConfig]);
   const interfaceConfig = useMemo(
     () => startupConfig?.interface ?? defaultInterface,
     [startupConfig],
   );
 
   const { onSelectMention } = useSelectMention({
     modelSpecs,
     endpointsConfig,
@@ -52,7 +92,7 @@ export default function useMentions({ assistantMap }: { assistantMap: Record<string, Assistant> }) {
     assistantMap,
   });
 
-  const options = useMemo(() => {
+  const options: MentionOption[] = useMemo(() => {
     const mentions = [
       ...(modelSpecs?.length > 0 ? modelSpecs : []).map((modelSpec) => ({
         value: modelSpec.name,

@@ -67,12 +107,12 @@ export default function useMentions({ assistantMap }: { assistantMap: Record<string, Assistant> }) {
          context: 'menu-item',
          size: 20,
        }),
-        type: 'modelSpec',
+        type: 'modelSpec' as const,
      })),
      ...(interfaceConfig.endpointsMenu ? endpoints : []).map((endpoint) => ({
        value: endpoint,
        label: alternateName[endpoint] ?? endpoint ?? '',
-        type: 'endpoint',
+        type: 'endpoint' as const,
        icon: EndpointIcon({
          conversation: { endpoint },
          endpointsConfig,

@@ -80,7 +120,12 @@ export default function useMentions({ assistantMap }: { assistantMap: Record<string, Assistant> }) {
          size: 20,
        }),
      })),
-      ...(endpointsConfig?.[EModelEndpoint.assistants] ? assistants : []),
+      ...(endpointsConfig?.[EModelEndpoint.assistants]
+        ? assistantListMap[EModelEndpoint.assistants]
+        : []),
+      ...(endpointsConfig?.[EModelEndpoint.azureAssistants]
+        ? assistantListMap[EModelEndpoint.azureAssistants]
+        : []),
      ...((interfaceConfig.presets ? presets : [])?.map((preset, index) => ({
        value: preset.presetId ?? `preset-${index}`,
        label: preset.title ?? preset.modelLabel ?? preset.chatGptLabel ?? '',

@@ -93,7 +138,7 @@ export default function useMentions({ assistantMap }: { assistantMap: Record<string, Assistant> }) {
          assistantMap,
          size: 20,
        }),
-        type: 'preset',
+        type: 'preset' as const,
      })) ?? []),
    ];
 

@@ -102,17 +147,17 @@ export default function useMentions({ assistantMap }: { assistantMap: Record<string, Assistant> }) {
    presets,
    endpoints,
    modelSpecs,
-    assistants,
    assistantMap,
    endpointsConfig,
+    assistantListMap,
    interfaceConfig.presets,
    interfaceConfig.endpointsMenu,
  ]);
 
  return {
    options,
-    assistants,
    modelsConfig,
    onSelectMention,
+    assistantListMap,
  };
 }
@@ -1,12 +1,12 @@
 import { useCallback } from 'react';
 import { useRecoilValue } from 'recoil';
-import { EModelEndpoint } from 'librechat-data-provider';
+import { EModelEndpoint, isAssistantsEndpoint } from 'librechat-data-provider';
 import type {
-  TPreset,
   TModelSpec,
   TConversation,
+  TAssistantsMap,
   TEndpointsConfig,
+  TPreset,
-  Assistant,
 } from 'librechat-data-provider';
 import type { MentionOption } from '~/common';
 import { getConvoSwitchLogic, getModelSpecIconURL, removeUnavailableTools } from '~/utils';

@@ -23,7 +23,7 @@ export default function useSelectMention({
   presets?: TPreset[];
   modelSpecs: TModelSpec[];
   endpointsConfig: TEndpointsConfig;
-  assistantMap: Record<string, Assistant>;
+  assistantMap: TAssistantsMap;
 }) {
   const { conversation } = useChatContext();
   const { newConversation } = useNewConvo();

@@ -194,10 +194,10 @@ export default function useSelectMention({
        onSelectEndpoint(key, { model: option.label });
      } else if (option.type === 'endpoint') {
        onSelectEndpoint(key);
-      } else if (option.type === 'assistant') {
-        onSelectEndpoint(EModelEndpoint.assistants, {
+      } else if (isAssistantsEndpoint(option.type)) {
+        onSelectEndpoint(option.type, {
          assistant_id: key,
-          model: assistantMap?.[key]?.model ?? '',
+          model: assistantMap?.[option.type]?.[key]?.model ?? '',
        });
      }
    },
@@ -1,6 +1,6 @@
 import debounce from 'lodash/debounce';
 import { useEffect, useRef, useCallback } from 'react';
-import { EModelEndpoint } from 'librechat-data-provider';
+import { isAssistantsEndpoint } from 'librechat-data-provider';
 import { useRecoilValue, useSetRecoilState } from 'recoil';
 import type { TEndpointOption } from 'librechat-data-provider';
 import type { KeyboardEvent } from 'react';

@@ -45,10 +45,11 @@ export default function useTextarea({
   const { conversationId, jailbreak, endpoint = '', assistant_id } = conversation || {};
   const isNotAppendable =
     ((latestMessage?.unfinished && !isSubmitting) || latestMessage?.error) &&
-    endpoint !== EModelEndpoint.assistants;
+    !isAssistantsEndpoint(endpoint);
   // && (conversationId?.length ?? 0) > 6; // also ensures that we don't show the wrong placeholder
 
-  const assistant = endpoint === EModelEndpoint.assistants && assistantMap?.[assistant_id ?? ''];
+  const assistant =
+    isAssistantsEndpoint(endpoint) && assistantMap?.[endpoint ?? '']?.[assistant_id ?? ''];
   const assistantName = (assistant && assistant?.name) || '';
 
   // auto focus to input, when enter a conversation.

@@ -86,9 +87,11 @@ export default function useTextarea({
    if (disabled) {
      return localize('com_endpoint_config_placeholder');
    }
+    const currentEndpoint = conversation?.endpoint ?? '';
+    const currentAssistantId = conversation?.assistant_id ?? '';
    if (
-      conversation?.endpoint === EModelEndpoint.assistants &&
-      (!conversation?.assistant_id || !assistantMap?.[conversation?.assistant_id ?? ''])
+      isAssistantsEndpoint(currentEndpoint) &&
+      (!currentAssistantId || !assistantMap?.[currentEndpoint]?.[currentAssistantId ?? ''])
    ) {
      return localize('com_endpoint_assistant_placeholder');
    }

@@ -97,10 +100,9 @@ export default function useTextarea({
      return localize('com_endpoint_message_not_appendable');
    }
 
-    const sender =
-      conversation?.endpoint === EModelEndpoint.assistants
-        ? getAssistantName({ name: assistantName, localize })
-        : getSender(conversation as TEndpointOption);
+    const sender = isAssistantsEndpoint(currentEndpoint)
+      ? getAssistantName({ name: assistantName, localize })
+      : getSender(conversation as TEndpointOption);
 
    return `${localize('com_endpoint_message')} ${sender ? sender : 'ChatGPT'}…`;
  };
@@ -1,5 +1,5 @@
 import { useEffect, useRef, useCallback } from 'react';
-import { EModelEndpoint } from 'librechat-data-provider';
+import { isAssistantsEndpoint } from 'librechat-data-provider';
 import type { TMessageProps } from '~/common';
 import { useChatContext, useAssistantsMapContext } from '~/Providers';
 import useCopyToClipboard from './useCopyToClipboard';

@@ -55,7 +55,8 @@ export default function useMessageHelpers(props: TMessageProps) {
   }, [isSubmitting, setAbortScroll]);
 
   const assistant =
-    conversation?.endpoint === EModelEndpoint.assistants && assistantMap?.[message?.model ?? ''];
+    isAssistantsEndpoint(conversation?.endpoint) &&
+    assistantMap?.[conversation?.endpoint ?? '']?.[message?.model ?? ''];
 
   const regenerateMessage = () => {
     if ((isSubmitting && isCreatedByUser) || !message) {
@@ -1,35 +1,44 @@
-import { useState, useEffect } from 'react';
+import { useState, useEffect, useMemo, useCallback } from 'react';
 
 export default function useProgress(initialProgress = 0.01, increment = 0.007, fileSize?: number) {
-  const calculateIncrement = (size?: number) => {
-    const baseRate = 0.05;
-    const minRate = 0.002;
-    const sizeMB = size ? size / (1024 * 1024) : 0;
+  const calculateIncrement = useCallback(
+    (size?: number) => {
+      const baseRate = 0.05;
+      const minRate = 0.002;
+      const sizeMB = size ? size / (1024 * 1024) : 0;
 
-    if (!size) {
-      return increment;
-    }
+      if (!size) {
+        return increment;
+      }
 
-    if (sizeMB <= 1) {
-      return baseRate * 2;
-    } else {
-      return Math.max(baseRate / Math.sqrt(sizeMB), minRate);
-    }
-  };
+      if (sizeMB <= 1) {
+        return baseRate * 2;
+      } else {
+        return Math.max(baseRate / Math.sqrt(sizeMB), minRate);
+      }
+    },
+    [increment],
+  );
 
-  const incrementValue = calculateIncrement(fileSize);
+  const incrementValue = useMemo(
+    () => calculateIncrement(fileSize),
+    [fileSize, calculateIncrement],
+  );
   const [progress, setProgress] = useState(initialProgress);
 
-  const getDynamicIncrement = (currentProgress: number) => {
-    if (!fileSize) {
-      return incrementValue;
-    }
-    if (currentProgress < 0.7) {
-      return incrementValue;
-    } else {
-      return Math.max(0.0005, incrementValue * (1 - currentProgress));
-    }
-  };
+  const getDynamicIncrement = useCallback(
+    (currentProgress: number) => {
+      if (!fileSize) {
+        return incrementValue;
+      }
+      if (currentProgress < 0.7) {
+        return incrementValue;
+      } else {
+        return Math.max(0.0005, incrementValue * (1 - currentProgress));
+      }
+    },
+    [incrementValue, fileSize],
+  );
 
   useEffect(() => {
     let timeout: ReturnType<typeof setTimeout>;

@@ -58,7 +67,7 @@ export default function useProgress(initialProgress = 0.01, increment = 0.007, fileSize?: number) {
      clearInterval(timer);
      clearTimeout(timeout);
    };
-  }, [progress, initialProgress, incrementValue, fileSize]);
+  }, [progress, initialProgress, incrementValue, fileSize, getDynamicIncrement]);
 
  return progress;
 }
@@ -3,7 +3,7 @@ import {
   ArrowRightToLine,
   // Settings2,
 } from 'lucide-react';
-import { EModelEndpoint } from 'librechat-data-provider';
+import { EModelEndpoint, isAssistantsEndpoint } from 'librechat-data-provider';
 import type { TConfig, TInterfaceConfig } from 'librechat-data-provider';
 import type { NavLink } from '~/common';
 import PanelSwitch from '~/components/SidePanel/Builder/PanelSwitch';

@@ -26,7 +26,7 @@ export default function useSideNavLinks({
 }) {
   const Links = useMemo(() => {
     const links: NavLink[] = [];
-    // if (endpoint !== EModelEndpoint.assistants) {
+    // if (!isAssistantsEndpoint(endpoint)) {
     //   links.push({
     //     title: 'com_sidepanel_parameters',
     //     label: '',

@@ -36,7 +36,7 @@ export default function useSideNavLinks({
     //   });
     // }
     if (
-      endpoint === EModelEndpoint.assistants &&
+      isAssistantsEndpoint(endpoint) &&
       assistants &&
       assistants.disableBuilder !== true &&
       keyProvided &&
@@ -12,10 +12,10 @@ import {
   createPayload,
   tPresetSchema,
   tMessageSchema,
-  EModelEndpoint,
   LocalStorageKeys,
   tConvoUpdateSchema,
   removeNullishValues,
+  isAssistantsEndpoint,
 } from 'librechat-data-provider';
 import { useGetUserBalance, useGetStartupConfig } from 'librechat-data-provider/react-query';
 import type {

@@ -441,7 +441,7 @@ export default function useSSE(submission: TSubmission | null, index = 0) {
        Authorization: `Bearer ${token}`,
      },
      body: JSON.stringify({
-        abortKey: _endpoint === EModelEndpoint.assistants ? runAbortKey : conversationId,
+        abortKey: isAssistantsEndpoint(_endpoint) ? runAbortKey : conversationId,
        endpoint,
      }),
    });

@@ -513,7 +513,7 @@ export default function useSSE(submission: TSubmission | null, index = 0) {
 
    const payloadData = createPayload(submission);
    let { payload } = payloadData;
-    if (payload.endpoint === EModelEndpoint.assistants) {
+    if (isAssistantsEndpoint(payload.endpoint)) {
      payload = removeNullishValues(payload);
    }
 
@@ -18,10 +18,8 @@ export { default as useNewConvo } from './useNewConvo';
 export { default as useLocalize } from './useLocalize';
 export { default as useMediaQuery } from './useMediaQuery';
 export { default as useChatHelpers } from './useChatHelpers';
-export { default as useGenerations } from './useGenerations';
 export { default as useScrollToRef } from './useScrollToRef';
 export { default as useLocalStorage } from './useLocalStorage';
 export { default as useDelayedRender } from './useDelayedRender';
 export { default as useOnClickOutside } from './useOnClickOutside';
-export { default as useOriginNavigate } from './useOriginNavigate';
 export { default as useGenerationsByLatest } from './useGenerationsByLatest';
@@ -3,10 +3,10 @@ import { useCallback, useState } from 'react';
 import { useQueryClient } from '@tanstack/react-query';
 import {
   Constants,
-  EModelEndpoint,
   QueryKeys,
-  parseCompactConvo,
   ContentTypes,
+  parseCompactConvo,
+  isAssistantsEndpoint,
 } from 'librechat-data-provider';
 import { useRecoilState, useResetRecoilState, useSetRecoilState } from 'recoil';
 import { useGetMessagesByConvoId } from 'librechat-data-provider/react-query';

@@ -215,7 +215,7 @@ export default function useChatHelpers(index = 0, paramId: string | undefined) {
      error: false,
    };
 
-    if (endpoint === EModelEndpoint.assistants) {
+    if (isAssistantsEndpoint(endpoint)) {
      initialResponse.model = conversation?.assistant_id ?? '';
      initialResponse.text = '';
      initialResponse.content = [
@@ -1,68 +0,0 @@
-import type { TMessage } from 'librechat-data-provider';
-import { EModelEndpoint } from 'librechat-data-provider';
-import { useRecoilValue } from 'recoil';
-import store from '~/store';
-
-type TUseGenerations = {
-  endpoint?: string;
-  message: TMessage;
-  isSubmitting: boolean;
-  isEditing?: boolean;
-  latestMessage?: TMessage | null;
-};
-
-export default function useGenerations({
-  endpoint,
-  message,
-  isSubmitting,
-  isEditing = false,
-  latestMessage: _latestMessage,
-}: TUseGenerations) {
-  const latestMessage = useRecoilValue(store.latestMessage) ?? _latestMessage;
-
-  const { error, messageId, searchResult, finish_reason, isCreatedByUser } = message ?? {};
-  const isEditableEndpoint = !![
-    EModelEndpoint.openAI,
-    EModelEndpoint.google,
-    EModelEndpoint.assistants,
-    EModelEndpoint.anthropic,
-    EModelEndpoint.gptPlugins,
-    EModelEndpoint.azureOpenAI,
-  ].find((e) => e === endpoint);
-
-  const continueSupported =
-    latestMessage?.messageId === messageId &&
-    finish_reason &&
-    finish_reason !== 'stop' &&
-    !isEditing &&
-    !searchResult &&
-    isEditableEndpoint;
-
-  const branchingSupported =
-    // 5/21/23: Bing is allowing editing and Message regenerating
-    !![
-      EModelEndpoint.azureOpenAI,
-      EModelEndpoint.openAI,
-      EModelEndpoint.chatGPTBrowser,
-      EModelEndpoint.google,
-      EModelEndpoint.bingAI,
-      EModelEndpoint.gptPlugins,
-      EModelEndpoint.anthropic,
-    ].find((e) => e === endpoint);
-
-  const regenerateEnabled =
-    !isCreatedByUser && !searchResult && !isEditing && !isSubmitting && branchingSupported;
-
-  const hideEditButton =
-    isSubmitting ||
-    error ||
-    searchResult ||
-    !branchingSupported ||
-    (!isEditableEndpoint && !isCreatedByUser);
-
-  return {
-    continueSupported,
-    regenerateEnabled,
-    hideEditButton,
-  };
-}
@@ -1,5 +1,5 @@
 import type { TMessage } from 'librechat-data-provider';
-import { EModelEndpoint } from 'librechat-data-provider';
+import { EModelEndpoint, isAssistantsEndpoint } from 'librechat-data-provider';
 
 type TUseGenerations = {
   endpoint?: string;

@@ -21,7 +21,6 @@ export default function useGenerationsByLatest({
     EModelEndpoint.openAI,
     EModelEndpoint.custom,
     EModelEndpoint.google,
-    EModelEndpoint.assistants,
     EModelEndpoint.anthropic,
     EModelEndpoint.gptPlugins,
     EModelEndpoint.azureOpenAI,

@@ -58,12 +57,13 @@ export default function useGenerationsByLatest({
    !branchingSupported ||
    (!isEditableEndpoint && !isCreatedByUser);
 
-  const forkingSupported = endpoint !== EModelEndpoint.assistants && !searchResult;
+  const forkingSupported = !isAssistantsEndpoint(endpoint) && !searchResult;
 
  return {
    forkingSupported,
    continueSupported,
    regenerateEnabled,
+    isEditableEndpoint,
    hideEditButton,
  };
 }
@@ -4,12 +4,8 @@ import {
   useGetStartupConfig,
   useGetEndpointsQuery,
 } from 'librechat-data-provider/react-query';
-import {
-  FileSources,
-  EModelEndpoint,
-  LocalStorageKeys,
-  defaultOrderQuery,
-} from 'librechat-data-provider';
+import { useNavigate } from 'react-router-dom';
+import { FileSources, LocalStorageKeys, isAssistantsEndpoint } from 'librechat-data-provider';
 import {
   useRecoilState,
   useRecoilValue,

@@ -24,6 +20,7 @@ import type {
   TConversation,
   TEndpointsConfig,
 } from 'librechat-data-provider';
+import type { AssistantListItem } from '~/common';
 import {
   getEndpointField,
   buildDefaultConvo,

@@ -32,13 +29,14 @@ import {
   getModelSpecIconURL,
   updateLastSelectedModel,
 } from '~/utils';
-import { useDeleteFilesMutation, useListAssistantsQuery } from '~/data-provider';
-import useOriginNavigate from './useOriginNavigate';
+import useAssistantListMap from './Assistants/useAssistantListMap';
+import { useDeleteFilesMutation } from '~/data-provider';
+
 import { mainTextareaId } from '~/common';
 import store from '~/store';
 
 const useNewConvo = (index = 0) => {
-  const navigate = useOriginNavigate();
+  const navigate = useNavigate();
   const { data: startupConfig } = useGetStartupConfig();
   const defaultPreset = useRecoilValue(store.defaultPreset);
   const { setConversation } = store.useCreateConversationAtom(index);

@@ -48,11 +46,7 @@ const useNewConvo = (index = 0) => {
   const { data: endpointsConfig = {} as TEndpointsConfig } = useGetEndpointsQuery();
   const modelsQuery = useGetModelsQuery();
   const timeoutIdRef = useRef<NodeJS.Timeout>();
-
-  const { data: assistants = [] } = useListAssistantsQuery(defaultOrderQuery, {
-    select: (res) =>
-      res.data.map(({ id, name, metadata, model }) => ({ id, name, metadata, model })),
-  });
+  const assistantsListMap = useAssistantListMap();
 
   const { mutateAsync } = useDeleteFilesMutation({
     onSuccess: () => {

@@ -100,12 +94,21 @@ const useNewConvo = (index = 0) => {
        conversation.endpointType = undefined;
      }
 
-      const isAssistantEndpoint = defaultEndpoint === EModelEndpoint.assistants;
+      const isAssistantEndpoint = isAssistantsEndpoint(defaultEndpoint);
+      const assistants: AssistantListItem[] = assistantsListMap[defaultEndpoint] ?? [];
+
+      if (
+        conversation.assistant_id &&
+        !assistantsListMap[defaultEndpoint]?.[conversation.assistant_id]
+      ) {
+        conversation.assistant_id = undefined;
+      }
 
      if (!conversation.assistant_id && isAssistantEndpoint) {
        conversation.assistant_id =
-          localStorage.getItem(`${LocalStorageKeys.ASST_ID_PREFIX}${index}`) ??
-          assistants[0]?.id;
+          localStorage.getItem(
+            `${LocalStorageKeys.ASST_ID_PREFIX}${index}${defaultEndpoint}`,
+          ) ?? assistants[0]?.id;
      }
 
      if (

@@ -116,7 +119,7 @@ const useNewConvo = (index = 0) => {
        const assistant = assistants.find((asst) => asst.id === conversation.assistant_id);
        conversation.model = assistant?.model;
        updateLastSelectedModel({
-          endpoint: EModelEndpoint.assistants,
+          endpoint: defaultEndpoint,
          model: conversation.model,
        });
      }

@@ -145,7 +148,7 @@ const useNewConvo = (index = 0) => {
      if (appTitle) {
        document.title = appTitle;
      }
-      navigate('new');
+      navigate('/c/new');
    }
 
    clearTimeout(timeoutIdRef.current);

@@ -156,7 +159,7 @@ const useNewConvo = (index = 0) => {
      }
    }, 150);
    },
-    [endpointsConfig, defaultPreset, assistants, modelsQuery.data],
+    [endpointsConfig, defaultPreset, assistantsListMap, modelsQuery.data],
  );
 
  const newConversation = useCallback(
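The hunk above also changes how the last-selected assistant is remembered: the localStorage key now includes the endpoint in addition to the chat index. A small sketch of that keying, with an assumed prefix value (the real constant is LocalStorageKeys.ASST_ID_PREFIX and may differ):

// Assumed prefix for illustration only.
const ASST_ID_PREFIX = 'assistant_id__';

function lastSelectedAssistantKey(index: number, endpoint: string): string {
  return `${ASST_ID_PREFIX}${index}${endpoint}`;
}

// Previously the key was built from the index alone; now the same index can hold
// a separate remembered assistant per endpoint, e.g.:
console.log(lastSelectedAssistantKey(0, 'assistants'));
console.log(lastSelectedAssistantKey(0, 'azureAssistants'));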

@@ -1,18 +0,0 @@
-import { useNavigate, useLocation } from 'react-router-dom';
-
-const useOriginNavigate = () => {
-  const _navigate = useNavigate();
-  const location = useLocation();
-
-  const navigate = (url?: string | null, opts = {}) => {
-    if (!url) {
-      return;
-    }
-    const path = location.pathname.match(/^\/[^/]+\//);
-    _navigate(`${path ? path[0] : '/c/'}${url}`, opts);
-  };
-
-  return navigate;
-};
-
-export default useOriginNavigate;