import { useState, useRef, useEffect } from 'react';
import { EModelEndpoint } from 'librechat-data-provider';
import type { SetterOrUpdater } from 'recoil';
import type { MentionOption } from '~/common';
import { useAssistantsMapContext } from '~/Providers';
import useMentions from '~/hooks/Input/useMentions';
import { useLocalize, useCombobox } from '~/hooks';
import { removeAtSymbolIfLast } from '~/utils';
import MentionItem from './MentionItem';

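/**
 * Popover for selecting `@` mentions in the chat input. It lists endpoints,
 * assistants, and models; choosing an endpoint swaps the list to that
 * endpoint's assistants or models, while any other selection is forwarded via
 * `onSelectMention` and the popover closes.
 */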
export default function Mention({
  setShowMentionPopover,
  textAreaRef,
}: {
  setShowMentionPopover: SetterOrUpdater<boolean>;
  textAreaRef: React.MutableRefObject<HTMLTextAreaElement | null>;
}) {
  const localize = useLocalize();
  const assistantMap = useAssistantsMapContext();
  const { options, modelsConfig, assistantListMap, onSelectMention } = useMentions({
    assistantMap,
  });

  const [activeIndex, setActiveIndex] = useState(0);
  const timeoutRef = useRef<NodeJS.Timeout | null>(null);
  const inputRef = useRef<HTMLInputElement | null>(null);
  const [inputOptions, setInputOptions] = useState<MentionOption[]>(options);

  const { open, setOpen, searchValue, setSearchValue, matches } = useCombobox({
    value: '',
    options: inputOptions,
  });

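  // Selecting an endpoint drills into that endpoint's assistant or model list;
  // any other mention is applied immediately and the popover closes.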
  const handleSelect = (mention?: MentionOption) => {
    if (!mention) {
      return;
    }

    const defaultSelect = () => {
      setSearchValue('');
      setOpen(false);
      setShowMentionPopover(false);
      onSelectMention(mention);

      if (textAreaRef.current) {
        removeAtSymbolIfLast(textAreaRef.current);
      }
    };

    if (mention.type === 'endpoint' && mention.value === EModelEndpoint.assistants) {
      setSearchValue('');
      setInputOptions(assistantListMap[EModelEndpoint.assistants]);
      setActiveIndex(0);
      inputRef.current?.focus();
    } else if (mention.type === 'endpoint' && mention.value === EModelEndpoint.azureAssistants) {
      setSearchValue('');
      setInputOptions(assistantListMap[EModelEndpoint.azureAssistants]);
      setActiveIndex(0);
      inputRef.current?.focus();
    } else if (mention.type === 'endpoint') {
      const models = (modelsConfig?.[mention.value ?? ''] ?? []).map((model) => ({
        value: mention.value,
        label: model,
        type: 'model',
      }));

      setActiveIndex(0);
      setSearchValue('');
      setInputOptions(models);
      inputRef.current?.focus();
    } else {
      defaultSelect();
    }
  };

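  // Restore the full option list and reset the highlight whenever the popover closes.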
  useEffect(() => {
    if (!open) {
      setInputOptions(options);
      setActiveIndex(0);
    }
  }, [open, options]);

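  // Keep the highlighted item scrolled into view during keyboard navigation.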
  useEffect(() => {
    const currentActiveItem = document.getElementById(`mention-item-${activeIndex}`);
    currentActiveItem?.scrollIntoView({ behavior: 'instant', block: 'nearest' });
  }, [activeIndex]);

  return (
    <div className="absolute bottom-16 z-10 w-full space-y-2">
      <div className="popover border-token-border-light rounded-2xl border bg-white p-2 shadow-lg dark:bg-gray-700">
        <input
          autoFocus
          ref={inputRef}
          placeholder={localize('com_ui_mention')}
          className="mb-1 w-full border-0 bg-white p-2 text-sm focus:outline-none dark:bg-gray-700 dark:text-gray-200"
          autoComplete="off"
          value={searchValue}
          onKeyDown={(e) => {
            if (e.key === 'Escape') {
              setOpen(false);
              setShowMentionPopover(false);
              textAreaRef.current?.focus();
            }
            if (e.key === 'ArrowDown') {
              setActiveIndex((prevIndex) => (prevIndex + 1) % matches.length);
            } else if (e.key === 'ArrowUp') {
              setActiveIndex((prevIndex) => (prevIndex - 1 + matches.length) % matches.length);
            } else if (e.key === 'Enter' || e.key === 'Tab') {
              const mentionOption = matches[activeIndex] as MentionOption | undefined;
              // Swallow the keystroke when the active option is an endpoint (Enter or Tab),
              // or on Enter generally, so it does not reach the chat textarea.
              if (mentionOption?.type === 'endpoint') {
                e.preventDefault();
              } else if (e.key === 'Enter') {
                e.preventDefault();
              }
              handleSelect(matches[activeIndex] as MentionOption);
            } else if (e.key === 'Backspace' && searchValue === '') {
              setOpen(false);
              setShowMentionPopover(false);
              textAreaRef.current?.focus();
            }
          }}
          onChange={(e) => setSearchValue(e.target.value)}
          onFocus={() => setOpen(true)}
          onBlur={() => {
            // Delay closing so a MentionItem's onClick can fire before the list unmounts.
            timeoutRef.current = setTimeout(() => {
              setOpen(false);
              setShowMentionPopover(false);
            }, 150);
          }}
        />
        {open && (
          <div className="max-h-40 overflow-y-auto">
            {(matches as MentionOption[]).map((mention, index) => (
              <MentionItem
                index={index}
                key={`${mention.value}-${index}`}
                onClick={() => {
                  if (timeoutRef.current) {
                    clearTimeout(timeoutRef.current);
                  }
                  timeoutRef.current = null;
                  handleSelect(mention);
                }}
                name={mention.label ?? ''}
                icon={mention.icon}
                description={mention.description}
                isActive={index === activeIndex}
              />
            ))}
          </div>
        )}
      </div>
    </div>
  );
}
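
/*
 * Usage sketch (illustrative assumption, not part of this file): a parent chat
 * input component is expected to toggle this popover and hand down its textarea
 * ref, roughly like:
 *
 *   {showMentionPopover && (
 *     <Mention setShowMentionPopover={setShowMentionPopover} textAreaRef={textAreaRef} />
 *   )}
 *
 * where `showMentionPopover` / `setShowMentionPopover` come from Recoil state
 * (hence the `SetterOrUpdater<boolean>` prop type) and `textAreaRef` is the ref
 * attached to the chat textarea. The exact atom and wiring live in the parent.
 */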