LibreChat/client/src/utils/endpoints.ts
Danny Avila 1a452121fa
🤖 feat: OpenAI Assistants v2 (initial support) (#2781)
* 🤖 Assistants V2 Support: Part 1

- Separated Azure Assistants to its own endpoint
- File Search / Vector Store integration is incomplete, but can toggle and use storage from playground
- Code Interpreter resource files can be added but not deleted
- GPT-4o is supported
- Many improvements to the Assistants Endpoint overall

data-provider v2 changes

copy existing route as v1

chore: rename new endpoint to reduce comparison operations and add new azure filesource

api: add azureAssistants part 1

force use of version for assistants/assistantsAzure

chore: switch name back to azureAssistants

refactor type version: string | number

Ensure assistants endpoints have version set

fix: isArchived type issue in ConversationListParams

refactor: update assistants mutations/queries with endpoint/version definitions, update Assistants Map structure

chore: FilePreview component ExtendedFile type assertion

feat: isAssistantsEndpoint helper

chore: remove unused useGenerations

chore(buildTree): type issue

chore(Advanced): type issue (unused component, maybe in future)

first pass for multi-assistant endpoint rewrite

fix(listAssistants): pass params correctly

feat: list separate assistants by endpoint

fix(useTextarea): access assistantMap correctly

fix: assistant endpoint switching, resetting ID

fix: broken during rewrite, selecting assistant mention

fix: set/invalidate assistants endpoint query data correctly

feat: Fix issue with assistant ID not being reset correctly

getOpenAIClient helper function

feat: add toast for assistant deletion

fix: assistants delete right after create issue for azure

fix: assistant patching

refactor: actions to use getOpenAIClient

refactor: consolidate logic into helpers file

fix: issue where conversation data was not initially available

v1 chat support

refactor(spendTokens): only early return if completionTokens isNaN

fix(OpenAIClient): ensure spendTokens has all necessary params

refactor: route/controller logic

fix(assistants/initializeClient): use defaultHeaders field

fix: sanitize default operation id

chore: bump openai package

first pass v2 action service

feat: retroactive domain parsing for actions added via v1

feat: delete db records of actions/assistants on openai assistant deletion

chore: remove vision tools from v2 assistants

feat: v2 upload and delete assistant vision images

WIP first pass, thread attachments

fix: show assistant vision files (save local/firebase copy)

v2 image continue

fix: annotations

fix: refine annotations

show analyze as error if no longer submitting before progress reaches 1, and show file_search as a retrieval tool

fix: abort run, undefined endpoint issue

refactor: consolidate capabilities logic and anticipate versioning

frontend version 2 changes

fix: query selection and filter

add endpoint to unknown filepath

add file ids to resource, deleting in progress

enable/disable file search

remove version log

* 🤖 Assistants V2 Support: Part 2

🎹 fix: Autocompletion Chrome Bug on Action API Key Input

chore: remove `useOriginNavigate`

chore: set correct OpenAI Storage Source

fix: azure file deletions, instantiate clients by source for deletion

update code interpreter files info

feat: deleteResourceFileId

chore: increase poll interval as azure easily rate limits

fix: openai file deletions, TODO: evaluate rejected deletion settled promises to determine which to delete from db records

file source icons

update table file filters

chore: file search info and versioning

fix: retrieval update with necessary tool_resources if specified

fix(useMentions): add optional chaining in case listMap value is undefined

fix: force assistant avatar roundedness

fix: azure assistants, check correct flag

chore: bump data-provider

* fix: merge conflict

* ci: fix backend tests due to new updates

* chore: update .env.example

* meilisearch improvements

* localization updates

* chore: update comparisons

* feat: add additional metadata: endpoint, author ID

* chore: azureAssistants ENDPOINTS exclusion warning
2024-05-19 12:56:55 -04:00


import {
  EModelEndpoint,
  defaultEndpoints,
  modularEndpoints,
  LocalStorageKeys,
  isAssistantsEndpoint,
} from 'librechat-data-provider';
import type {
  TConfig,
  TPreset,
  TModelSpec,
  TConversation,
  TEndpointsConfig,
} from 'librechat-data-provider';
import type { LocalizeFunction } from '~/common';

export const getAssistantName = ({
  name,
  localize,
}: {
  name?: string;
  localize: LocalizeFunction;
}) => {
  if (name && name.length > 0) {
    return name;
  } else {
    return localize('com_ui_assistant');
  }
};

export const getEndpointsFilter = (endpointsConfig: TEndpointsConfig) => {
  const filter: Record<string, boolean> = {};
  if (!endpointsConfig) {
    return filter;
  }
  for (const key of Object.keys(endpointsConfig)) {
    filter[key] = !!endpointsConfig[key];
  }
  return filter;
};

export const getAvailableEndpoints = (
  filter: Record<string, boolean>,
  endpointsConfig: TEndpointsConfig,
) => {
  const defaultSet = new Set(defaultEndpoints);
  const availableEndpoints: EModelEndpoint[] = [];

  for (const endpoint in endpointsConfig) {
    // Check if endpoint is in the filter or its type is in defaultEndpoints
    if (
      filter[endpoint] ||
      (endpointsConfig[endpoint]?.type &&
        defaultSet.has(endpointsConfig[endpoint]?.type as EModelEndpoint))
    ) {
      availableEndpoints.push(endpoint as EModelEndpoint);
    }
  }

  return availableEndpoints;
};
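
// Usage sketch (illustrative; this comment and the sample values are not part of the
// original file): an endpoint is kept when the filter marks it enabled, or when its
// configured `type` is one of the defaultEndpoints.
//
//   const filter = { openAI: true, MyCustom: false };
//   getAvailableEndpoints(filter, endpointsConfig);
//   // => includes 'openAI' via the filter; includes 'MyCustom' only if its config
//   //    `type` (e.g. 'custom') is present in defaultEndpoints.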

/** Get the specified field from the endpoint config */
export function getEndpointField<K extends keyof TConfig>(
  endpointsConfig: TEndpointsConfig | undefined,
  endpoint: EModelEndpoint | string | null | undefined,
  property: K,
): TConfig[K] | undefined {
  if (!endpointsConfig || endpoint === null || endpoint === undefined) {
    return undefined;
  }

  const config = endpointsConfig[endpoint];
  if (!config) {
    return undefined;
  }

  return config[property];
}
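
// Usage sketch (illustrative, not part of the original file): reads a single field from
// one endpoint's config, returning undefined when the config or endpoint is missing.
//
//   const iconURL = getEndpointField(endpointsConfig, 'openAI', 'iconURL');
//   const type = getEndpointField(endpointsConfig, conversation?.endpoint, 'type');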

export function mapEndpoints(endpointsConfig: TEndpointsConfig) {
  const filter = getEndpointsFilter(endpointsConfig);
  return getAvailableEndpoints(filter, endpointsConfig).sort(
    (a, b) => (endpointsConfig?.[a]?.order ?? 0) - (endpointsConfig?.[b]?.order ?? 0),
  );
}
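
// Usage sketch (illustrative; the sample config shape is an assumption, not from the
// original file): filters the configured endpoints and sorts them by their `order` field.
//
//   // Given endpointsConfig = { assistants: { order: 0 }, openAI: { order: 1 } }
//   mapEndpoints(endpointsConfig); // => ['assistants', 'openAI']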

/**
 * Ensures the last selected model stays up to date, as the conversation may
 * update without updating the last convo setup when the endpoint is the same.
 */
export function updateLastSelectedModel({
  endpoint,
  model,
}: {
  endpoint: string;
  model: string | undefined;
}) {
  if (!model) {
    return;
  }

  const lastConversationSetup = JSON.parse(
    localStorage.getItem(LocalStorageKeys.LAST_CONVO_SETUP) || '{}',
  );

  if (lastConversationSetup.endpoint === endpoint) {
    lastConversationSetup.model = model;
    localStorage.setItem(LocalStorageKeys.LAST_CONVO_SETUP, JSON.stringify(lastConversationSetup));
  }

  const lastSelectedModels = JSON.parse(localStorage.getItem(LocalStorageKeys.LAST_MODEL) || '{}');
  lastSelectedModels[endpoint] = model;
  localStorage.setItem(LocalStorageKeys.LAST_MODEL, JSON.stringify(lastSelectedModels));
}
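
// Usage sketch (illustrative, not part of the original file): records the model per
// endpoint in localStorage, and also updates LAST_CONVO_SETUP when its endpoint matches.
//
//   updateLastSelectedModel({ endpoint: 'openAI', model: 'gpt-4o' });
//   // LocalStorageKeys.LAST_MODEL now maps 'openAI' -> 'gpt-4o';
//   // LAST_CONVO_SETUP.model is only touched if its stored endpoint is also 'openAI'.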

interface ConversationInitParams {
  conversation: TConversation | null;
  newEndpoint: EModelEndpoint | string;
  endpointsConfig: TEndpointsConfig;
  modularChat?: boolean;
}

interface InitiatedTemplateResult {
  template: Partial<TPreset>;
  shouldSwitch: boolean;
  isExistingConversation: boolean;
  isCurrentModular: boolean;
  isNewModular: boolean;
  newEndpointType: EModelEndpoint | undefined;
}

/** Get the conditional logic for switching conversations */
export function getConvoSwitchLogic(params: ConversationInitParams): InitiatedTemplateResult {
  const { conversation, newEndpoint, endpointsConfig, modularChat } = params;

  const currentEndpoint = conversation?.endpoint;
  const template: Partial<TPreset> = {
    ...conversation,
    endpoint: newEndpoint,
    conversationId: 'new',
  };

  const isAssistantSwitch =
    isAssistantsEndpoint(newEndpoint) &&
    isAssistantsEndpoint(currentEndpoint) &&
    currentEndpoint === newEndpoint;

  const conversationId = conversation?.conversationId;
  const isExistingConversation = !!(conversationId && conversationId !== 'new');

  const currentEndpointType =
    getEndpointField(endpointsConfig, currentEndpoint, 'type') ?? currentEndpoint;
  const newEndpointType =
    getEndpointField(endpointsConfig, newEndpoint, 'type') ??
    (newEndpoint as EModelEndpoint | undefined);

  const hasEndpoint = modularEndpoints.has(currentEndpoint ?? '');
  const hasCurrentEndpointType = modularEndpoints.has(currentEndpointType ?? '');
  const isCurrentModular = hasEndpoint || hasCurrentEndpointType || isAssistantSwitch;

  const hasNewEndpoint = modularEndpoints.has(newEndpoint ?? '');
  const hasNewEndpointType = modularEndpoints.has(newEndpointType ?? '');
  const isNewModular = hasNewEndpoint || hasNewEndpointType || isAssistantSwitch;

  const endpointsMatch = currentEndpoint === newEndpoint;
  const shouldSwitch = endpointsMatch || modularChat || isAssistantSwitch;

  return {
    template,
    shouldSwitch,
    isExistingConversation,
    isCurrentModular,
    newEndpointType,
    isNewModular,
  };
}
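
// Usage sketch (illustrative; `conversation` and `endpointsConfig` are assumed to come
// from app state, and the exact values are not from the original file):
//
//   const { template, shouldSwitch, isNewModular } = getConvoSwitchLogic({
//     conversation,
//     newEndpoint: EModelEndpoint.assistants,
//     endpointsConfig,
//     modularChat: true,
//   });
//   // `template` copies the current conversation onto the new endpoint with
//   // conversationId reset to 'new'; `shouldSwitch` is true here since modularChat is true.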

/**
 * Gets the default spec by order.
 *
 * First, the admin defined default, then last selected spec, followed by first spec
 */
export function getDefaultModelSpec(modelSpecs?: TModelSpec[]) {
  const defaultSpec = modelSpecs?.find((spec) => spec.default);
  const lastSelectedSpecName = localStorage.getItem(LocalStorageKeys.LAST_SPEC);
  const lastSelectedSpec = modelSpecs?.find((spec) => spec.name === lastSelectedSpecName);
  return defaultSpec || lastSelectedSpec || modelSpecs?.[0];
}
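
// Usage sketch (illustrative, not part of the original file): resolution order is the
// admin-defined default spec, then the spec last selected (LocalStorageKeys.LAST_SPEC),
// then the first spec in the list.
//
//   const spec = getDefaultModelSpec(modelSpecs); // modelSpecs: TModelSpec[] from config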

/**
 * Gets the model spec iconURL by order of definition: the spec's own iconURL,
 * then the preset iconURL, then the preset endpoint.
 */
export function getModelSpecIconURL(modelSpec: TModelSpec) {
  return modelSpec.iconURL ?? modelSpec.preset.iconURL ?? modelSpec.preset.endpoint ?? '';
}

/**
 * Gets the default frontend-facing endpoint, dependent on iconURL definition.
 *
 * If the iconURL matches an endpoint defined in the config, use it; otherwise use the endpoint.
 */
export function getIconEndpoint({
  endpointsConfig,
  iconURL,
  endpoint,
}: {
  endpointsConfig: TEndpointsConfig | undefined;
  iconURL: string | undefined;
  endpoint: string | null | undefined;
}) {
  return (endpointsConfig?.[iconURL ?? ''] ? iconURL ?? endpoint : endpoint) ?? '';
}
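
// Usage sketch (illustrative; endpoint names are made up, not from the original file):
// the iconURL doubles as an endpoint key, so it wins only when it names a configured endpoint.
//
//   getIconEndpoint({ endpointsConfig, iconURL: 'openAI', endpoint: 'MyCustom' });
//   // => 'openAI' if endpointsConfig.openAI exists, otherwise 'MyCustom'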

/** Gets the key to use for the default endpoint iconURL, as defined by the custom config */
export function getIconKey({
  endpoint,
  endpointType: _eType,
  endpointsConfig,
  endpointIconURL: iconURL,
}: {
  endpoint?: string | null;
  endpointsConfig?: TEndpointsConfig | undefined;
  endpointType?: string | null;
  endpointIconURL?: string;
}) {
  const endpointType = _eType ?? getEndpointField(endpointsConfig, endpoint, 'type');
  const endpointIconURL = iconURL ?? getEndpointField(endpointsConfig, endpoint, 'iconURL');

  if (endpointIconURL && EModelEndpoint[endpointIconURL]) {
    return endpointIconURL;
  }

  return endpointType ? 'unknown' : endpoint ?? 'unknown';
}
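
// Usage sketch (illustrative; the endpoint names and the absence of configured overrides
// are assumptions, not from the original file):
//
//   getIconKey({ endpoint: 'openAI', endpointsConfig });
//   // => 'openAI' (no `type` or `iconURL` override, so the endpoint itself is the key)
//
//   getIconKey({ endpoint: 'MyCustom', endpointType: 'custom', endpointsConfig });
//   // => 'unknown' (a typed custom endpoint whose iconURL is not an EModelEndpoint key)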