🎨 feat: UI Refresh for Enhanced UX (#6346)

* feat: Add Expand Chat functionality and improve UI components

* feat: Introduce Chat Badges feature with editing capabilities and UI enhancements

* feat: re-implement file attachment functionality with new components and improved UI

* feat: Enhance BadgeRow component with drag-and-drop functionality and add animations for better user experience

* feat: Add useChatBadges hook and enhance Badge component with animations and toggle functionality

* feat: Improve Add/Delete Badges + style and bug fixes

* feat: Refactor EditBadges component and optimize useChatBadges hook for improved performance and readability

* feat: Add type definition for LucideIcon in EditBadges component

* refactor: Clean up BadgeRow component by removing outdated comment and improving code readability

* refactor: Rename app-icon class to badge-icon for consistency and improve badge styling

* feat: Add Center Chat Input toggle and update related components for improved UI/UX

* refactor: Simplify ChatView and MessagesView components for improved readability and performance

* refactor: Improve layout and positioning of scroll button in MessagesView component

* refactor: Adjust scroll button position in MessagesView component for better visibility

* refactor: Remove redundant background class from Badge component for cleaner styling

* feat: disable chat badges

* refactor: adjust positioning of scroll button and popover for improved layout

* refactor: simplify class names in ChatForm and RemoveFile components for cleaner code

* refactor: move Switcher to HeaderOptions from SidePanel

* fix(Landing): duplicate description

* feat: add SplitText component for animated text display and update Landing component to use it

* feat(Chat): add ConversationStarters component and integrate it into ChatView; remove ConvoStarter component

* feat(Chat): enhance Message component layout and styling for improved readability

* feat(ControlCombobox, Select): enhance styling and add animation for improved UI experience

* feat(Chat): update Header and HeaderNewChat components for improved layout and styling

* feat(Chat): add ModelDropdown (now includes both endpoint and model) and refactor Menu components for improved UI

* feat(ModelDropdown): add Agent Select; removed old AgentSwitcher components

* feat(ModelDropdown): add settings button for user key configuration

* fix(ModelDropdown): the model dropdown wasn't opening automatically when opening the endpoint one

* refactor(Chat): remove unused EndpointsMenu and related components to streamline codebase

* feat: enhance greeting message and improve accessibility for ModelDropdown

* refactor(Endpoints): add new hooks and components for endpoint management

* feat(Endpoint): add support for modelSpecs

* feat(Endpoints): add mobile support

* fix: type issues

* fix(modelSpec): type issue

* fix(EndpointMenuDropdown): double overflow scroller in mobile model list

* fix: search model on mobile

* refactor: Endpoint/Model/modelSpec dropdown

* refactor: reorganize imports in Endpoint components

* refactor: remove unused translation keys from English locale

* BREAKING: moving to ariakit with new CustomMenu

* refactor: remove unnecessary comments

* refactor: remove EndpointItem, ModelDropdownButton, SpecIcon, and SpecItem components

* 🔧 fix: AI Icon bump when regenerating message

* wip: chat UI refactoring, fix issues

* chore: add recent update to useAutoSave

* feat: add access control for agent permissions in useMentions hook

* refactor: streamline ModelSelector by removing unused endpoints logic

* refactor: enhance ModelSelector and context by integrating endpointsConfig and improving type usage

* feat: update ModelSelectorContext to utilize conversation data for initial state

* feat: add selector effects for synced endpoint handling

* feat: add guard clause for conversation endpoint in useSelectorEffects hook

* fix: safely call onSelectMention and add autofocus to mention input

* chore: typing

* refactor: ModelSelector to streamline key dialog handling and improve endpoint rendering

* refactor: extract SettingsButton component for cleaner endpoint item rendering

* wip: first pass, expand set api key

* wip: first pass, expanding set key

* refactor: update EndpointItem styles for improved layout and hover effects

* refactor: adjust padding in EndpointItem for improved layout consistency

* refactor: update preset structure in useSelectMention to include spec as null

* refactor: rename setKeyDialogOpen to onOpenChange for clarity and consistency, bring focus back to button that opened dialog

* feat: add SpecIcon component for dynamic model spec icons in menu, adjust icon styling

* refactor: update getSelectedIcon to accept additional parameters and improve icon rendering logic

* fix: adjust padding in MessageRender for improved layout

* refactor: remove inline style for menu width in CustomMenu component

* refactor: enhance layout and styling in ModelSpecItem component for better responsiveness

* refactor: update getDefaultModelSpec to accept startupConfig and improve model spec retrieval logic

* refactor: improve key management and default values in ModelSelector and related components

* refactor: adjust menu width and improve responsiveness in CustomMenu and EndpointItem components

* refactor: enhance focus styles and responsiveness in EndpointItem component

* refactor: improve layout and spacing in Header and ModelSelector components for better responsiveness

* refactor: adjust button styles for consistency and improved layout in AddMultiConvo and PresetsMenu components

* fix: initial fix of assistant names

* fix: assistants handling

* chore: update version of librechat-data-provider to 0.7.75 and add 'spec' to excludedKeys

* fix: improve endpoint filtering logic based on interface configuration and access rights

* fix: remove unused HeaderOptions import and set spec to null in presets and mentions

* fix: ensure currentExample is always an object when updating examples

* fix: update interfaceConfig checks to ensure modelSelect is considered for rendering components

* fix: update model selection logic to consider interface configuration when prioritizing model specs

* fix: add missing localizations

* fix: remove unused agent and assistant selection translations

* fix: implement debounced state updates for selected values in useSelectorEffects

* style: minor style changes related to the ModelSelector

* fix: adjust maximum height for popover and set fixed height for model item

* fix: update placeholders for model and endpoint search inputs

* fix: refactor MessageRender and ContentRender components to better match each other

* fix: remove convo fallback for iconURL in MessageRender and ContentRender components

* fix: update handling of spec, iconURL, and modelLabel in conversation presets, to allow better interchangeability

* fix: replace chatGptLabel with modelLabel in OpenAI settings configuration (fully deprecate chatGptLabel)

* fix: remove console log for assistantNames in useEndpoints hook

* refactor: add cleanInput and cleanOutput options to default conversation handling

* chore: update bun.lockb

* fix: set default value for showIconInHeader in getSelectedIcon function

* refactor: enhance error handling in message processing when latest message has existing content blocks

* chore: allow import/no-cycle for messages

* fix: adjust flex properties in BookmarkMenu for better layout

* feat: support both 'prompt' and 'q' as query parameters in useQueryParams hook

* feat: re-enable Badges components

* refactor: disable edit badge component

* chore: rename assistantMap to assistantsMap for consistency

* chore: rename assistantMap to assistantsMap for consistency in Mention component

* feat: set staleTime for various queries to improve data freshness

* feat: add spec field to tQueryParamsSchema for model specification

* feat: enhance useQueryParams to handle model specs

---------

Co-authored-by: Danny Avila <danny@librechat.ai>
Marco Beretta 2025-03-25 23:50:58 +01:00 committed by GitHub
parent c4fea9cd79
commit 7f29f2f676
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
127 changed files with 4507 additions and 2163 deletions

View file

@@ -1,3 +1,4 @@
import { excludedKeys } from 'librechat-data-provider';
import { useGetModelsQuery } from 'librechat-data-provider/react-query';
import type {
TEndpointsConfig,
@@ -8,26 +9,66 @@ import type {
import { getDefaultEndpoint, buildDefaultConvo } from '~/utils';
import { useGetEndpointsQuery } from '~/data-provider';
type TDefaultConvo = { conversation: Partial<TConversation>; preset?: Partial<TPreset> | null };
type TDefaultConvo = {
conversation: Partial<TConversation>;
preset?: Partial<TPreset> | null;
cleanInput?: boolean;
cleanOutput?: boolean;
};
const exceptions = new Set(['spec', 'iconURL']);
const useDefaultConvo = () => {
const { data: endpointsConfig = {} as TEndpointsConfig } = useGetEndpointsQuery();
const { data: modelsConfig = {} as TModelsConfig } = useGetModelsQuery();
const getDefaultConversation = ({ conversation, preset }: TDefaultConvo) => {
const getDefaultConversation = ({
conversation: _convo,
preset,
cleanInput,
cleanOutput,
}: TDefaultConvo) => {
const endpoint = getDefaultEndpoint({
convoSetup: preset as TPreset,
endpointsConfig,
});
const models = modelsConfig[endpoint] || [];
const models = modelsConfig[endpoint ?? ''] || [];
const conversation = { ..._convo };
if (cleanInput === true) {
for (const key in conversation) {
if (excludedKeys.has(key) && !exceptions.has(key)) {
continue;
}
if (conversation[key] == null) {
continue;
}
conversation[key] = undefined;
}
}
return buildDefaultConvo({
const defaultConvo = buildDefaultConvo({
conversation: conversation as TConversation,
endpoint,
lastConversationSetup: preset as TConversation,
models,
});
if (!cleanOutput) {
return defaultConvo;
}
for (const key in defaultConvo) {
if (excludedKeys.has(key) && !exceptions.has(key)) {
continue;
}
if (defaultConvo[key] == null) {
continue;
}
defaultConvo[key] = undefined;
}
return defaultConvo;
};
return getDefaultConversation;
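
A minimal usage sketch of the updated hook (not from the diff itself): the wrapper name `useSwitchPresetConvo` is hypothetical, and the real call sites are the `usePresets` and `useSelectMention` hunks further down.

```ts
import type { TConversation, TPreset } from 'librechat-data-provider';
import { useDefaultConvo } from '~/hooks';

// Hypothetical wrapper, for illustration only.
export function useSwitchPresetConvo() {
  const getDefaultConversation = useDefaultConvo();

  return (conversation: TConversation | null, preset: Partial<TPreset>) =>
    getDefaultConversation({
      conversation: { ...(conversation ?? {}) },
      preset,
      // cleanInput clears populated fields on the incoming conversation, keeping only
      // keys listed in `excludedKeys` -- except 'spec' and 'iconURL', which are always
      // cleared so the preset can supply them.
      cleanInput: true,
      // cleanOutput would run the same pass over the rebuilt default conversation instead.
      cleanOutput: false,
    });
}
```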

View file

@@ -62,7 +62,6 @@ export default function usePresets() {
}
hasLoaded.current = true;
// dependencies are stable and only needed once
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [presetsQuery.data, user, modelsData]);
const setPresets = useCallback(
@@ -182,12 +181,22 @@
endpointsConfig,
});
newPreset.spec = null;
newPreset.iconURL = newPreset.iconURL ?? null;
newPreset.modelLabel = newPreset.modelLabel ?? null;
const isModular = isCurrentModular && isNewModular && shouldSwitch;
if (isExistingConversation && isModular) {
const currentConvo = getDefaultConversation({
/* target endpointType is necessary to avoid endpoint mixing */
conversation: { ...(conversation ?? {}), endpointType: newEndpointType },
conversation: {
...(conversation ?? {}),
spec: null,
iconURL: null,
modelLabel: null,
endpointType: newEndpointType,
},
preset: { ...newPreset, endpointType: newEndpointType },
cleanInput: true,
});
/* We don't reset the latest message, only when changing settings mid-conversation */

View file

@@ -42,7 +42,7 @@ const useSetIndexOptions: TUseSetOptions = (preset = false) => {
const setExample: TSetExample = (i, type, newValue = null) => {
const update = {};
const current = conversation?.examples?.slice() || [];
const currentExample = { ...current[i] } || {};
const currentExample = { ...current[i] };
currentExample[type] = { content: newValue };
current[i] = currentExample;
update['examples'] = current;
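
The change above drops a `|| {}` fallback that could never run; a quick sketch of why, since spreading a nullish value already yields an empty object:

```ts
// Object spread of a nullish value produces {}, so the removed `|| {}` branch was unreachable.
const missing: { input?: { content: string } } | undefined = undefined;
const copy = { ...missing }; // always an object, never null/undefined
console.log(Object.keys(copy).length); // 0
```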

View file

@@ -0,0 +1,76 @@
import { EModelEndpoint } from 'librechat-data-provider';
import type { IconMapProps, AgentIconMapProps, IconsRecord } from '~/common';
import { Feather } from 'lucide-react';
import {
MinimalPlugin,
GPTIcon,
AnthropicIcon,
AzureMinimalIcon,
GoogleMinimalIcon,
CustomMinimalIcon,
AssistantIcon,
LightningIcon,
BedrockIcon,
Sparkles,
} from '~/components/svg';
import UnknownIcon from './UnknownIcon';
import { cn } from '~/utils';
const AssistantAvatar = ({
className = '',
assistantName = '',
avatar = '',
context,
size,
}: IconMapProps) => {
if (assistantName && avatar) {
return (
<img
src={avatar}
className="bg-token-surface-secondary dark:bg-token-surface-tertiary h-full w-full rounded-full object-cover"
alt={assistantName}
width="80"
height="80"
/>
);
} else if (assistantName) {
return <AssistantIcon className={cn('text-token-secondary', className)} size={size} />;
}
return <Sparkles className={cn(context === 'landing' ? 'icon-2xl' : '', className)} />;
};
const AgentAvatar = ({ className = '', avatar = '', agentName, size }: AgentIconMapProps) => {
if (agentName != null && agentName && avatar) {
return (
<img
src={avatar}
className="bg-token-surface-secondary dark:bg-token-surface-tertiary h-full w-full rounded-full object-cover"
alt={agentName}
width="80"
height="80"
/>
);
}
return <Feather className={cn(agentName === '' ? 'icon-2xl' : '', className)} size={size} />;
};
const Bedrock = ({ className = '' }: IconMapProps) => {
return <BedrockIcon className={cn(className, 'h-full w-full')} />;
};
export const icons: IconsRecord = {
[EModelEndpoint.azureOpenAI]: AzureMinimalIcon,
[EModelEndpoint.openAI]: GPTIcon,
[EModelEndpoint.gptPlugins]: MinimalPlugin,
[EModelEndpoint.anthropic]: AnthropicIcon,
[EModelEndpoint.chatGPTBrowser]: LightningIcon,
[EModelEndpoint.google]: GoogleMinimalIcon,
[EModelEndpoint.custom]: CustomMinimalIcon,
[EModelEndpoint.assistants]: AssistantAvatar,
[EModelEndpoint.azureAssistants]: AssistantAvatar,
[EModelEndpoint.agents]: AgentAvatar,
[EModelEndpoint.bedrock]: Bedrock,
unknown: UnknownIcon,
};
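
A sketch (not from the diff) of how this record is consumed: look up a component by endpoint key, fall back to `unknown`, and pass the same props the `useEndpoints` hook below passes. The helper name `renderEndpointIcon` is illustrative.

```ts
import React from 'react';
import { EModelEndpoint } from 'librechat-data-provider';
import { icons } from './Icons';

// Illustrative lookup; the real consumer resolves the key via getIconKey()
// and falls back to the `unknown` entry for custom endpoints.
export function renderEndpointIcon(endpoint: EModelEndpoint, iconURL?: string) {
  const Icon = icons[endpoint] ?? icons.unknown;
  return React.createElement(Icon, {
    size: 20,
    endpoint,
    iconURL,
    className: 'icon-md shrink-0 text-text-primary',
  });
}
```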

View file

@@ -0,0 +1,98 @@
import { memo } from 'react';
import { EModelEndpoint, KnownEndpoints } from 'librechat-data-provider';
import { CustomMinimalIcon } from '~/components/svg';
import { IconContext } from '~/common';
import { cn } from '~/utils';
const knownEndpointAssets = {
[KnownEndpoints.anyscale]: '/assets/anyscale.png',
[KnownEndpoints.apipie]: '/assets/apipie.png',
[KnownEndpoints.cohere]: '/assets/cohere.png',
[KnownEndpoints.deepseek]: '/assets/deepseek.svg',
[KnownEndpoints.fireworks]: '/assets/fireworks.png',
[KnownEndpoints.groq]: '/assets/groq.png',
[KnownEndpoints.huggingface]: '/assets/huggingface.svg',
[KnownEndpoints.mistral]: '/assets/mistral.png',
[KnownEndpoints.mlx]: '/assets/mlx.png',
[KnownEndpoints.ollama]: '/assets/ollama.png',
[KnownEndpoints.openrouter]: '/assets/openrouter.png',
[KnownEndpoints.perplexity]: '/assets/perplexity.png',
[KnownEndpoints.shuttleai]: '/assets/shuttleai.png',
[KnownEndpoints['together.ai']]: '/assets/together.png',
[KnownEndpoints.unify]: '/assets/unify.webp',
[KnownEndpoints.xai]: '/assets/xai.svg',
};
const knownEndpointClasses = {
[KnownEndpoints.cohere]: {
[IconContext.landing]: 'p-2',
},
[KnownEndpoints.xai]: {
[IconContext.landing]: 'p-2',
[IconContext.menuItem]: 'bg-white',
[IconContext.message]: 'bg-white',
[IconContext.nav]: 'bg-white',
},
};
const getKnownClass = ({
currentEndpoint,
context = '',
className,
}: {
currentEndpoint: string;
context?: string;
className: string;
}) => {
if (currentEndpoint === KnownEndpoints.openrouter) {
return className;
}
const match = knownEndpointClasses[currentEndpoint]?.[context] ?? '';
const defaultClass = context === IconContext.landing ? '' : className;
return cn(match, defaultClass);
};
function UnknownIcon({
className = '',
endpoint: _endpoint,
iconURL = '',
context,
}: {
iconURL?: string;
className?: string;
endpoint?: EModelEndpoint | string | null;
context?: 'landing' | 'menu-item' | 'nav' | 'message';
}) {
const endpoint = _endpoint ?? '';
if (!endpoint) {
return <CustomMinimalIcon className={className} />;
}
const currentEndpoint = endpoint.toLowerCase();
if (iconURL) {
return <img className={className} src={iconURL} alt={`${endpoint} Icon`} />;
}
const assetPath: string = knownEndpointAssets[currentEndpoint] ?? '';
if (!assetPath) {
return <CustomMinimalIcon className={className} />;
}
return (
<img
className={getKnownClass({
currentEndpoint,
context: context,
className,
})}
src={assetPath}
alt={`${currentEndpoint} Icon`}
/>
);
}
export default memo(UnknownIcon);

View file

@@ -0,0 +1,4 @@
export { default as useKeyDialog } from './useKeyDialog';
export { default as useModelSelection } from './useModels';
export { default as useEndpoints } from './useEndpoints';
export { default as useSelectorEffects } from './useSelectorEffects';

View file

@@ -0,0 +1,182 @@
import React, { useMemo, useCallback } from 'react';
import { useGetModelsQuery } from 'librechat-data-provider/react-query';
import {
EModelEndpoint,
PermissionTypes,
Permissions,
alternateName,
} from 'librechat-data-provider';
import type {
Agent,
Assistant,
TEndpointsConfig,
TAgentsMap,
TInterfaceConfig,
TAssistantsMap,
} from 'librechat-data-provider';
import type { ExtendedEndpoint } from '~/common';
import { mapEndpoints, getIconKey, getEndpointField } from '~/utils';
import { useGetEndpointsQuery } from '~/data-provider';
import { useChatContext } from '~/Providers';
import { useHasAccess } from '~/hooks';
import { icons } from './Icons';
export const useEndpoints = ({
agentsMap,
assistantsMap,
endpointsConfig,
interfaceConfig,
}: {
agentsMap?: TAgentsMap;
assistantsMap?: TAssistantsMap;
endpointsConfig: TEndpointsConfig;
interfaceConfig: TInterfaceConfig;
}) => {
const modelsQuery = useGetModelsQuery();
const { conversation } = useChatContext();
const { data: endpoints = [] } = useGetEndpointsQuery({ select: mapEndpoints });
const { endpoint } = conversation ?? {};
const hasAgentAccess = useHasAccess({
permissionType: PermissionTypes.AGENTS,
permission: Permissions.USE,
});
const agents = useMemo(
() =>
Object.values(agentsMap ?? {}).filter(
(agent): agent is Agent & { name: string } =>
agent !== undefined && 'id' in agent && 'name' in agent && agent.name !== null,
),
[agentsMap],
);
const assistants: Assistant[] = useMemo(
() => Object.values(assistantsMap?.[EModelEndpoint.assistants] ?? {}),
[endpoint, assistantsMap],
);
const azureAssistants: Assistant[] = useMemo(
() => Object.values(assistantsMap?.[EModelEndpoint.azureAssistants] ?? {}),
[endpoint, assistantsMap],
);
const filteredEndpoints = useMemo(() => {
if (!interfaceConfig.modelSelect) {
return [];
}
const result: EModelEndpoint[] = [];
for (let i = 0; i < endpoints.length; i++) {
if (endpoints[i] === EModelEndpoint.agents && !hasAgentAccess) {
continue;
}
result.push(endpoints[i]);
}
return result;
}, [endpoints, hasAgentAccess]);
const endpointRequiresUserKey = useCallback(
(ep: string) => {
return !!getEndpointField(endpointsConfig, ep, 'userProvide');
},
[endpointsConfig],
);
const mappedEndpoints: ExtendedEndpoint[] = useMemo(() => {
return filteredEndpoints.map((ep) => {
const endpointType = getEndpointField(endpointsConfig, ep, 'type');
const iconKey = getIconKey({ endpoint: ep, endpointsConfig, endpointType });
const Icon = icons[iconKey];
const endpointIconURL = getEndpointField(endpointsConfig, ep, 'iconURL');
const hasModels =
(ep === EModelEndpoint.agents && agents?.length > 0) ||
(ep === EModelEndpoint.assistants && assistants?.length > 0) ||
(ep !== EModelEndpoint.assistants &&
ep !== EModelEndpoint.agents &&
(modelsQuery.data?.[ep]?.length ?? 0) > 0);
// Base result object with formatted default icon
const result: ExtendedEndpoint = {
value: ep,
label: alternateName[ep] || ep,
hasModels,
icon: Icon
? React.createElement(Icon, {
size: 20,
className: 'text-text-primary shrink-0 icon-md',
iconURL: endpointIconURL,
endpoint: ep,
})
: null,
};
// Handle agents case
if (ep === EModelEndpoint.agents && agents.length > 0) {
result.models = agents.map((agent) => agent.id);
result.agentNames = agents.reduce((acc, agent) => {
acc[agent.id] = agent.name || '';
return acc;
}, {});
result.modelIcons = agents.reduce((acc, agent) => {
acc[agent.id] = agent?.avatar?.filepath;
return acc;
}, {});
}
// Handle assistants case
else if (ep === EModelEndpoint.assistants && assistants.length > 0) {
result.models = assistants.map((assistant: { id: string }) => assistant.id);
result.assistantNames = assistants.reduce(
(acc: Record<string, string>, assistant: Assistant) => {
acc[assistant.id] = assistant.name || '';
return acc;
},
{},
);
result.modelIcons = assistants.reduce(
(acc: Record<string, string | undefined>, assistant: Assistant) => {
acc[assistant.id] = assistant.metadata?.avatar;
return acc;
},
{},
);
} else if (ep === EModelEndpoint.azureAssistants && azureAssistants.length > 0) {
result.models = azureAssistants.map((assistant: { id: string }) => assistant.id);
result.assistantNames = azureAssistants.reduce(
(acc: Record<string, string>, assistant: Assistant) => {
acc[assistant.id] = assistant.name || '';
return acc;
},
{},
);
result.modelIcons = azureAssistants.reduce(
(acc: Record<string, string | undefined>, assistant: Assistant) => {
acc[assistant.id] = assistant.metadata?.avatar;
return acc;
},
{},
);
}
// For other endpoints with models from the modelsQuery
else if (
ep !== EModelEndpoint.agents &&
ep !== EModelEndpoint.assistants &&
(modelsQuery.data?.[ep]?.length ?? 0) > 0
) {
result.models = modelsQuery.data?.[ep];
}
return result;
});
}, [filteredEndpoints, endpointsConfig, modelsQuery.data, agents, assistants]);
return {
mappedEndpoints,
endpointRequiresUserKey,
};
};
export default useEndpoints;
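
A hypothetical consumer of the hook (not from the diff). The real ModelSelector obtains the maps and configs from its providers, so the prop-based wiring and the `EndpointList` name are assumptions made to keep the sketch self-contained.

```tsx
import React from 'react';
import type {
  TAgentsMap,
  TAssistantsMap,
  TEndpointsConfig,
  TInterfaceConfig,
} from 'librechat-data-provider';
import { useEndpoints } from './useEndpoints';

// Hypothetical consumer: maps/configs arrive as props instead of from providers.
export default function EndpointList(props: {
  agentsMap?: TAgentsMap;
  assistantsMap?: TAssistantsMap;
  endpointsConfig: TEndpointsConfig;
  interfaceConfig: TInterfaceConfig;
}) {
  const { mappedEndpoints, endpointRequiresUserKey } = useEndpoints(props);

  return (
    <ul>
      {mappedEndpoints.map((ep) => (
        <li key={ep.value}>
          {ep.icon}
          <span>{ep.label}</span>
          {endpointRequiresUserKey(ep.value) && <span> (user-provided key required)</span>}
          {/* hasModels gates whether a submenu of ep.models would be rendered */}
          {ep.hasModels && <span> {ep.models?.length ?? 0} models</span>}
        </li>
      ))}
    </ul>
  );
}
```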

View file

@@ -0,0 +1,38 @@
import { useState, useCallback } from 'react';
import { EModelEndpoint } from 'librechat-data-provider';
export const useKeyDialog = () => {
const [keyDialogOpen, setKeyDialogOpen] = useState(false);
const [keyDialogEndpoint, setKeyDialogEndpoint] = useState<EModelEndpoint | null>(null);
const handleOpenKeyDialog = useCallback(
(ep: EModelEndpoint, e: React.MouseEvent | React.KeyboardEvent) => {
e.preventDefault();
e.stopPropagation();
setKeyDialogEndpoint(ep);
setKeyDialogOpen(true);
},
[],
);
const onOpenChange = (open: boolean) => {
if (!open && keyDialogEndpoint) {
const button = document.getElementById(`endpoint-${keyDialogEndpoint}-settings`);
if (button) {
setTimeout(() => {
button.focus();
}, 5);
}
}
setKeyDialogOpen(open);
};
return {
keyDialogOpen,
keyDialogEndpoint,
onOpenChange,
handleOpenKeyDialog,
};
};
export default useKeyDialog;
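
A minimal wiring sketch (not from the diff): the settings button id must match the `endpoint-${endpoint}-settings` id that `onOpenChange` refocuses after the dialog closes. The `EndpointSettings` component and the placeholder dialog markup are illustrative; the real UI renders the key dialog component here.

```tsx
import React from 'react';
import { EModelEndpoint } from 'librechat-data-provider';
import useKeyDialog from './useKeyDialog';

// The button id must match the id that onOpenChange focuses once the dialog closes.
export default function EndpointSettings({ endpoint }: { endpoint: EModelEndpoint }) {
  const { keyDialogOpen, keyDialogEndpoint, onOpenChange, handleOpenKeyDialog } = useKeyDialog();

  return (
    <>
      <button
        id={`endpoint-${endpoint}-settings`}
        onClick={(e) => handleOpenKeyDialog(endpoint, e)}
      >
        Set API key
      </button>
      {/* Placeholder for the real key dialog component */}
      {keyDialogOpen && keyDialogEndpoint === endpoint && (
        <div role="dialog" aria-label="Set API key">
          <button onClick={() => onOpenChange(false)}>Close</button>
        </div>
      )}
    </>
  );
}
```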

View file

@@ -0,0 +1,277 @@
import { useCallback, useRef, useContext, useMemo } from 'react';
import { EModelEndpoint, LocalStorageKeys } from 'librechat-data-provider';
import { getConvoSwitchLogic } from '~/utils';
import { mainTextareaId } from '~/common';
import { useRecoilState, useRecoilValue } from 'recoil';
import { useSetIndexOptions, useDefaultConvo } from '~/hooks';
import { useChatContext, useAssistantsMapContext } from '~/Providers';
import { useGetEndpointsQuery } from '~/data-provider';
import store from '~/store';
export const useModelSelection = () => {
const { setOption } = useSetIndexOptions();
const getDefaultConversation = useDefaultConvo();
const { conversation, newConversation, index } = useChatContext();
const { data: endpointsConfig = {} } = useGetEndpointsQuery();
const modularChat = useRecoilValue(store.modularChat);
const assistantsMapResult = useAssistantsMapContext();
const assistantsMap = useMemo(() => assistantsMapResult ?? {}, [assistantsMapResult]);
const timeoutIdRef = useRef<NodeJS.Timeout | undefined>(undefined);
const setAgentId = useCallback(
(agentId: string) => {
setOption('agent_id')(agentId);
localStorage.setItem(`${LocalStorageKeys.AGENT_ID_PREFIX}${index}`, agentId);
clearTimeout(timeoutIdRef.current);
timeoutIdRef.current = setTimeout(() => {
const textarea = document.getElementById(mainTextareaId);
if (textarea) {
textarea.focus();
}
}, 150);
},
[setOption, index, timeoutIdRef],
);
const setAssistantId = useCallback(
(endpoint: string, assistantId: string) => {
const assistant = assistantsMap[endpoint]?.[assistantId];
if (assistant) {
setOption('model')(assistant.model);
setOption('assistant_id')(assistantId);
localStorage.setItem(`${LocalStorageKeys.ASST_ID_PREFIX}${index}${endpoint}`, assistantId);
}
clearTimeout(timeoutIdRef.current);
timeoutIdRef.current = setTimeout(() => {
const textarea = document.getElementById(mainTextareaId);
if (textarea) {
textarea.focus();
}
}, 150);
},
[setOption, index, assistantsMap, timeoutIdRef],
);
const setModel = useCallback(
(model: string) => {
setOption('model')(model);
clearTimeout(timeoutIdRef.current);
timeoutIdRef.current = setTimeout(() => {
const textarea = document.getElementById(mainTextareaId);
if (textarea) {
textarea.focus();
}
}, 150);
},
[setOption, timeoutIdRef],
);
const handleModelSelect = useCallback(
(ep: EModelEndpoint, selectedModel: string) => {
if (ep === EModelEndpoint.assistants) {
if (conversation?.endpoint === ep) {
setAssistantId(ep, selectedModel);
return;
}
const { template } = getConvoSwitchLogic({
newEndpoint: ep,
modularChat: false,
conversation,
endpointsConfig,
});
const assistant = assistantsMap[ep]?.[selectedModel];
const currentConvo = getDefaultConversation({
conversation: {
...conversation,
endpoint: ep,
assistant_id: selectedModel,
model: assistant?.model || '',
},
preset: {
...template,
endpoint: ep,
assistant_id: selectedModel,
model: assistant?.model || '',
},
});
newConversation({
template: currentConvo,
preset: currentConvo,
keepLatestMessage: true,
});
return;
}
if (ep === EModelEndpoint.agents) {
if (conversation?.endpoint === ep) {
setAgentId(selectedModel);
return;
}
const { template } = getConvoSwitchLogic({
newEndpoint: ep,
modularChat: false,
conversation,
endpointsConfig,
});
const currentConvo = getDefaultConversation({
conversation: { ...conversation, endpoint: ep, agent_id: selectedModel },
preset: { ...template, endpoint: ep, agent_id: selectedModel },
});
newConversation({
template: currentConvo,
preset: currentConvo,
keepLatestMessage: true,
});
return;
}
const {
template,
shouldSwitch,
isNewModular,
newEndpointType,
isCurrentModular,
isExistingConversation,
} = getConvoSwitchLogic({
newEndpoint: ep,
modularChat,
conversation,
endpointsConfig,
});
const isModular = isCurrentModular && isNewModular && shouldSwitch;
if (isExistingConversation && isModular) {
template.endpointType = newEndpointType;
const currentConvo = getDefaultConversation({
conversation: { ...(conversation ?? {}), endpointType: template.endpointType },
preset: template,
});
newConversation({
template: currentConvo,
preset: currentConvo,
keepLatestMessage: true,
keepAddedConvos: true,
});
return;
}
newConversation({
template: { ...(template as any) },
keepAddedConvos: isModular,
});
setModel(selectedModel);
},
[
conversation,
endpointsConfig,
modularChat,
newConversation,
getDefaultConversation,
setModel,
setAgentId,
setAssistantId,
assistantsMap,
],
);
const handleEndpointSelect = useCallback(
(ep: string, hasModels: boolean, agents: any[], assistants: any[], modelsData: any) => {
if (hasModels) {
if (conversation?.endpoint !== ep) {
const newEndpoint = ep as EModelEndpoint;
const { template } = getConvoSwitchLogic({
newEndpoint,
modularChat: false,
conversation,
endpointsConfig,
});
let initialModel = '';
let initialAgentId = '';
let initialAssistantId = '';
if (newEndpoint === EModelEndpoint.agents && agents.length > 0) {
initialAgentId = agents[0].id;
} else if (newEndpoint === EModelEndpoint.assistants && assistants.length > 0) {
initialAssistantId = assistants[0].id;
initialModel = assistantsMap[newEndpoint]?.[initialAssistantId]?.model || '';
} else if (modelsData && modelsData[newEndpoint] && modelsData[newEndpoint].length > 0) {
initialModel = modelsData[newEndpoint][0];
}
const currentConvo = getDefaultConversation({
conversation: {
...conversation,
endpoint: newEndpoint,
model: initialModel,
agent_id: initialAgentId,
assistant_id: initialAssistantId,
},
preset: {
...template,
endpoint: newEndpoint,
model: initialModel,
agent_id: initialAgentId,
assistant_id: initialAssistantId,
},
});
newConversation({
template: currentConvo,
preset: currentConvo,
keepLatestMessage: true,
});
}
return;
}
if (!hasModels) {
const newEndpoint = ep as EModelEndpoint;
const { template } = getConvoSwitchLogic({
newEndpoint,
modularChat: false,
conversation,
endpointsConfig,
});
const currentConvo = getDefaultConversation({
conversation: { ...conversation, endpoint: newEndpoint },
preset: { ...template, endpoint: newEndpoint },
});
newConversation({
template: currentConvo,
preset: currentConvo,
keepLatestMessage: true,
});
}
},
[
conversation,
endpointsConfig,
newConversation,
getDefaultConversation,
assistantsMap,
modularChat,
],
);
return {
handleModelSelect,
handleEndpointSelect,
setAgentId,
setAssistantId,
setModel,
};
};
export default useModelSelection;
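
A sketch of the two handlers in use (not from the diff), assuming the new barrel re-exports `useModelSelection` through `~/hooks`; the empty agent/assistant arrays and the `useMenuClickHandlers` name are placeholders.

```ts
import { EModelEndpoint } from 'librechat-data-provider';
import { useGetModelsQuery } from 'librechat-data-provider/react-query';
import { useModelSelection } from '~/hooks';

// Hypothetical click handlers for a model item and an endpoint header.
export function useMenuClickHandlers() {
  const modelsQuery = useGetModelsQuery();
  const { handleModelSelect, handleEndpointSelect } = useModelSelection();

  const onModelClick = (endpoint: EModelEndpoint, model: string) =>
    handleModelSelect(endpoint, model);

  // handleEndpointSelect also needs the agent/assistant lists so it can pick an
  // initial id/model when switching to an endpoint that has them; empty arrays
  // are stand-ins here.
  const onEndpointClick = (endpoint: string, hasModels: boolean) =>
    handleEndpointSelect(endpoint, hasModels, [], [], modelsQuery.data);

  return { onModelClick, onEndpointClick };
}
```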

View file

@@ -0,0 +1,96 @@
import React, { useMemo, useEffect, useRef } from 'react';
import { isAgentsEndpoint, isAssistantsEndpoint, LocalStorageKeys } from 'librechat-data-provider';
import type * as t from 'librechat-data-provider';
import type { SelectedValues } from '~/common';
import useSetIndexOptions from '~/hooks/Conversations/useSetIndexOptions';
export default function useSelectorEffects({
index = 0,
agentsMap,
conversation,
assistantsMap,
setSelectedValues,
}: {
index?: number;
agentsMap: t.TAgentsMap | undefined;
assistantsMap: t.TAssistantsMap | undefined;
conversation: t.TConversation | null;
setSelectedValues: React.Dispatch<React.SetStateAction<SelectedValues>>;
}) {
const { setOption } = useSetIndexOptions();
const agents: t.Agent[] = useMemo(() => {
return Object.values(agentsMap ?? {}) as t.Agent[];
}, [agentsMap]);
const { agent_id: selectedAgentId = null, endpoint } = conversation ?? {};
useEffect(() => {
if (selectedAgentId == null && agents.length > 0) {
let agent_id = localStorage.getItem(`${LocalStorageKeys.AGENT_ID_PREFIX}${index}`);
if (agent_id == null) {
agent_id = agents[0].id;
}
const agent = agentsMap?.[agent_id];
if (agent !== undefined && isAgentsEndpoint(endpoint as string) === true) {
setOption('model')('');
setOption('agent_id')(agent_id);
}
}
}, [index, agents, selectedAgentId, agentsMap, endpoint, setOption]);
const debounceTimeoutRef = useRef<NodeJS.Timeout | null>(null);
const debouncedSetSelectedValues = (values: SelectedValues) => {
if (debounceTimeoutRef.current) {
clearTimeout(debounceTimeoutRef.current);
}
debounceTimeoutRef.current = setTimeout(() => {
setSelectedValues(values);
}, 150);
};
useEffect(() => {
if (!conversation?.endpoint) {
return;
}
if (
conversation?.assistant_id ||
conversation?.agent_id ||
conversation?.model ||
conversation?.spec
) {
if (isAgentsEndpoint(conversation?.endpoint)) {
debouncedSetSelectedValues({
endpoint: conversation.endpoint || '',
model: conversation.agent_id ?? '',
modelSpec: '',
});
return;
} else if (isAssistantsEndpoint(conversation?.endpoint)) {
debouncedSetSelectedValues({
endpoint: conversation.endpoint || '',
model: conversation.assistant_id || '',
modelSpec: conversation.spec || '',
});
return;
}
debouncedSetSelectedValues({
endpoint: conversation.endpoint || '',
model: conversation.model || '',
modelSpec: conversation.spec || '',
});
}
return () => {
if (debounceTimeoutRef.current) {
clearTimeout(debounceTimeoutRef.current);
}
};
}, [
conversation?.spec,
conversation?.model,
conversation?.endpoint,
conversation?.agent_id,
conversation?.assistant_id,
]);
}
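
A hypothetical host for this effect hook (not from the diff): local selection state lives in the component and the hook keeps it debounced-in-sync with the active conversation. The `useSelectorState` name and the initial-state shape are assumptions modeled on the ModelSelector context changes described in the commit list.

```ts
import { useState } from 'react';
import type * as t from 'librechat-data-provider';
import type { SelectedValues } from '~/common';
import useSelectorEffects from './useSelectorEffects';

// Selection state lives in the host; the effect hook keeps it in sync (debounced).
export function useSelectorState({
  conversation,
  agentsMap,
  assistantsMap,
}: {
  conversation: t.TConversation | null;
  agentsMap?: t.TAgentsMap;
  assistantsMap?: t.TAssistantsMap;
}) {
  const [selectedValues, setSelectedValues] = useState<SelectedValues>({
    endpoint: conversation?.endpoint ?? '',
    model: conversation?.model ?? '',
    modelSpec: conversation?.spec ?? '',
  });

  useSelectorEffects({ agentsMap, conversation, assistantsMap, setSelectedValues });

  return [selectedValues, setSelectedValues] as const;
}
```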

View file

@@ -1,8 +1,10 @@
import { useMemo } from 'react';
import { useGetModelsQuery } from 'librechat-data-provider/react-query';
import {
Permissions,
alternateName,
EModelEndpoint,
PermissionTypes,
isAgentsEndpoint,
getConfigDefaults,
isAssistantsEndpoint,
@@ -18,6 +20,7 @@ import {
import useAssistantListMap from '~/hooks/Assistants/useAssistantListMap';
import { mapEndpoints, getPresetTitle } from '~/utils';
import { EndpointIcon } from '~/components/Endpoints';
import useHasAccess from '~/hooks/Roles/useHasAccess';
const defaultInterface = getConfigDefaults().interface;
@@ -53,6 +56,11 @@ export default function useMentions({
assistantMap: TAssistantsMap;
includeAssistants: boolean;
}) {
const hasAgentAccess = useHasAccess({
permissionType: PermissionTypes.AGENTS,
permission: Permissions.USE,
});
const { data: presets } = useGetPresetsQuery();
const { data: modelsConfig } = useGetModelsQuery();
const { data: startupConfig } = useGetStartupConfig();
@@ -67,7 +75,12 @@
description,
})),
);
const interfaceConfig = useMemo(
() => startupConfig?.interface ?? defaultInterface,
[startupConfig?.interface],
);
const { data: agentsList = null } = useListAgentsQuery(undefined, {
enabled: hasAgentAccess && interfaceConfig.modelSelect === true,
select: (res) => {
const { data } = res;
return data.map(({ id, name, avatar }) => ({
@@ -113,10 +126,6 @@
);
const modelSpecs = useMemo(() => startupConfig?.modelSpecs?.list ?? [], [startupConfig]);
const interfaceConfig = useMemo(
() => startupConfig?.interface ?? defaultInterface,
[startupConfig],
);
const options: MentionOption[] = useMemo(() => {
let validEndpoints = endpoints;
@@ -129,6 +138,10 @@
return [];
}
if (interfaceConfig.modelSelect !== true) {
return [];
}
const models = (modelsConfig?.[endpoint] ?? []).map((model) => ({
value: endpoint,
label: model,
@@ -159,7 +172,7 @@
}),
type: 'modelSpec' as const,
})),
...(interfaceConfig.endpointsMenu === true ? validEndpoints : []).map((endpoint) => ({
...(interfaceConfig.modelSelect === true ? validEndpoints : []).map((endpoint) => ({
value: endpoint,
label: alternateName[endpoint as string] ?? endpoint ?? '',
type: 'endpoint' as const,
@@ -170,14 +183,21 @@
size: 20,
}),
})),
...(agentsList ?? []),
...(endpointsConfig?.[EModelEndpoint.assistants] && includeAssistants
...(interfaceConfig.modelSelect === true ? (agentsList ?? []) : []),
...(endpointsConfig?.[EModelEndpoint.assistants] &&
includeAssistants &&
interfaceConfig.modelSelect === true
? assistantListMap[EModelEndpoint.assistants] || []
: []),
...(endpointsConfig?.[EModelEndpoint.azureAssistants] && includeAssistants
...(endpointsConfig?.[EModelEndpoint.azureAssistants] &&
includeAssistants &&
interfaceConfig.modelSelect === true
? assistantListMap[EModelEndpoint.azureAssistants] || []
: []),
...((interfaceConfig.presets === true ? presets : [])?.map((preset, index) => ({
...((interfaceConfig.modelSelect === true && interfaceConfig.presets === true
? presets
: []
)?.map((preset, index) => ({
value: preset.presetId ?? `preset-${index}`,
label: preset.title ?? preset.modelLabel ?? preset.chatGptLabel ?? '',
description: getPresetTitle(preset, true),

View file

@@ -9,9 +9,9 @@ import {
tQueryParamsSchema,
isAssistantsEndpoint,
} from 'librechat-data-provider';
import type { TPreset, TEndpointsConfig } from 'librechat-data-provider';
import type { TPreset, TEndpointsConfig, TStartupConfig } from 'librechat-data-provider';
import type { ZodAny } from 'zod';
import { getConvoSwitchLogic, removeUnavailableTools } from '~/utils';
import { getConvoSwitchLogic, getModelSpecIconURL, removeUnavailableTools } from '~/utils';
import useDefaultConvo from '~/hooks/Conversations/useDefaultConvo';
import { useChatContext, useChatFormContext } from '~/Providers';
import useSubmitMessage from '~/hooks/Messages/useSubmitMessage';
@@ -87,8 +87,20 @@ export default function useQueryParams({
if (!_newPreset) {
return;
}
let newPreset = removeUnavailableTools(_newPreset, availableTools);
if (newPreset.spec != null && newPreset.spec !== '') {
const startupConfig = queryClient.getQueryData<TStartupConfig>([QueryKeys.startupConfig]);
const modelSpecs = startupConfig?.modelSpecs?.list ?? [];
const spec = modelSpecs.find((s) => s.name === newPreset.spec);
if (!spec) {
return;
}
const { preset } = spec;
preset.iconURL = getModelSpecIconURL(spec);
preset.spec = spec.name;
newPreset = preset;
}
const newPreset = removeUnavailableTools(_newPreset, availableTools);
let newEndpoint = newPreset.endpoint ?? '';
const endpointsConfig = queryClient.getQueryData<TEndpointsConfig>([QueryKeys.endpoints]);
@@ -122,14 +134,28 @@
endpointsConfig,
});
let resetParams = {};
if (newPreset.spec == null) {
template.spec = null;
template.iconURL = null;
template.modelLabel = null;
resetParams = { spec: null, iconURL: null, modelLabel: null };
newPreset = { ...newPreset, ...resetParams };
}
const isModular = isCurrentModular && isNewModular && shouldSwitch;
if (isExistingConversation && isModular) {
template.endpointType = newEndpointType as EModelEndpoint | undefined;
const currentConvo = getDefaultConversation({
/* target endpointType is necessary to avoid endpoint mixing */
conversation: { ...(conversation ?? {}), endpointType: template.endpointType },
conversation: {
...(conversation ?? {}),
endpointType: template.endpointType,
...resetParams,
},
preset: template,
cleanOutput: newPreset.spec != null && newPreset.spec !== '',
});
/* We don't reset the latest message, only when changing settings mid-conversation */
@@ -161,9 +187,11 @@
queryParams[key] = value;
});
const decodedPrompt = queryParams.prompt || '';
// Support both 'prompt' and 'q' as query parameters, with 'prompt' taking precedence
const decodedPrompt = queryParams.prompt || queryParams.q || '';
const shouldAutoSubmit = queryParams.submit?.toLowerCase() === 'true';
delete queryParams.prompt;
delete queryParams.q;
delete queryParams.submit;
const validSettings = processValidSettings(queryParams);
@@ -184,6 +212,10 @@
if (!textAreaRef.current) {
return;
}
const startupConfig = queryClient.getQueryData<TStartupConfig>([QueryKeys.startupConfig]);
if (!startupConfig) {
return;
}
const { decodedPrompt, validSettings, shouldAutoSubmit } = processQueryParams();
const currentText = methods.getValues('text');
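
To illustrate the new query-string handling (the route path and spec name are examples only, not from the diff):

```ts
// Example query strings handled after this change:
//
//   /c/new?prompt=Summarize%20this%20page&submit=true
//   /c/new?q=Summarize%20this%20page          ('q' is an alias for 'prompt')
//   /c/new?spec=my-model-spec&q=Hello         ('spec' resolves a configured modelSpec)
//
// 'prompt' wins when both are present, and both keys are removed before the
// remaining params are validated as settings.
const params = new URLSearchParams({ q: 'Summarize this page', submit: 'true' });
console.log(`/c/new?${params.toString()}`);
```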

View file

@@ -17,15 +17,17 @@ import store from '~/store';
export default function useSelectMention({
presets,
modelSpecs,
assistantMap,
assistantsMap,
endpointsConfig,
newConversation,
returnHandlers,
}: {
presets?: TPreset[];
modelSpecs: TModelSpec[];
assistantMap?: TAssistantsMap;
assistantsMap?: TAssistantsMap;
newConversation: ConvoGenerator;
endpointsConfig: TEndpointsConfig;
returnHandlers?: boolean;
}) {
const { conversation } = useChatContext();
const getDefaultConversation = useDefaultConvo();
@@ -69,7 +71,7 @@
preset.assistant_id != null &&
!(preset.model ?? '')
) {
preset.model = assistantMap?.[newEndpoint]?.[preset.assistant_id]?.model;
preset.model = assistantsMap?.[newEndpoint]?.[preset.assistant_id]?.model;
}
const isModular = isCurrentModular && isNewModular && shouldSwitch;
@@ -80,6 +82,7 @@
/* target endpointType is necessary to avoid endpoint mixing */
conversation: { ...(conversation ?? {}), endpointType: template.endpointType },
preset: template,
cleanOutput: true,
});
/* We don't reset the latest message, only when changing settings mid-conversation */
@@ -104,7 +107,7 @@
modularChat,
newConversation,
endpointsConfig,
assistantMap,
assistantsMap,
],
);
@@ -112,6 +115,7 @@
model?: string;
agent_id?: string;
assistant_id?: string;
spec?: string | null;
};
const onSelectEndpoint = useCallback(
@@ -149,12 +153,21 @@
template.agent_id = agent_id;
}
template.spec = null;
template.iconURL = null;
template.modelLabel = null;
if (isExistingConversation && isCurrentModular && isNewModular && shouldSwitch) {
template.endpointType = newEndpointType;
const currentConvo = getDefaultConversation({
/* target endpointType is necessary to avoid endpoint mixing */
conversation: { ...(conversation ?? {}), endpointType: template.endpointType },
conversation: {
...(conversation ?? {}),
spec: null,
iconURL: null,
modelLabel: null,
endpointType: template.endpointType,
},
preset: template,
});
@@ -165,7 +178,7 @@
newConversation({
template: { ...(template as Partial<TConversation>) },
preset: { ...kwargs, endpoint: newEndpoint },
preset: { ...kwargs, spec: null, iconURL: null, modelLabel: null, endpoint: newEndpoint },
});
},
[conversation, getDefaultConversation, modularChat, newConversation, endpointsConfig],
@@ -194,14 +207,20 @@
endpointsConfig,
});
newPreset.spec = null;
newPreset.iconURL = newPreset.iconURL ?? null;
newPreset.modelLabel = newPreset.modelLabel ?? null;
const isModular = isCurrentModular && isNewModular && shouldSwitch;
if (isExistingConversation && isModular) {
template.endpointType = newEndpointType as EModelEndpoint | undefined;
template.spec = null;
template.iconURL = null;
template.modelLabel = null;
const currentConvo = getDefaultConversation({
/* target endpointType is necessary to avoid endpoint mixing */
conversation: { ...(conversation ?? {}), endpointType: template.endpointType },
preset: template,
cleanInput: true,
});
/* We don't reset the latest message, only when changing settings mid-conversation */
@@ -242,7 +261,7 @@
} else if (isAssistantsEndpoint(option.type)) {
onSelectEndpoint(option.type, {
assistant_id: key,
model: assistantMap?.[option.type]?.[key]?.model ?? '',
model: assistantsMap?.[option.type]?.[key]?.model ?? '',
});
} else if (isAgentsEndpoint(option.type)) {
onSelectEndpoint(option.type, {
@@ -250,9 +269,16 @@
});
}
},
[modelSpecs, onSelectEndpoint, onSelectPreset, onSelectSpec, presets, assistantMap],
[modelSpecs, onSelectEndpoint, onSelectPreset, onSelectSpec, presets, assistantsMap],
);
if (returnHandlers) {
return {
onSelectSpec,
onSelectEndpoint,
};
}
return {
onSelectMention,
};

View file

@@ -10,6 +10,7 @@ import {
tPresetSchema,
tMessageSchema,
tConvoUpdateSchema,
ContentTypes,
} from 'librechat-data-provider';
import type {
TMessage,
@@ -58,6 +59,74 @@ export type EventHandlerParams = {
resetLatestMessage?: Resetter;
};
const createErrorMessage = ({
errorMetadata,
getMessages,
submission,
error,
}: {
getMessages: () => TMessage[] | undefined;
errorMetadata?: Partial<TMessage>;
submission: EventSubmission;
error?: Error | unknown;
}) => {
const currentMessages = getMessages();
const latestMessage = currentMessages?.[currentMessages.length - 1];
let errorMessage: TMessage;
const text = submission.initialResponse.text.length > 45 ? submission.initialResponse.text : '';
const errorText =
(errorMetadata?.text || text || (error as Error | undefined)?.message) ??
'Error cancelling request';
const latestContent = latestMessage?.content ?? [];
let isValidContentPart = false;
if (latestContent.length > 0) {
const latestContentPart = latestContent[latestContent.length - 1];
const latestPartValue = latestContentPart?.[latestContentPart.type ?? ''];
isValidContentPart =
latestContentPart.type !== ContentTypes.TEXT ||
(latestContentPart.type === ContentTypes.TEXT && typeof latestPartValue === 'string')
? true
: latestPartValue?.value !== '';
}
if (
latestMessage?.conversationId &&
latestMessage?.messageId &&
latestContent &&
isValidContentPart
) {
const content = [...latestContent];
content.push({
type: ContentTypes.ERROR,
error: errorText,
});
errorMessage = {
...latestMessage,
...errorMetadata,
error: undefined,
text: '',
content,
};
if (
submission.userMessage.messageId &&
submission.userMessage.messageId !== errorMessage.parentMessageId
) {
errorMessage.parentMessageId = submission.userMessage.messageId;
}
return errorMessage;
} else if (errorMetadata) {
return errorMetadata as TMessage;
} else {
errorMessage = {
...submission,
...submission.initialResponse,
text: errorText,
unfinished: !!text.length,
error: true,
};
}
return tMessageSchema.parse(errorMessage);
};
export default function useEventHandlers({
genTitle,
setMessages,
@@ -479,11 +548,16 @@
if (!data) {
const convoId = conversationId || v4();
const errorResponse = parseErrorResponse({
const errorMetadata = parseErrorResponse({
text: 'Error connecting to server, try refreshing the page.',
...submission,
conversationId: convoId,
});
const errorResponse = createErrorMessage({
errorMetadata,
getMessages,
submission,
});
setMessages([...messages, userMessage, errorResponse]);
if (newConversation) {
newConversation({
@@ -593,21 +667,15 @@
} catch (error) {
console.error('Error cancelling request');
console.error(error);
const convoId = conversationId || v4();
const text =
submission.initialResponse.text.length > 45 ? submission.initialResponse.text : '';
const errorMessage = {
...submission,
...submission.initialResponse,
text: (text || (error as Error | undefined)?.message) ?? 'Error cancelling request',
unfinished: !!text.length,
error: true,
};
const errorResponse = tMessageSchema.parse(errorMessage);
const errorResponse = createErrorMessage({
getMessages,
submission,
error,
});
setMessages([...submission.messages, submission.userMessage, errorResponse]);
if (newConversation) {
newConversation({
template: { conversationId: convoId },
template: { conversationId: conversationId || errorResponse.conversationId || v4() },
preset: tPresetSchema.parse(submission.conversation),
});
}

View file

@@ -13,22 +13,25 @@ export * from './Plugins';
export * from './Prompts';
export * from './Roles';
export * from './SSE';
export * from './AuthContext';
export * from './ThemeContext';
export * from './ScreenshotContext';
export * from './ApiErrorBoundaryContext';
export * from './Endpoint';
export type { TranslationKeys } from './useLocalize';
export { default as useToast } from './useToast';
export { default as useTimeout } from './useTimeout';
export { default as useNewConvo } from './useNewConvo';
export { default as useLocalize } from './useLocalize';
export type { TranslationKeys } from './useLocalize';
export { default as useMediaQuery } from './useMediaQuery';
export { default as useChatBadges } from './useChatBadges';
export { default as useScrollToRef } from './useScrollToRef';
export { default as useLocalStorage } from './useLocalStorage';
export { default as useDocumentTitle } from './useDocumentTitle';
export { default as useDelayedRender } from './useDelayedRender';
export { default as useOnClickOutside } from './useOnClickOutside';
export { default as useSpeechToText } from './Input/useSpeechToText';
export { default as useTextToSpeech } from './Input/useTextToSpeech';
export { default as useGenerationsByLatest } from './useGenerationsByLatest';
export { default as useDocumentTitle } from './useDocumentTitle';

View file

@@ -0,0 +1,35 @@
import { useRecoilValue } from 'recoil';
import { MessageCircleDashed, Box } from 'lucide-react';
import type { BadgeItem } from '~/common';
import { useLocalize } from '~/hooks';
import store from '~/store';
const badgeConfig = [
{
id: '1',
icon: MessageCircleDashed,
label: 'com_ui_temporary',
atom: store.isTemporary,
},
// {
// id: '2',
// icon: Box,
// label: 'com_ui_artifacts',
// atom: store.codeArtifacts,
// },
// TODO: add more badges here (missing store atoms)
] as const;
export default function useChatBadges(): BadgeItem[] {
const localize = useLocalize();
const activeBadges = useRecoilValue(store.chatBadges) as Array<{ id: string }>;
const activeBadgeIds = new Set(activeBadges.map((badge) => badge.id));
return badgeConfig.map((cfg) => ({
id: cfg.id,
label: localize(cfg.label),
icon: cfg.icon,
atom: cfg.atom,
isAvailable: activeBadgeIds.has(cfg.id),
}));
}
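
A small sketch of consuming the hook (not from the diff); the `BadgeList` component and the `badge-icon` class usage are illustrative only.

```tsx
import React from 'react';
import { useChatBadges } from '~/hooks';

// Only badges whose id is currently active (isAvailable) are rendered;
// badge.atom is the Recoil atom a toggle would flip.
export default function BadgeList() {
  const badges = useChatBadges();

  return (
    <div className="flex gap-2">
      {badges
        .filter((badge) => badge.isAvailable)
        .map((badge) => (
          <span key={badge.id} className="badge-icon">
            <badge.icon size={16} />
            {badge.label}
          </span>
        ))}
    </div>
  );
}
```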

View file

@@ -219,11 +219,12 @@ const useNewConvo = (index = 0) => {
};
let preset = _preset;
const defaultModelSpec = getDefaultModelSpec(startupConfig?.modelSpecs?.list);
const defaultModelSpec = getDefaultModelSpec(startupConfig);
if (
!preset &&
startupConfig &&
startupConfig.modelSpecs?.prioritize === true &&
(startupConfig.modelSpecs?.prioritize === true ||
(startupConfig.interface?.modelSelect ?? true) !== true) &&
defaultModelSpec
) {
preset = {