📊 refactor: use Parameters from Side Panel for OpenAI, Anthropic, and Custom endpoints (#4092)

* feat: openai parameters

* refactor: anthropic/bedrock params, add preset params for openai, and add azure params

* refactor: use 'compact' schemas for anthropic/openai

* refactor: ensure custom endpoints are properly recognized as valid param endpoints

* refactor: update paramEndpoints check in BaseClient.js

* chore: optimize logging by omitting modelsConfig

* refactor: update label casing in baseDefinitions combobox items

* fix: remove 'stop' from model options when using o1-series models

* refactor(AnthropicClient): remove default `stop` value

* refactor: reset params on parameters change

* refactor: remove unused default parameter value map introduced in prior commit

* fix: 'min' typo where 'max' value was intended

* refactor: preset settings

* refactor: replace dropdown for image detail with slider; remove `preventDelayedUpdate` condition from DynamicSlider

* fix: localizations for freq./pres. penalty

* refactor: use coerceNumber for maxOutputTokens in tConversationSchema

* refactor(AnthropicClient): use `getModelMaxOutputTokens`
Danny Avila 2024-09-17 22:25:54 -04:00 committed by GitHub
parent ebdbfe8427
commit 8dc5b320bc
20 changed files with 575 additions and 1103 deletions
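
The central mechanism of this refactor: custom endpoints carry a user-defined `endpoint` name, so the old static `paramEndpoints.has(endpoint)` lookup could never match them. The new `isParamEndpoint(endpoint, endpointType)` helper (defined in the data-provider diff below) falls back to `endpointType`, which is `custom` for such endpoints. A minimal sketch of the difference, with 'my-litellm' standing in for a hypothetical user-defined endpoint name:

import { EModelEndpoint, isParamEndpoint } from 'librechat-data-provider';

// Before: a Set lookup by endpoint name alone misses custom endpoints.
// paramEndpoints.has('my-litellm') === false

// After: the helper consults endpointType when the name itself is unknown.
isParamEndpoint('my-litellm', EModelEndpoint.custom); // true
isParamEndpoint(EModelEndpoint.anthropic); // true — now in the set
isParamEndpoint(EModelEndpoint.google); // false — not a param endpoint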

View file

@@ -17,8 +17,8 @@ const {
parseParamFromPrompt,
createContextHandlers,
} = require('./prompts');
const { getModelMaxTokens, getModelMaxOutputTokens, matchModelName } = require('~/utils');
const { spendTokens, spendStructuredTokens } = require('~/models/spendTokens');
const { getModelMaxTokens, matchModelName } = require('~/utils');
const { sleep } = require('~/server/utils');
const BaseClient = require('./BaseClient');
const { logger } = require('~/config');
@@ -120,7 +120,14 @@ class AnthropicClient extends BaseClient {
this.options.maxContextTokens ??
getModelMaxTokens(this.modelOptions.model, EModelEndpoint.anthropic) ??
100000;
this.maxResponseTokens = this.modelOptions.maxOutputTokens || 1500;
this.maxResponseTokens =
this.modelOptions.maxOutputTokens ??
getModelMaxOutputTokens(
this.modelOptions.model,
this.options.endpointType ?? this.options.endpoint,
this.options.endpointTokenConfig,
) ??
1500;
this.maxPromptTokens =
this.options.maxPromptTokens || this.maxContextTokens - this.maxResponseTokens;
@@ -144,17 +151,6 @@ class AnthropicClient extends BaseClient {
this.endToken = '';
this.gptEncoder = this.constructor.getTokenizer('cl100k_base');
if (!this.modelOptions.stop) {
const stopTokens = [this.startToken];
if (this.endToken && this.endToken !== this.startToken) {
stopTokens.push(this.endToken);
}
stopTokens.push(`${this.userLabel}`);
stopTokens.push('<|diff_marker|>');
this.modelOptions.stop = stopTokens;
}
return this;
}

View file

@@ -3,7 +3,7 @@ const fetch = require('node-fetch');
const {
supportsBalanceCheck,
isAgentsEndpoint,
paramEndpoints,
isParamEndpoint,
ErrorTypes,
Constants,
CacheKeys,
@@ -588,7 +588,10 @@ class BaseClient {
if (typeof completion === 'string') {
responseMessage.text = addSpaceIfNeeded(generation) + completion;
} else if (Array.isArray(completion) && paramEndpoints.has(this.options.endpoint)) {
} else if (
Array.isArray(completion) &&
isParamEndpoint(this.options.endpoint, this.options.endpointType)
) {
responseMessage.text = '';
responseMessage.content = completion;
} else if (Array.isArray(completion)) {

View file

@@ -1295,6 +1295,7 @@ ${convo}
if (modelOptions.stream && /\bo1\b/i.test(modelOptions.model)) {
delete modelOptions.stream;
delete modelOptions.stop;
}
if (modelOptions.stream) {
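
The word-boundary regex above matches o1-series model names such as 'o1-mini' and 'o1-preview' (and prefixed forms like 'openai/o1-preview') without catching names that merely contain the substring; at the time of this commit, o1 models rejected both streaming and stop sequences, hence both options are deleted. A quick check of the pattern:

/\bo1\b/i.test('o1-preview'); // true
/\bo1\b/i.test('openai/o1-mini'); // true
/\bo1\b/i.test('gpt-4o'); // false
/\bo1\b/i.test('o10'); // false — no word boundary after 'o1'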

View file

@@ -16,7 +16,12 @@ const AskController = async (req, res, next, initializeClient, addTitle) => {
overrideParentMessageId = null,
} = req.body;
logger.debug('[AskController]', { text, conversationId, ...endpointOption });
logger.debug('[AskController]', {
text,
conversationId,
...endpointOption,
modelsConfig: endpointOption.modelsConfig ? 'exists' : '',
});
let userMessage;
let userMessagePromise;

View file

@@ -25,6 +25,7 @@ const EditController = async (req, res, next, initializeClient) => {
isContinued,
conversationId,
...endpointOption,
modelsConfig: endpointOption.modelsConfig ? 'exists' : '',
});
let userMessage;

View file

@@ -123,7 +123,16 @@ const modelMaxOutputs = {
system_default: 1024,
};
const anthropicMaxOutputs = {
'claude-3-haiku': 4096,
'claude-3-sonnet': 4096,
'claude-3-opus': 4096,
'claude-3.5-sonnet': 8192,
'claude-3-5-sonnet': 8192,
};
const maxOutputTokensMap = {
[EModelEndpoint.anthropic]: anthropicMaxOutputs,
[EModelEndpoint.azureOpenAI]: modelMaxOutputs,
[EModelEndpoint.openAI]: modelMaxOutputs,
[EModelEndpoint.custom]: modelMaxOutputs,
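
`getModelMaxOutputTokens` (imported in the AnthropicClient diff above) resolves a model's output cap from this map; its implementation is not shown in this diff. A hypothetical lookup consistent with how the sibling `getModelMaxTokens` matches keys (exact key first, then partial match, then the system default):

type TokensMap = Record<string, number>;

function lookupMaxOutputTokens(model: string, map: TokensMap): number | undefined {
  if (map[model] != null) {
    return map[model]; // exact match
  }
  // Prefer the longest partial key so 'claude-3-5-sonnet-20240620' resolves
  // to 'claude-3-5-sonnet' (8192) rather than falling through to the default.
  const partial = Object.keys(map)
    .filter((key) => model.includes(key))
    .sort((a, b) => b.length - a.length)[0];
  return partial != null ? map[partial] : map['system_default'];
}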

View file

@@ -2,7 +2,7 @@ import { useRecoilState } from 'recoil';
import { Settings2 } from 'lucide-react';
import { Root, Anchor } from '@radix-ui/react-popover';
import { useState, useEffect, useMemo } from 'react';
import { tPresetUpdateSchema, EModelEndpoint, paramEndpoints } from 'librechat-data-provider';
import { tPresetUpdateSchema, EModelEndpoint, isParamEndpoint } from 'librechat-data-provider';
import type { TPreset, TInterfaceConfig } from 'librechat-data-provider';
import { EndpointSettings, SaveAsPresetDialog, AlternativeSettings } from '~/components/Endpoints';
import { PluginStoreDialog, TooltipAnchor } from '~/components';
@@ -28,7 +28,7 @@ export default function HeaderOptions({
useChatContext();
const { setOption } = useSetIndexOptions();
const { endpoint, conversationId, jailbreak = false } = conversation ?? {};
const { endpoint, endpointType, conversationId, jailbreak = false } = conversation ?? {};
const altConditions: { [key: string]: boolean } = {
bingAI: !!(latestMessage && jailbreak && endpoint === 'bingAI'),
@@ -64,6 +64,8 @@
const triggerAdvancedMode = altConditions[endpoint]
? altSettings[endpoint]
: () => setShowPopover((prev) => !prev);
const paramEndpoint = isParamEndpoint(endpoint, endpointType);
return (
<Root
open={showPopover}
@@ -83,7 +85,7 @@
)}
{!noSettings[endpoint] &&
interfaceConfig?.parameters === true &&
!paramEndpoints.has(endpoint) && (
paramEndpoint === false && (
<TooltipAnchor
id="parameters-button"
aria-label={localize('com_ui_model_parameters')}
@@ -98,7 +100,7 @@
</TooltipAnchor>
)}
</div>
{interfaceConfig?.parameters === true && !paramEndpoints.has(endpoint) && (
{interfaceConfig?.parameters === true && paramEndpoint === false && (
<OptionsPopover
visible={showPopover}
saveAsPreset={saveAsPreset}

View file

@@ -1,364 +1,64 @@
import TextareaAutosize from 'react-textarea-autosize';
import { anthropicSettings } from 'librechat-data-provider';
import type { TModelSelectProps, OnInputNumberChange } from '~/common';
import {
Input,
Label,
Slider,
Switch,
HoverCard,
InputNumber,
SelectDropDown,
HoverCardTrigger,
} from '~/components/ui';
import { cn, defaultTextProps, optionText, removeFocusOutlines, removeFocusRings } from '~/utils';
import OptionHoverAlt from '~/components/SidePanel/Parameters/OptionHover';
import { useLocalize, useDebouncedInput } from '~/hooks';
import OptionHover from './OptionHover';
import { ESide } from '~/common';
import { useMemo } from 'react';
import { getSettingsKeys } from 'librechat-data-provider';
import type { SettingDefinition } from 'librechat-data-provider';
import type { TModelSelectProps } from '~/common';
import { componentMapping } from '~/components/SidePanel/Parameters/components';
import { presetSettings } from '~/components/SidePanel/Parameters/settings';
export default function Settings({ conversation, setOption, models, readonly }: TModelSelectProps) {
const localize = useLocalize();
const {
model,
modelLabel,
promptPrefix,
temperature,
topP,
topK,
maxOutputTokens,
maxContextTokens,
resendFiles,
promptCache,
} = conversation ?? {};
const [setMaxContextTokens, maxContextTokensValue] = useDebouncedInput<number | null | undefined>(
{
setOption,
optionKey: 'maxContextTokens',
initialValue: maxContextTokens,
},
);
if (!conversation) {
export default function AnthropicSettings({
conversation,
setOption,
models,
readonly,
}: TModelSelectProps) {
const parameters = useMemo(() => {
const [combinedKey, endpointKey] = getSettingsKeys(
conversation?.endpointType ?? conversation?.endpoint ?? '',
conversation?.model ?? '',
);
return presetSettings[combinedKey] ?? presetSettings[endpointKey];
}, [conversation]);
if (!parameters) {
return null;
}
const setModelLabel = setOption('modelLabel');
const setPromptPrefix = setOption('promptPrefix');
const setTemperature = setOption('temperature');
const setTopP = setOption('topP');
const setTopK = setOption('topK');
const setResendFiles = setOption('resendFiles');
const setPromptCache = setOption('promptCache');
const setModel = (newModel: string) => {
const modelSetter = setOption('model');
const maxOutputSetter = setOption('maxOutputTokens');
if (maxOutputTokens) {
maxOutputSetter(anthropicSettings.maxOutputTokens.set(maxOutputTokens, newModel));
const renderComponent = (setting: SettingDefinition | undefined) => {
if (!setting) {
return null;
}
modelSetter(newModel);
};
const setMaxOutputTokens = (value: number) => {
const setter = setOption('maxOutputTokens');
if (model) {
setter(anthropicSettings.maxOutputTokens.set(value, model));
} else {
setter(value);
const Component = componentMapping[setting.component];
if (!Component) {
return null;
}
const { key, default: defaultValue, ...rest } = setting;
const props = {
key,
settingKey: key,
defaultValue,
...rest,
readonly,
setOption,
conversation,
};
if (key === 'model') {
return <Component {...props} options={models} />;
}
return <Component {...props} />;
};
return (
<div className="grid grid-cols-5 gap-6">
<div className="col-span-5 flex flex-col items-center justify-start gap-6 sm:col-span-3">
<div className="grid w-full items-center gap-2">
<SelectDropDown
value={model ?? ''}
setValue={setModel}
availableValues={models}
disabled={readonly}
className={cn(defaultTextProps, 'flex w-full resize-none', removeFocusRings)}
containerClassName="flex w-full resize-none"
/>
<div className="h-auto max-w-full overflow-x-hidden p-3">
<div className="grid grid-cols-1 gap-6 md:grid-cols-5">
<div className="flex flex-col gap-6 md:col-span-3">
{parameters.col1.map(renderComponent)}
</div>
<div className="grid w-full items-center gap-2">
<Label htmlFor="modelLabel" className="text-left text-sm font-medium">
{localize('com_endpoint_custom_name')}{' '}
<small className="opacity-40">({localize('com_endpoint_default_blank')})</small>
</Label>
<Input
id="modelLabel"
disabled={readonly}
value={modelLabel || ''}
onChange={(e) => setModelLabel(e.target.value ?? null)}
placeholder={localize('com_endpoint_anthropic_custom_name_placeholder')}
className={cn(
defaultTextProps,
'flex h-10 max-h-10 w-full resize-none px-3 py-2',
removeFocusOutlines,
)}
/>
<div className="flex flex-col gap-6 md:col-span-2">
{parameters.col2.map(renderComponent)}
</div>
<div className="grid w-full items-center gap-2">
<Label htmlFor="promptPrefix" className="text-left text-sm font-medium">
{localize('com_endpoint_prompt_prefix')}{' '}
<small className="opacity-40">({localize('com_endpoint_default_blank')})</small>
</Label>
<TextareaAutosize
id="promptPrefix"
disabled={readonly}
value={promptPrefix || ''}
onChange={(e) => setPromptPrefix(e.target.value ?? null)}
placeholder={localize('com_endpoint_prompt_prefix_placeholder')}
className={cn(
defaultTextProps,
'flex max-h-[138px] min-h-[100px] w-full resize-none px-3 py-2 ',
)}
/>
</div>
</div>
<div className="col-span-5 flex flex-col items-center justify-start gap-6 px-3 sm:col-span-2">
<HoverCard openDelay={300}>
<HoverCardTrigger className="grid w-full items-center gap-2">
<div className="mt-1 flex w-full justify-between">
<Label htmlFor="max-context-tokens" className="text-left text-sm font-medium">
{localize('com_endpoint_context_tokens')}{' '}
</Label>
<InputNumber
id="max-context-tokens"
stringMode={false}
disabled={readonly}
value={maxContextTokensValue as number}
onChange={setMaxContextTokens as OnInputNumberChange}
placeholder={localize('com_nav_theme_system')}
min={10}
max={2000000}
step={1000}
controls={false}
className={cn(
defaultTextProps,
cn(
optionText,
'reset-rc-number-input reset-rc-number-input-text-right h-auto w-12 border-0 group-hover/temp:border-gray-200',
'w-1/3',
),
)}
/>
</div>
</HoverCardTrigger>
<OptionHoverAlt
description="com_endpoint_context_info"
langCode={true}
side={ESide.Left}
/>
</HoverCard>
<HoverCard openDelay={300}>
<HoverCardTrigger className="grid w-full items-center gap-2">
<div className="flex justify-between">
<Label htmlFor="temp-int" className="text-left text-sm font-medium">
{localize('com_endpoint_temperature')}{' '}
<small className="opacity-40">
({localize('com_endpoint_default')}: {anthropicSettings.temperature.default})
</small>
</Label>
<InputNumber
id="temp-int"
disabled={readonly}
value={temperature}
onChange={(value) => setTemperature(Number(value))}
max={anthropicSettings.temperature.max}
min={0}
step={0.01}
controls={false}
className={cn(
defaultTextProps,
cn(
optionText,
'reset-rc-number-input reset-rc-number-input-text-right h-auto w-12 border-0 group-hover/temp:border-gray-200',
),
)}
/>
</div>
<Slider
disabled={readonly}
value={[temperature ?? anthropicSettings.temperature.default]}
onValueChange={(value) => setTemperature(value[0])}
doubleClickHandler={() => setTemperature(anthropicSettings.temperature.default)}
max={anthropicSettings.temperature.max}
min={0}
step={0.01}
className="flex h-4 w-full"
/>
</HoverCardTrigger>
<OptionHover endpoint={conversation.endpoint ?? ''} type="temp" side={ESide.Left} />
</HoverCard>
<HoverCard openDelay={300}>
<HoverCardTrigger className="grid w-full items-center gap-2">
<div className="flex justify-between">
<Label htmlFor="top-p-int" className="text-left text-sm font-medium">
{localize('com_endpoint_top_p')}{' '}
<small className="opacity-40">
({localize('com_endpoint_default_with_num', anthropicSettings.topP.default + '')})
</small>
</Label>
<InputNumber
id="top-p-int"
disabled={readonly}
value={topP}
onChange={(value) => setTopP(Number(value))}
max={anthropicSettings.topP.max}
min={0}
step={0.01}
controls={false}
className={cn(
defaultTextProps,
cn(
optionText,
'reset-rc-number-input reset-rc-number-input-text-right h-auto w-12 border-0 group-hover/temp:border-gray-200',
),
)}
/>
</div>
<Slider
disabled={readonly}
value={[topP ?? 0.7]}
onValueChange={(value) => setTopP(value[0])}
doubleClickHandler={() => setTopP(anthropicSettings.topP.default)}
max={anthropicSettings.topP.max}
min={0}
step={0.01}
className="flex h-4 w-full"
/>
</HoverCardTrigger>
<OptionHover endpoint={conversation.endpoint ?? ''} type="topp" side={ESide.Left} />
</HoverCard>
<HoverCard openDelay={300}>
<HoverCardTrigger className="grid w-full items-center gap-2">
<div className="flex justify-between">
<Label htmlFor="top-k-int" className="text-left text-sm font-medium">
{localize('com_endpoint_top_k')}{' '}
<small className="opacity-40">
({localize('com_endpoint_default_with_num', anthropicSettings.topK.default + '')})
</small>
</Label>
<InputNumber
id="top-k-int"
disabled={readonly}
value={topK}
onChange={(value) => setTopK(Number(value))}
max={anthropicSettings.topK.max}
min={1}
step={0.01}
controls={false}
className={cn(
defaultTextProps,
cn(
optionText,
'reset-rc-number-input reset-rc-number-input-text-right h-auto w-12 border-0 group-hover/temp:border-gray-200',
),
)}
/>
</div>
<Slider
disabled={readonly}
value={[topK ?? 5]}
onValueChange={(value) => setTopK(value[0])}
doubleClickHandler={() => setTopK(anthropicSettings.topK.default)}
max={anthropicSettings.topK.max}
min={1}
step={0.01}
className="flex h-4 w-full"
/>
</HoverCardTrigger>
<OptionHover endpoint={conversation.endpoint ?? ''} type="topk" side={ESide.Left} />
</HoverCard>
<HoverCard openDelay={300}>
<HoverCardTrigger className="grid w-full items-center gap-2">
<div className="flex justify-between">
<Label htmlFor="max-tokens-int" className="text-left text-sm font-medium">
{localize('com_endpoint_max_output_tokens')}{' '}
<small className="opacity-40">({anthropicSettings.maxOutputTokens.default})</small>
</Label>
<InputNumber
id="max-tokens-int"
disabled={readonly}
value={maxOutputTokens}
onChange={(value) => setMaxOutputTokens(Number(value))}
max={anthropicSettings.maxOutputTokens.max}
min={1}
step={1}
controls={false}
className={cn(
defaultTextProps,
cn(
optionText,
'reset-rc-number-input reset-rc-number-input-text-right h-auto w-12 border-0 group-hover/temp:border-gray-200',
),
)}
/>
</div>
<Slider
disabled={readonly}
value={[maxOutputTokens ?? anthropicSettings.maxOutputTokens.default]}
onValueChange={(value) => setMaxOutputTokens(value[0])}
doubleClickHandler={() =>
setMaxOutputTokens(anthropicSettings.maxOutputTokens.default)
}
max={anthropicSettings.maxOutputTokens.max}
min={1}
step={1}
className="flex h-4 w-full"
/>
</HoverCardTrigger>
<OptionHover
endpoint={conversation.endpoint ?? ''}
type="maxoutputtokens"
side={ESide.Left}
/>
</HoverCard>
<HoverCard openDelay={500}>
<HoverCardTrigger className="grid w-full">
<div className="flex justify-between">
<Label htmlFor="resend-files" className="text-left text-sm font-medium">
{localize('com_endpoint_plug_resend_files')}{' '}
</Label>
<Switch
id="resend-files"
checked={resendFiles ?? true}
onCheckedChange={(checked: boolean) => setResendFiles(checked)}
disabled={readonly}
className="flex"
/>
<OptionHover
endpoint={conversation.endpoint ?? ''}
type="resend"
side={ESide.Bottom}
/>
</div>
</HoverCardTrigger>
</HoverCard>
<HoverCard openDelay={500}>
<HoverCardTrigger className="grid w-full">
<div className="flex justify-between">
<Label htmlFor="prompt-cache" className="text-left text-sm font-medium">
{localize('com_endpoint_prompt_cache')}{' '}
</Label>
<Switch
id="prompt-cache"
checked={promptCache ?? true}
onCheckedChange={(checked: boolean) => setPromptCache(checked)}
disabled={readonly}
className="flex"
/>
<OptionHover
endpoint={conversation.endpoint ?? ''}
type="promptcache"
side={ESide.Bottom}
/>
</div>
</HoverCardTrigger>
</HoverCard>
</div>
</div>
);
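
This component and the OpenAI/Bedrock variants below all resolve their layout via `getSettingsKeys`, trying a combined endpoint-plus-provider key before falling back to the bare endpoint key. The helper's internals are not part of this diff; a hypothetical shape, inferred only from the map keys used in the settings file below ('anthropic', 'bedrock-anthropic', etc.):

// Hypothetical sketch; the real getSettingsKeys lives in librechat-data-provider
// and may derive the second key segment differently.
function settingsKeysSketch(endpoint: string, model: string): [string, string] {
  const provider = model.split('.')[0]; // e.g. 'anthropic' from 'anthropic.claude-3-...'
  return [`${endpoint}-${provider}`, endpoint];
}

// presetSettings[combinedKey] wins when a provider-specific layout exists
// (as for bedrock); otherwise the endpoint-level layout is used.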

View file

@@ -13,7 +13,7 @@ export default function BedrockSettings({
}: TModelSelectProps) {
const parameters = useMemo(() => {
const [combinedKey, endpointKey] = getSettingsKeys(
conversation?.endpoint ?? '',
conversation?.endpointType ?? conversation?.endpoint ?? '',
conversation?.model ?? '',
);
return presetSettings[combinedKey] ?? presetSettings[endpointKey];
@@ -23,8 +23,14 @@
return null;
}
const renderComponent = (setting: SettingDefinition) => {
const renderComponent = (setting: SettingDefinition | undefined) => {
if (!setting) {
return null;
}
const Component = componentMapping[setting.component];
if (!Component) {
return null;
}
const { key, default: defaultValue, ...rest } = setting;
const props = {

View file

@@ -1,474 +1,63 @@
import { useMemo } from 'react';
import TextareaAutosize from 'react-textarea-autosize';
import {
openAISettings,
EModelEndpoint,
imageDetailValue,
imageDetailNumeric,
} from 'librechat-data-provider';
import type { TModelSelectProps, OnInputNumberChange } from '~/common';
import {
Input,
Label,
Switch,
Slider,
HoverCard,
InputNumber,
SelectDropDown,
HoverCardTrigger,
} from '~/components/ui';
import { cn, defaultTextProps, optionText, removeFocusOutlines, removeFocusRings } from '~/utils';
import { OptionHoverAlt, DynamicTags } from '~/components/SidePanel/Parameters';
import { useLocalize, useDebouncedInput } from '~/hooks';
import OptionHover from './OptionHover';
import { ESide } from '~/common';
import { getSettingsKeys } from 'librechat-data-provider';
import type { SettingDefinition, DynamicSettingProps } from 'librechat-data-provider';
import type { TModelSelectProps } from '~/common';
import { componentMapping } from '~/components/SidePanel/Parameters/components';
import { presetSettings } from '~/components/SidePanel/Parameters/settings';
export default function Settings({ conversation, setOption, models, readonly }: TModelSelectProps) {
const localize = useLocalize();
const {
endpoint,
endpointType,
model,
modelLabel,
chatGptLabel,
promptPrefix,
temperature,
top_p: topP,
frequency_penalty: freqP,
presence_penalty: presP,
resendFiles,
imageDetail,
maxContextTokens,
max_tokens,
} = conversation ?? {};
export default function OpenAISettings({
conversation,
setOption,
models,
readonly,
}: TModelSelectProps) {
const parameters = useMemo(() => {
const [combinedKey, endpointKey] = getSettingsKeys(
conversation?.endpointType ?? conversation?.endpoint ?? '',
conversation?.model ?? '',
);
return presetSettings[combinedKey] ?? presetSettings[endpointKey];
}, [conversation]);
const [setChatGptLabel, chatGptLabelValue] = useDebouncedInput<string | null | undefined>({
setOption,
optionKey: 'chatGptLabel',
initialValue: modelLabel ?? chatGptLabel,
});
const [setPromptPrefix, promptPrefixValue] = useDebouncedInput<string | null | undefined>({
setOption,
optionKey: 'promptPrefix',
initialValue: promptPrefix,
});
const [setTemperature, temperatureValue] = useDebouncedInput<number | null | undefined>({
setOption,
optionKey: 'temperature',
initialValue: temperature,
});
const [setTopP, topPValue] = useDebouncedInput<number | null | undefined>({
setOption,
optionKey: 'top_p',
initialValue: topP,
});
const [setFreqP, freqPValue] = useDebouncedInput<number | null | undefined>({
setOption,
optionKey: 'frequency_penalty',
initialValue: freqP,
});
const [setPresP, presPValue] = useDebouncedInput<number | null | undefined>({
setOption,
optionKey: 'presence_penalty',
initialValue: presP,
});
const [setMaxContextTokens, maxContextTokensValue] = useDebouncedInput<number | null | undefined>(
{
setOption,
optionKey: 'maxContextTokens',
initialValue: maxContextTokens,
},
);
const [setMaxOutputTokens, maxOutputTokensValue] = useDebouncedInput<number | null | undefined>({
setOption,
optionKey: 'max_tokens',
initialValue: max_tokens,
});
const optionEndpoint = useMemo(() => endpointType ?? endpoint, [endpoint, endpointType]);
const isOpenAI = useMemo(
() => optionEndpoint === EModelEndpoint.openAI || optionEndpoint === EModelEndpoint.azureOpenAI,
[optionEndpoint],
);
if (!conversation) {
if (!parameters) {
return null;
}
const setModel = setOption('model');
const setResendFiles = setOption('resendFiles');
const setImageDetail = setOption('imageDetail');
const renderComponent = (setting: SettingDefinition | undefined) => {
if (!setting) {
return null;
}
const Component = componentMapping[setting.component];
if (!Component) {
return null;
}
const { key, default: defaultValue, ...rest } = setting;
const props = {
key,
settingKey: key,
defaultValue,
...rest,
readonly,
setOption,
conversation,
};
if (key === 'model') {
return <Component {...props} options={models} />;
}
return <Component {...props} />;
};
return (
<div className="grid grid-cols-5 gap-6">
<div className="col-span-5 flex flex-col items-center justify-start gap-6 sm:col-span-3">
<div className="grid w-full items-center gap-2">
<SelectDropDown
value={model ?? ''}
setValue={setModel}
availableValues={models}
disabled={readonly}
className={cn(defaultTextProps, 'flex w-full resize-none', removeFocusRings)}
containerClassName="flex w-full resize-none"
/>
<div className="h-auto max-w-full overflow-x-hidden p-3">
<div className="grid grid-cols-1 gap-6 md:grid-cols-5">
<div className="flex flex-col gap-6 md:col-span-3">
{parameters.col1.map(renderComponent)}
</div>
<div className="grid w-full items-center gap-2">
<Label htmlFor="chatGptLabel" className="text-left text-sm font-medium">
{localize('com_endpoint_custom_name')}{' '}
<small className="opacity-40">({localize('com_endpoint_default_blank')})</small>
</Label>
<Input
id="chatGptLabel"
disabled={readonly}
value={(chatGptLabelValue as string) || ''}
onChange={setChatGptLabel}
placeholder={localize('com_endpoint_openai_custom_name_placeholder')}
className={cn(
defaultTextProps,
'flex h-10 max-h-10 w-full resize-none px-3 py-2',
removeFocusOutlines,
)}
/>
</div>
<div className="grid w-full items-center gap-2">
<Label htmlFor="promptPrefix" className="text-left text-sm font-medium">
{localize('com_endpoint_prompt_prefix')}{' '}
<small className="opacity-40">({localize('com_endpoint_default_blank')})</small>
</Label>
<TextareaAutosize
id="promptPrefix"
disabled={readonly}
value={(promptPrefixValue as string) || ''}
onChange={setPromptPrefix}
placeholder={localize('com_endpoint_openai_prompt_prefix_placeholder')}
className={cn(
defaultTextProps,
'flex max-h-[138px] min-h-[100px] w-full resize-none px-3 py-2 transition-colors focus:outline-none',
)}
/>
</div>
<div className="grid w-full items-start gap-2">
<DynamicTags
settingKey="stop"
setOption={setOption}
label="com_endpoint_stop"
labelCode={true}
description="com_endpoint_openai_stop"
descriptionCode={true}
placeholder="com_endpoint_stop_placeholder"
placeholderCode={true}
descriptionSide="right"
maxTags={isOpenAI ? 4 : undefined}
conversation={conversation}
readonly={readonly}
/>
</div>
</div>
<div className="col-span-5 flex flex-col items-center justify-start gap-6 px-3 sm:col-span-2">
<HoverCard openDelay={300}>
<HoverCardTrigger className="grid w-full items-center gap-2">
<div className="mt-1 flex w-full justify-between">
<Label htmlFor="max-context-tokens" className="text-left text-sm font-medium">
{localize('com_endpoint_context_tokens')}{' '}
</Label>
<InputNumber
id="max-context-tokens"
stringMode={false}
disabled={readonly}
value={maxContextTokensValue as number}
onChange={setMaxContextTokens as OnInputNumberChange}
placeholder={localize('com_nav_theme_system')}
min={10}
max={2000000}
step={1000}
controls={false}
className={cn(
defaultTextProps,
cn(
optionText,
'reset-rc-number-input reset-rc-number-input-text-right h-auto w-12 border-0 group-hover/temp:border-gray-200',
'w-1/3',
),
)}
/>
</div>
</HoverCardTrigger>
<OptionHoverAlt
description="com_endpoint_context_info"
langCode={true}
side={ESide.Left}
/>
</HoverCard>
<HoverCard openDelay={300}>
<HoverCardTrigger className="grid w-full items-center gap-2">
<div className="mt-1 flex w-full justify-between">
<Label htmlFor="max-output-tokens" className="text-left text-sm font-medium">
{localize('com_endpoint_max_output_tokens')}{' '}
</Label>
<InputNumber
id="max-output-tokens"
stringMode={false}
disabled={readonly}
value={maxOutputTokensValue as number}
onChange={setMaxOutputTokens as OnInputNumberChange}
placeholder={localize('com_nav_theme_system')}
min={10}
max={2000000}
step={1000}
controls={false}
className={cn(
defaultTextProps,
cn(
optionText,
'reset-rc-number-input reset-rc-number-input-text-right h-auto w-12 border-0 group-hover/temp:border-gray-200',
'w-1/3',
),
)}
/>
</div>
</HoverCardTrigger>
<OptionHoverAlt
description="com_endpoint_openai_max_tokens"
langCode={true}
side={ESide.Left}
/>
</HoverCard>
<HoverCard openDelay={300}>
<HoverCardTrigger className="grid w-full items-center gap-2">
<div className="flex justify-between">
<Label htmlFor="temp-int" className="text-left text-sm font-medium">
{localize('com_endpoint_temperature')}{' '}
<small className="opacity-40">
(
{localize(
'com_endpoint_default_with_num',
openAISettings.temperature.default + '',
)}
)
</small>
</Label>
<InputNumber
id="temp-int"
stringMode={false}
disabled={readonly}
value={temperatureValue as number}
onChange={setTemperature as OnInputNumberChange}
max={openAISettings.temperature.max}
min={openAISettings.temperature.min}
step={openAISettings.temperature.step}
controls={false}
className={cn(
defaultTextProps,
cn(
optionText,
'reset-rc-number-input reset-rc-number-input-text-right h-auto w-12 border-0 group-hover/temp:border-gray-200',
),
)}
/>
</div>
<Slider
disabled={readonly}
value={[temperatureValue ?? openAISettings.temperature.default]}
onValueChange={(value) => setTemperature(value[0])}
doubleClickHandler={() => setTemperature(openAISettings.temperature.default)}
max={openAISettings.temperature.max}
min={openAISettings.temperature.min}
step={openAISettings.temperature.step}
className="flex h-4 w-full"
/>
</HoverCardTrigger>
<OptionHover endpoint={optionEndpoint ?? ''} type="temp" side={ESide.Left} />
</HoverCard>
<HoverCard openDelay={300}>
<HoverCardTrigger className="grid w-full items-center gap-2">
<div className="flex justify-between">
<Label htmlFor="top-p-int" className="text-left text-sm font-medium">
{localize('com_endpoint_top_p')}{' '}
<small className="opacity-40">
({localize('com_endpoint_default_with_num', openAISettings.top_p.default + '')})
</small>
</Label>
<InputNumber
id="top-p-int"
disabled={readonly}
value={topPValue as number}
onChange={(value) => setTopP(Number(value))}
max={openAISettings.top_p.max}
min={openAISettings.top_p.min}
step={openAISettings.top_p.step}
controls={false}
className={cn(
defaultTextProps,
cn(
optionText,
'reset-rc-number-input reset-rc-number-input-text-right h-auto w-12 border-0 group-hover/temp:border-gray-200',
),
)}
/>
</div>
<Slider
disabled={readonly}
value={[topPValue ?? openAISettings.top_p.default]}
onValueChange={(value) => setTopP(value[0])}
doubleClickHandler={() => setTopP(openAISettings.top_p.default)}
max={openAISettings.top_p.max}
min={openAISettings.top_p.min}
step={openAISettings.top_p.step}
className="flex h-4 w-full"
/>
</HoverCardTrigger>
<OptionHover endpoint={optionEndpoint ?? ''} type="topp" side={ESide.Left} />
</HoverCard>
<HoverCard openDelay={300}>
<HoverCardTrigger className="grid w-full items-center gap-2">
<div className="flex justify-between">
<Label htmlFor="freq-penalty-int" className="text-left text-sm font-medium">
{localize('com_endpoint_frequency_penalty')}{' '}
<small className="opacity-40">
(
{localize(
'com_endpoint_default_with_num',
openAISettings.frequency_penalty.default + '',
)}
)
</small>
</Label>
<InputNumber
id="freq-penalty-int"
disabled={readonly}
value={freqPValue as number}
onChange={(value) => setFreqP(Number(value))}
max={openAISettings.frequency_penalty.max}
min={openAISettings.frequency_penalty.min}
step={openAISettings.frequency_penalty.step}
controls={false}
className={cn(
defaultTextProps,
cn(
optionText,
'reset-rc-number-input reset-rc-number-input-text-right h-auto w-12 border-0 group-hover/temp:border-gray-200',
),
)}
/>
</div>
<Slider
disabled={readonly}
value={[freqPValue ?? openAISettings.frequency_penalty.default]}
onValueChange={(value) => setFreqP(value[0])}
doubleClickHandler={() => setFreqP(openAISettings.frequency_penalty.default)}
max={openAISettings.frequency_penalty.max}
min={openAISettings.frequency_penalty.min}
step={openAISettings.frequency_penalty.step}
className="flex h-4 w-full"
/>
</HoverCardTrigger>
<OptionHover endpoint={optionEndpoint ?? ''} type="freq" side={ESide.Left} />
</HoverCard>
<HoverCard openDelay={300}>
<HoverCardTrigger className="grid w-full items-center gap-2">
<div className="flex justify-between">
<Label htmlFor="pres-penalty-int" className="text-left text-sm font-medium">
{localize('com_endpoint_presence_penalty')}{' '}
<small className="opacity-40">
(
{localize(
'com_endpoint_default_with_num',
openAISettings.presence_penalty.default + '',
)}
)
</small>
</Label>
<InputNumber
id="pres-penalty-int"
disabled={readonly}
value={presPValue as number}
onChange={(value) => setPresP(Number(value))}
max={openAISettings.presence_penalty.max}
min={openAISettings.presence_penalty.min}
step={openAISettings.presence_penalty.step}
controls={false}
className={cn(
defaultTextProps,
cn(
optionText,
'reset-rc-number-input reset-rc-number-input-text-right h-auto w-12 border-0 group-hover/temp:border-gray-200',
),
)}
/>
</div>
<Slider
disabled={readonly}
value={[presPValue ?? openAISettings.presence_penalty.default]}
onValueChange={(value) => setPresP(value[0])}
doubleClickHandler={() => setPresP(openAISettings.presence_penalty.default)}
max={openAISettings.presence_penalty.max}
min={openAISettings.presence_penalty.min}
step={openAISettings.presence_penalty.step}
className="flex h-4 w-full"
/>
</HoverCardTrigger>
<OptionHover endpoint={optionEndpoint ?? ''} type="pres" side={ESide.Left} />
</HoverCard>
<div className="w-full">
<div className="mb-2 flex w-full justify-between gap-2">
<label
htmlFor="resend-files"
className="text-sm font-medium leading-none peer-disabled:cursor-not-allowed peer-disabled:opacity-70 dark:text-gray-50"
>
<small>{localize('com_endpoint_plug_resend_files')}</small>
</label>
<label
htmlFor="image-detail-value"
className="text-sm font-medium leading-none peer-disabled:cursor-not-allowed peer-disabled:opacity-70 dark:text-gray-50"
>
<small>{localize('com_endpoint_plug_image_detail')}</small>
</label>
<Input
id="image-detail-value"
disabled={true}
value={imageDetail ?? openAISettings.imageDetail.default}
className={cn(
defaultTextProps,
optionText,
'flex rounded-md bg-transparent py-2 text-xs focus:outline-none focus:ring-2 focus:ring-gray-400 focus:ring-offset-2 dark:border-gray-700',
'pointer-events-none max-h-5 w-12 border-0 group-hover/temp:border-gray-200',
)}
/>
</div>
<div className="flex w-full justify-between gap-2">
<HoverCard openDelay={500}>
<HoverCardTrigger>
<Switch
id="resend-files"
checked={resendFiles ?? openAISettings.resendFiles.default}
onCheckedChange={(checked: boolean) => setResendFiles(checked)}
disabled={readonly}
className="flex"
/>
<OptionHover endpoint={optionEndpoint ?? ''} type="resend" side={ESide.Bottom} />
</HoverCardTrigger>
</HoverCard>
<HoverCard openDelay={500}>
<HoverCardTrigger className="flex w-[52%] md:w-[125px]">
<Slider
id="image-detail-slider"
disabled={readonly}
value={[
imageDetailNumeric[imageDetail ?? ''] ??
imageDetailNumeric[openAISettings.imageDetail.default],
]}
onValueChange={(value) => setImageDetail(imageDetailValue[value[0]])}
doubleClickHandler={() => setImageDetail(openAISettings.imageDetail.default)}
max={openAISettings.imageDetail.max}
min={openAISettings.imageDetail.min}
step={openAISettings.imageDetail.step}
/>
<OptionHover endpoint={optionEndpoint ?? ''} type="detail" side={ESide.Bottom} />
</HoverCardTrigger>
</HoverCard>
</div>
<div className="flex flex-col gap-6 md:col-span-2">
{parameters.col2.map(renderComponent)}
</div>
</div>
</div>

View file

@@ -47,7 +47,6 @@ function DynamicSlider({
conversation,
inputValue,
setInputValue: setLocalValue,
preventDelayedUpdate: isEnum,
});
const selectedValue = useMemo(() => {

View file

@@ -1,16 +1,34 @@
import React, { useMemo, useState, useCallback } from 'react';
import React, { useMemo, useState, useEffect, useCallback } from 'react';
import { useGetEndpointsQuery } from 'librechat-data-provider/react-query';
import { getSettingsKeys, tPresetUpdateSchema } from 'librechat-data-provider';
import type { TPreset } from 'librechat-data-provider';
import { SaveAsPresetDialog } from '~/components/Endpoints';
import { useSetIndexOptions, useLocalize } from '~/hooks';
import { getEndpointField, logger } from '~/utils';
import { componentMapping } from './components';
import { useChatContext } from '~/Providers';
import { settings } from './settings';
const excludedKeys = new Set([
'conversationId',
'title',
'endpoint',
'endpointType',
'createdAt',
'updatedAt',
'messages',
'isArchived',
'tags',
'user',
'__v',
'_id',
'tools',
'model',
]);
export default function Parameters() {
const localize = useLocalize();
const { conversation } = useChatContext();
const { conversation, setConversation } = useChatContext();
const { setOption } = useSetIndexOptions();
const [isDialogOpen, setIsDialogOpen] = useState(false);
@@ -22,13 +40,70 @@ export default function Parameters() {
return endpointsConfig?.[conversation?.endpoint ?? '']?.availableRegions ?? [];
}, [endpointsConfig, conversation?.endpoint]);
const endpointType = useMemo(
() => getEndpointField(endpointsConfig, conversation?.endpoint, 'type'),
[conversation?.endpoint, endpointsConfig],
);
const parameters = useMemo(() => {
const [combinedKey, endpointKey] = getSettingsKeys(
conversation?.endpoint ?? '',
endpointType ?? conversation?.endpoint ?? '',
conversation?.model ?? '',
);
return settings[combinedKey] ?? settings[endpointKey];
}, [conversation]);
}, [conversation, endpointType]);
useEffect(() => {
if (!parameters) {
return;
}
// const defaultValueMap = new Map();
// const paramKeys = new Set(
// parameters.map((setting) => {
// if (setting.default != null) {
// defaultValueMap.set(setting.key, setting.default);
// }
// return setting.key;
// }),
// );
const paramKeys = new Set(parameters.map((setting) => setting.key));
setConversation((prev) => {
if (!prev) {
return prev;
}
const updatedConversation = { ...prev };
const conversationKeys = Object.keys(updatedConversation);
const updatedKeys: string[] = [];
conversationKeys.forEach((key) => {
// const defaultValue = defaultValueMap.get(key);
// if (paramKeys.has(key) && defaultValue != null && prev[key] != null) {
// updatedKeys.push(key);
// updatedConversation[key] = defaultValue;
// return;
// }
if (paramKeys.has(key)) {
return;
}
if (excludedKeys.has(key)) {
return;
}
if (prev[key] != null) {
updatedKeys.push(key);
delete updatedConversation[key];
}
});
logger.log('parameters', 'parameters effect, updated keys:', updatedKeys);
return updatedConversation;
});
}, [parameters, setConversation]);
const openDialog = useCallback(() => {
const newPreset = tPresetUpdateSchema.parse({
@@ -50,6 +125,9 @@
{/* Below is an example of an applied dynamic setting, each contained by a div with the specified column span */}
{parameters.map((setting) => {
const Component = componentMapping[setting.component];
if (!Component) {
return null;
}
const { key, default: defaultValue, ...rest } = setting;
if (key === 'region' && bedrockRegions.length) {
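
The `useEffect` above implements the "reset params on parameters change" commit: whenever the active parameter set changes, every non-null conversation key that is neither a current parameter key nor in `excludedKeys` is deleted, so settings from a previously selected endpoint cannot leak into requests. A worked example with hypothetical values:

const prev = { endpoint: 'openAI', model: 'gpt-4o', temperature: 0.7, topK: 5 };
const paramKeys = new Set(['temperature', 'top_p', 'max_tokens']); // OpenAI params
// 'endpoint' and 'model' survive via excludedKeys, 'temperature' via paramKeys;
// 'topK' (an Anthropic-only setting, non-null here) is deleted:
// → { endpoint: 'openAI', model: 'gpt-4o', temperature: 0.7 }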

View file

@@ -11,7 +11,10 @@ import {
DynamicTags,
} from './';
export const componentMapping: Record<ComponentTypes, React.ComponentType<DynamicSettingProps>> = {
export const componentMapping: Record<
ComponentTypes,
React.ComponentType<DynamicSettingProps> | undefined
> = {
[ComponentTypes.Slider]: DynamicSlider,
[ComponentTypes.Dropdown]: DynamicDropdown,
[ComponentTypes.Switch]: DynamicSwitch,

View file

@@ -1,8 +1,14 @@
import { EModelEndpoint, BedrockProviders } from 'librechat-data-provider';
import {
ImageDetail,
EModelEndpoint,
openAISettings,
BedrockProviders,
anthropicSettings,
} from 'librechat-data-provider';
import type { SettingsConfiguration, SettingDefinition } from 'librechat-data-provider';
// Base definitions
const baseDefinitions: Record<string, Partial<SettingDefinition>> = {
const baseDefinitions: Record<string, SettingDefinition> = {
model: {
key: 'model',
label: 'com_ui_model',
@@ -38,20 +44,32 @@ const baseDefinitions: Record<string, Partial<SettingDefinition>> = {
optionType: 'model',
columnSpan: 4,
},
};
const bedrock: Record<string, SettingDefinition> = {
region: {
key: 'region',
type: 'string',
label: 'com_ui_region',
stop: {
key: 'stop',
label: 'com_endpoint_stop',
labelCode: true,
component: 'combobox',
description: 'com_endpoint_openai_stop',
descriptionCode: true,
placeholder: 'com_endpoint_stop_placeholder',
placeholderCode: true,
type: 'array',
default: [],
component: 'tags',
optionType: 'conversation',
minTags: 0,
maxTags: 4,
},
imageDetail: {
key: 'imageDetail',
label: 'com_endpoint_plug_image_detail',
labelCode: true,
description: 'com_endpoint_openai_detail',
descriptionCode: true,
type: 'enum',
default: ImageDetail.auto,
component: 'slider',
options: [ImageDetail.low, ImageDetail.auto, ImageDetail.high],
optionType: 'conversation',
selectPlaceholder: 'com_ui_select_region',
searchPlaceholder: 'com_ui_select_search_region',
searchPlaceholderCode: true,
selectPlaceholderCode: true,
columnSpan: 2,
},
};
@@ -81,8 +99,10 @@ const librechat: Record<string, SettingDefinition> = {
labelCode: true,
type: 'number',
component: 'input',
placeholder: 'com_endpoint_context_info',
placeholder: 'com_nav_theme_system',
placeholderCode: true,
description: 'com_endpoint_context_info',
descriptionCode: true,
optionType: 'model',
columnSpan: 2,
},
@@ -112,7 +132,146 @@ },
},
};
const openAIParams: Record<string, SettingDefinition> = {
chatGptLabel: {
...librechat.modelLabel,
key: 'chatGptLabel',
},
promptPrefix: librechat.promptPrefix,
temperature: createDefinition(baseDefinitions.temperature, {
default: openAISettings.temperature.default,
range: {
min: openAISettings.temperature.min,
max: openAISettings.temperature.max,
step: openAISettings.temperature.step,
},
}),
top_p: createDefinition(baseDefinitions.topP, {
key: 'top_p',
default: openAISettings.top_p.default,
range: {
min: openAISettings.top_p.min,
max: openAISettings.top_p.max,
step: openAISettings.top_p.step,
},
}),
frequency_penalty: {
key: 'frequency_penalty',
label: 'com_endpoint_frequency_penalty',
labelCode: true,
description: 'com_endpoint_openai_freq',
descriptionCode: true,
type: 'number',
default: openAISettings.frequency_penalty.default,
range: {
min: openAISettings.frequency_penalty.min,
max: openAISettings.frequency_penalty.max,
step: openAISettings.frequency_penalty.step,
},
component: 'slider',
optionType: 'model',
columnSpan: 4,
},
presence_penalty: {
key: 'presence_penalty',
label: 'com_endpoint_presence_penalty',
labelCode: true,
description: 'com_endpoint_openai_pres',
descriptionCode: true,
type: 'number',
default: openAISettings.presence_penalty.default,
range: {
min: openAISettings.presence_penalty.min,
max: openAISettings.presence_penalty.max,
step: openAISettings.presence_penalty.step,
},
component: 'slider',
optionType: 'model',
columnSpan: 4,
},
max_tokens: {
key: 'max_tokens',
label: 'com_endpoint_max_output_tokens',
labelCode: true,
type: 'number',
component: 'input',
description: 'com_endpoint_openai_max_tokens',
descriptionCode: true,
placeholder: 'com_nav_theme_system',
placeholderCode: true,
optionType: 'model',
columnSpan: 2,
},
};
const anthropic: Record<string, SettingDefinition> = {
maxOutputTokens: {
key: 'maxOutputTokens',
label: 'com_endpoint_max_output_tokens',
labelCode: true,
type: 'number',
component: 'input',
description: 'com_endpoint_anthropic_maxoutputtokens',
descriptionCode: true,
placeholder: 'com_nav_theme_system',
placeholderCode: true,
range: {
min: anthropicSettings.maxOutputTokens.min,
max: anthropicSettings.maxOutputTokens.max,
step: anthropicSettings.maxOutputTokens.step,
},
optionType: 'model',
columnSpan: 2,
},
temperature: createDefinition(baseDefinitions.temperature, {
default: anthropicSettings.temperature.default,
range: {
min: anthropicSettings.temperature.min,
max: anthropicSettings.temperature.max,
step: anthropicSettings.temperature.step,
},
}),
topP: createDefinition(baseDefinitions.topP, {
default: anthropicSettings.topP.default,
range: {
min: anthropicSettings.topP.min,
max: anthropicSettings.topP.max,
step: anthropicSettings.topP.step,
},
}),
topK: {
key: 'topK',
label: 'com_endpoint_top_k',
labelCode: true,
description: 'com_endpoint_anthropic_topk',
descriptionCode: true,
type: 'number',
default: anthropicSettings.topK.default,
range: {
min: anthropicSettings.topK.min,
max: anthropicSettings.topK.max,
step: anthropicSettings.topK.step,
},
component: 'slider',
optionType: 'model',
columnSpan: 4,
},
promptCache: {
key: 'promptCache',
label: 'com_endpoint_prompt_cache',
labelCode: true,
description: 'com_endpoint_anthropic_prompt_cache',
descriptionCode: true,
type: 'boolean',
default: true,
component: 'switch',
optionType: 'conversation',
showDefault: false,
columnSpan: 2,
},
};
const bedrock: Record<string, SettingDefinition> = {
system: {
key: 'system',
label: 'com_endpoint_prompt_prefix',
@@ -124,6 +283,19 @@ const anthropic: Record<string, SettingDefinition> = {
placeholderCode: true,
optionType: 'model',
},
region: {
key: 'region',
type: 'string',
label: 'com_ui_region',
labelCode: true,
component: 'combobox',
optionType: 'conversation',
selectPlaceholder: 'com_ui_select_region',
searchPlaceholder: 'com_ui_select_search_region',
searchPlaceholderCode: true,
selectPlaceholderCode: true,
columnSpan: 2,
},
maxTokens: {
key: 'maxTokens',
label: 'com_endpoint_max_output_tokens',
@@ -139,37 +311,13 @@ const anthropic: Record<string, SettingDefinition> = {
default: 1,
range: { min: 0, max: 1, step: 0.01 },
}),
topK: createDefinition(anthropic.topK, {
range: { min: 0, max: 500, step: 1 },
}),
topP: createDefinition(baseDefinitions.topP, {
default: 0.999,
range: { min: 0, max: 1, step: 0.01 },
}),
topK: {
key: 'topK',
label: 'com_endpoint_top_k',
labelCode: true,
description: 'com_endpoint_anthropic_topk',
descriptionCode: true,
type: 'number',
range: { min: 0, max: 500, step: 1 },
component: 'slider',
optionType: 'model',
columnSpan: 4,
},
stop: {
key: 'stop',
label: 'com_endpoint_stop',
labelCode: true,
description: 'com_endpoint_openai_stop',
descriptionCode: true,
placeholder: 'com_endpoint_stop_placeholder',
placeholderCode: true,
type: 'array',
default: [],
component: 'tags',
optionType: 'conversation',
minTags: 0,
maxTags: 4,
},
};
const mistral: Record<string, SettingDefinition> = {
@@ -204,15 +352,75 @@ const meta: Record<string, SettingDefinition> = {
}),
};
const bedrockAnthropic: SettingsConfiguration = [
librechat.modelLabel,
anthropic.system,
const openAI: SettingsConfiguration = [
openAIParams.chatGptLabel,
librechat.promptPrefix,
librechat.maxContextTokens,
anthropic.maxTokens,
openAIParams.max_tokens,
openAIParams.temperature,
openAIParams.top_p,
openAIParams.frequency_penalty,
openAIParams.presence_penalty,
baseDefinitions.stop,
librechat.resendFiles,
baseDefinitions.imageDetail,
];
const openAICol1: SettingsConfiguration = [
baseDefinitions.model as SettingDefinition,
openAIParams.chatGptLabel,
librechat.promptPrefix,
librechat.maxContextTokens,
];
const openAICol2: SettingsConfiguration = [
openAIParams.max_tokens,
openAIParams.temperature,
openAIParams.top_p,
openAIParams.frequency_penalty,
openAIParams.presence_penalty,
baseDefinitions.stop,
librechat.resendFiles,
baseDefinitions.imageDetail,
];
const anthropicConfig: SettingsConfiguration = [
librechat.modelLabel,
librechat.promptPrefix,
librechat.maxContextTokens,
anthropic.maxOutputTokens,
anthropic.temperature,
anthropic.topP,
anthropic.topK,
anthropic.stop,
librechat.resendFiles,
anthropic.promptCache,
];
const anthropicCol1: SettingsConfiguration = [
baseDefinitions.model as SettingDefinition,
librechat.modelLabel,
librechat.promptPrefix,
];
const anthropicCol2: SettingsConfiguration = [
librechat.maxContextTokens,
anthropic.maxOutputTokens,
anthropic.temperature,
anthropic.topP,
anthropic.topK,
librechat.resendFiles,
anthropic.promptCache,
];
const bedrockAnthropic: SettingsConfiguration = [
librechat.modelLabel,
bedrock.system,
librechat.maxContextTokens,
bedrock.maxTokens,
bedrock.temperature,
bedrock.topP,
bedrock.topK,
baseDefinitions.stop,
bedrock.region,
librechat.resendFiles,
];
@@ -221,7 +429,7 @@ const bedrockMistral: SettingsConfiguration = [
librechat.modelLabel,
librechat.promptPrefix,
librechat.maxContextTokens,
anthropic.maxTokens,
bedrock.maxTokens,
mistral.temperature,
mistral.topP,
bedrock.region,
@@ -232,7 +440,7 @@ const bedrockCohere: SettingsConfiguration = [
librechat.modelLabel,
librechat.promptPrefix,
librechat.maxContextTokens,
anthropic.maxTokens,
bedrock.maxTokens,
cohere.temperature,
cohere.topP,
bedrock.region,
@@ -252,16 +460,16 @@ const bedrockGeneral: SettingsConfiguration = [
const bedrockAnthropicCol1: SettingsConfiguration = [
baseDefinitions.model as SettingDefinition,
librechat.modelLabel,
anthropic.system,
anthropic.stop,
bedrock.system,
baseDefinitions.stop,
];
const bedrockAnthropicCol2: SettingsConfiguration = [
librechat.maxContextTokens,
anthropic.maxTokens,
anthropic.temperature,
anthropic.topP,
anthropic.topK,
bedrock.maxTokens,
bedrock.temperature,
bedrock.topP,
bedrock.topK,
bedrock.region,
librechat.resendFiles,
];
@@ -274,7 +482,7 @@ const bedrockMistralCol1: SettingsConfiguration = [
const bedrockMistralCol2: SettingsConfiguration = [
librechat.maxContextTokens,
anthropic.maxTokens,
bedrock.maxTokens,
mistral.temperature,
mistral.topP,
bedrock.region,
@@ -289,7 +497,7 @@ const bedrockCohereCol1: SettingsConfiguration = [
const bedrockCohereCol2: SettingsConfiguration = [
librechat.maxContextTokens,
anthropic.maxTokens,
bedrock.maxTokens,
cohere.temperature,
cohere.topP,
bedrock.region,
@@ -311,6 +519,10 @@ const bedrockGeneralCol2: SettingsConfiguration = [
];
export const settings: Record<string, SettingsConfiguration | undefined> = {
[EModelEndpoint.openAI]: openAI,
[EModelEndpoint.azureOpenAI]: openAI,
[EModelEndpoint.custom]: openAI,
[EModelEndpoint.anthropic]: anthropicConfig,
[`${EModelEndpoint.bedrock}-${BedrockProviders.Anthropic}`]: bedrockAnthropic,
[`${EModelEndpoint.bedrock}-${BedrockProviders.MistralAI}`]: bedrockMistral,
[`${EModelEndpoint.bedrock}-${BedrockProviders.Cohere}`]: bedrockCohere,
@@ -319,6 +531,16 @@ export const settings: Record<string, SettingsConfiguration | undefined> = {
[`${EModelEndpoint.bedrock}-${BedrockProviders.Amazon}`]: bedrockGeneral,
};
const openAIColumns = {
col1: openAICol1,
col2: openAICol2,
};
const bedrockGeneralColumns = {
col1: bedrockGeneralCol1,
col2: bedrockGeneralCol2,
};
export const presetSettings: Record<
string,
| {
@@ -327,6 +549,13 @@ }
}
| undefined
> = {
[EModelEndpoint.openAI]: openAIColumns,
[EModelEndpoint.azureOpenAI]: openAIColumns,
[EModelEndpoint.custom]: openAIColumns,
[EModelEndpoint.anthropic]: {
col1: anthropicCol1,
col2: anthropicCol2,
},
[`${EModelEndpoint.bedrock}-${BedrockProviders.Anthropic}`]: {
col1: bedrockAnthropicCol1,
col2: bedrockAnthropicCol2,
@@ -339,16 +568,7 @@
col1: bedrockCohereCol1,
col2: bedrockCohereCol2,
},
[`${EModelEndpoint.bedrock}-${BedrockProviders.Meta}`]: {
col1: bedrockGeneralCol1,
col2: bedrockGeneralCol2,
},
[`${EModelEndpoint.bedrock}-${BedrockProviders.AI21}`]: {
col1: bedrockGeneralCol1,
col2: bedrockGeneralCol2,
},
[`${EModelEndpoint.bedrock}-${BedrockProviders.Amazon}`]: {
col1: bedrockGeneralCol1,
col2: bedrockGeneralCol2,
},
[`${EModelEndpoint.bedrock}-${BedrockProviders.Meta}`]: bedrockGeneralColumns,
[`${EModelEndpoint.bedrock}-${BedrockProviders.AI21}`]: bedrockGeneralColumns,
[`${EModelEndpoint.bedrock}-${BedrockProviders.Amazon}`]: bedrockGeneralColumns,
};

View file

@@ -12,9 +12,9 @@ import { ResizableHandleAlt, ResizablePanel, ResizablePanelGroup } from '~/compo
import { useMediaQuery, useLocalStorage, useLocalize } from '~/hooks';
import useSideNavLinks from '~/hooks/Nav/useSideNavLinks';
import NavToggle from '~/components/Nav/NavToggle';
import { cn, getEndpointField } from '~/utils';
import { useChatContext } from '~/Providers';
import Switcher from './Switcher';
import { cn } from '~/utils';
import Nav from './Nav';
interface SidePanelProps {
@@ -81,6 +81,10 @@ const SidePanel = ({
return typeof activePanel === 'string' ? activePanel : undefined;
}, []);
const endpointType = useMemo(
() => getEndpointField(endpointsConfig, endpoint, 'type'),
[endpoint, endpointsConfig],
);
const assistants = useMemo(() => endpointsConfig?.[endpoint ?? ''], [endpoint, endpointsConfig]);
const agents = useMemo(() => endpointsConfig?.[endpoint ?? ''], [endpoint, endpointsConfig]);
@@ -108,6 +112,7 @@ hidePanel,
hidePanel,
assistants,
keyProvided,
endpointType,
interfaceConfig,
});

View file

@@ -4,7 +4,7 @@ import {
isAssistantsEndpoint,
isAgentsEndpoint,
PermissionTypes,
paramEndpoints,
isParamEndpoint,
EModelEndpoint,
Permissions,
} from 'librechat-data-provider';
@@ -25,6 +25,7 @@ export default function useSideNavLinks({
agents,
keyProvided,
endpoint,
endpointType,
interfaceConfig,
}: {
hidePanel: () => void;
@@ -32,6 +33,7 @@ agents?: TConfig | null;
agents?: TConfig | null;
keyProvided: boolean;
endpoint?: EModelEndpoint | null;
endpointType?: EModelEndpoint | null;
interfaceConfig: Partial<TInterfaceConfig>;
}) {
const hasAccessToPrompts = useHasAccess({
@@ -87,7 +89,11 @@ });
});
}
if (interfaceConfig.parameters === true && paramEndpoints.has(endpoint ?? '') && keyProvided) {
if (
interfaceConfig.parameters === true &&
isParamEndpoint(endpoint ?? '', endpointType ?? '') === true &&
keyProvided
) {
links.push({
title: 'com_sidepanel_parameters',
label: '',
@@ -128,6 +134,7 @@ interfaceConfig.parameters,
interfaceConfig.parameters,
keyProvided,
assistants,
endpointType,
endpoint,
agents,
hasAccessToPrompts,

View file

@@ -7,9 +7,9 @@ import {
import { useNavigate } from 'react-router-dom';
import {
FileSources,
isParamEndpoint,
LocalStorageKeys,
isAssistantsEndpoint,
paramEndpoints,
} from 'librechat-data-provider';
import { useRecoilState, useRecoilValue, useSetRecoilState, useRecoilCallback } from 'recoil';
import type {
@@ -185,12 +185,11 @@ const useNewConvo = (index = 0) => {
pauseGlobalAudio();
const templateConvoId = _template.conversationId ?? '';
const isParamEndpoint =
paramEndpoints.has(_template.endpoint ?? '') ||
paramEndpoints.has(_preset?.endpoint ?? '') ||
paramEndpoints.has(_template.endpointType ?? '');
const paramEndpoint =
isParamEndpoint(_template.endpoint ?? '', _template.endpointType ?? '') === true ||
isParamEndpoint(_preset?.endpoint ?? '', _preset?.endpointType ?? '');
const template =
isParamEndpoint && templateConvoId && templateConvoId === 'new'
paramEndpoint === true && templateConvoId && templateConvoId === 'new'
? { endpoint: _template.endpoint }
: _template;

View file

@@ -720,11 +720,6 @@ export const modularEndpoints = new Set<EModelEndpoint | string>([
EModelEndpoint.bedrock,
]);
export const paramEndpoints = new Set<EModelEndpoint | string>([
EModelEndpoint.agents,
EModelEndpoint.bedrock,
]);
export const supportsBalanceCheck = {
[EModelEndpoint.custom]: true,
[EModelEndpoint.openAI]: true,

View file

@@ -13,13 +13,11 @@ import {
gptPluginsSchema,
// agentsSchema,
compactAgentsSchema,
compactOpenAISchema,
compactGoogleSchema,
compactChatGPTSchema,
chatGPTBrowserSchema,
compactPluginsSchema,
compactAssistantSchema,
compactAnthropicSchema,
} from './schemas';
import { bedrockInputSchema } from './bedrock';
import { alternateName } from './config';
@@ -302,20 +300,20 @@ export const getResponseSender = (endpointOption: t.TEndpointOption): string =>
};
type CompactEndpointSchema =
| typeof compactOpenAISchema
| typeof openAISchema
| typeof compactAssistantSchema
| typeof compactAgentsSchema
| typeof compactGoogleSchema
| typeof bingAISchema
| typeof compactAnthropicSchema
| typeof anthropicSchema
| typeof compactChatGPTSchema
| typeof bedrockInputSchema
| typeof compactPluginsSchema;
const compactEndpointSchemas: Record<string, CompactEndpointSchema> = {
[EModelEndpoint.openAI]: compactOpenAISchema,
[EModelEndpoint.azureOpenAI]: compactOpenAISchema,
[EModelEndpoint.custom]: compactOpenAISchema,
[EModelEndpoint.openAI]: openAISchema,
[EModelEndpoint.azureOpenAI]: openAISchema,
[EModelEndpoint.custom]: openAISchema,
[EModelEndpoint.assistants]: compactAssistantSchema,
[EModelEndpoint.azureAssistants]: compactAssistantSchema,
[EModelEndpoint.agents]: compactAgentsSchema,
@@ -323,7 +321,7 @@ const compactEndpointSchemas: Record<string, CompactEndpointSchema> = {
[EModelEndpoint.bedrock]: bedrockInputSchema,
/* BingAI needs all fields */
[EModelEndpoint.bingAI]: bingAISchema,
[EModelEndpoint.anthropic]: compactAnthropicSchema,
[EModelEndpoint.anthropic]: anthropicSchema,
[EModelEndpoint.chatGPTBrowser]: compactChatGPTSchema,
[EModelEndpoint.gptPlugins]: compactPluginsSchema,
};

View file

@@ -28,6 +28,14 @@ export enum EModelEndpoint {
bedrock = 'bedrock',
}
export const paramEndpoints = new Set<EModelEndpoint | string>([
EModelEndpoint.agents,
EModelEndpoint.bedrock,
EModelEndpoint.openAI,
EModelEndpoint.anthropic,
EModelEndpoint.custom,
]);
export enum BedrockProviders {
AI21 = 'ai21',
Amazon = 'amazon',
@@ -72,6 +80,21 @@ export const isAgentsEndpoint = (_endpoint?: EModelEndpoint.agents | null | stri
return endpoint === EModelEndpoint.agents;
};
export const isParamEndpoint = (
endpoint: EModelEndpoint | string,
endpointType?: EModelEndpoint | string,
): boolean => {
if (paramEndpoints.has(endpoint)) {
return true;
}
if (endpointType != null) {
return paramEndpoints.has(endpointType);
}
return false;
};
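
In other words, a known endpoint matches directly and `endpointType` is only consulted as a fallback; an unrecognized endpoint with no type yields false ('my-litellm' again stands in for a user-defined endpoint name):

isParamEndpoint(EModelEndpoint.openAI); // true — direct set membership
isParamEndpoint('my-litellm', EModelEndpoint.custom); // true — via endpointType
isParamEndpoint('my-litellm'); // false — unknown name, no type to fall back on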
export enum ImageDetail {
low = 'low',
auto = 'auto',
@@ -500,7 +523,7 @@ export const tConversationSchema = z.object({
frequency_penalty: z.number().optional(),
presence_penalty: z.number().optional(),
parentMessageId: z.string().optional(),
maxOutputTokens: z.number().optional(),
maxOutputTokens: coerceNumber.optional(),
maxContextTokens: coerceNumber.optional(),
max_tokens: coerceNumber.optional(),
/* Anthropic */
@@ -630,71 +653,6 @@ export const tConversationTagSchema = z.object({
});
export type TConversationTag = z.infer<typeof tConversationTagSchema>;
export const openAISchema = tConversationSchema
.pick({
model: true,
modelLabel: true,
chatGptLabel: true,
promptPrefix: true,
temperature: true,
top_p: true,
presence_penalty: true,
frequency_penalty: true,
resendFiles: true,
artifacts: true,
imageDetail: true,
stop: true,
iconURL: true,
greeting: true,
spec: true,
maxContextTokens: true,
max_tokens: true,
})
.transform((obj) => {
const result = {
...obj,
model: obj.model ?? openAISettings.model.default,
chatGptLabel: obj.chatGptLabel ?? obj.modelLabel ?? null,
promptPrefix: obj.promptPrefix ?? null,
temperature: obj.temperature ?? openAISettings.temperature.default,
top_p: obj.top_p ?? openAISettings.top_p.default,
presence_penalty: obj.presence_penalty ?? openAISettings.presence_penalty.default,
frequency_penalty: obj.frequency_penalty ?? openAISettings.frequency_penalty.default,
resendFiles:
typeof obj.resendFiles === 'boolean' ? obj.resendFiles : openAISettings.resendFiles.default,
imageDetail: obj.imageDetail ?? openAISettings.imageDetail.default,
stop: obj.stop ?? undefined,
iconURL: obj.iconURL ?? undefined,
greeting: obj.greeting ?? undefined,
spec: obj.spec ?? undefined,
maxContextTokens: obj.maxContextTokens ?? undefined,
max_tokens: obj.max_tokens ?? undefined,
};
if (obj.modelLabel != null && obj.modelLabel !== '') {
result.modelLabel = null;
}
return result;
})
.catch(() => ({
model: openAISettings.model.default,
chatGptLabel: null,
promptPrefix: null,
temperature: openAISettings.temperature.default,
top_p: openAISettings.top_p.default,
presence_penalty: openAISettings.presence_penalty.default,
frequency_penalty: openAISettings.frequency_penalty.default,
resendFiles: openAISettings.resendFiles.default,
imageDetail: openAISettings.imageDetail.default,
stop: undefined,
iconURL: undefined,
greeting: undefined,
spec: undefined,
maxContextTokens: undefined,
max_tokens: undefined,
}));
export const googleSchema = tConversationSchema
.pick({
model: true,
@@ -778,64 +736,6 @@ export const bingAISchema = tConversationSchema
invocationId: 1,
}));
export const anthropicSchema = tConversationSchema
.pick({
model: true,
modelLabel: true,
promptPrefix: true,
temperature: true,
maxOutputTokens: true,
topP: true,
topK: true,
resendFiles: true,
promptCache: true,
artifacts: true,
iconURL: true,
greeting: true,
spec: true,
maxContextTokens: true,
})
.transform((obj) => {
const model = obj.model ?? anthropicSettings.model.default;
return {
...obj,
model,
modelLabel: obj.modelLabel ?? null,
promptPrefix: obj.promptPrefix ?? null,
temperature: obj.temperature ?? anthropicSettings.temperature.default,
maxOutputTokens: obj.maxOutputTokens ?? anthropicSettings.maxOutputTokens.reset(model),
topP: obj.topP ?? anthropicSettings.topP.default,
topK: obj.topK ?? anthropicSettings.topK.default,
promptCache:
typeof obj.promptCache === 'boolean'
? obj.promptCache
: anthropicSettings.promptCache.default,
resendFiles:
typeof obj.resendFiles === 'boolean'
? obj.resendFiles
: anthropicSettings.resendFiles.default,
iconURL: obj.iconURL ?? undefined,
greeting: obj.greeting ?? undefined,
spec: obj.spec ?? undefined,
maxContextTokens: obj.maxContextTokens ?? anthropicSettings.maxContextTokens.default,
};
})
.catch(() => ({
model: anthropicSettings.model.default,
modelLabel: null,
promptPrefix: null,
temperature: anthropicSettings.temperature.default,
maxOutputTokens: anthropicSettings.maxOutputTokens.default,
topP: anthropicSettings.topP.default,
topK: anthropicSettings.topK.default,
resendFiles: anthropicSettings.resendFiles.default,
promptCache: anthropicSettings.promptCache.default,
iconURL: undefined,
greeting: undefined,
spec: undefined,
maxContextTokens: anthropicSettings.maxContextTokens.default,
}));
export const chatGPTBrowserSchema = tConversationSchema
.pick({
model: true,
@@ -1027,7 +927,7 @@ export const agentsSchema = tConversationSchema
maxContextTokens: undefined,
}));
export const compactOpenAISchema = tConversationSchema
export const openAISchema = tConversationSchema
.pick({
model: true,
chatGptLabel: true,
@@ -1046,29 +946,7 @@
maxContextTokens: true,
max_tokens: true,
})
.transform((obj: Partial<TConversation>) => {
const newObj: Partial<TConversation> = { ...obj };
if (newObj.temperature === openAISettings.temperature.default) {
delete newObj.temperature;
}
if (newObj.top_p === openAISettings.top_p.default) {
delete newObj.top_p;
}
if (newObj.presence_penalty === openAISettings.presence_penalty.default) {
delete newObj.presence_penalty;
}
if (newObj.frequency_penalty === openAISettings.frequency_penalty.default) {
delete newObj.frequency_penalty;
}
if (newObj.resendFiles === openAISettings.resendFiles.default) {
delete newObj.resendFiles;
}
if (newObj.imageDetail === openAISettings.imageDetail.default) {
delete newObj.imageDetail;
}
return removeNullishValues(newObj);
})
.transform((obj: Partial<TConversation>) => removeNullishValues(obj))
.catch(() => ({}));
export const compactGoogleSchema = tConversationSchema
@@ -1106,7 +984,7 @@
})
.catch(() => ({}));
export const compactAnthropicSchema = tConversationSchema
export const anthropicSchema = tConversationSchema
.pick({
model: true,
modelLabel: true,
@@ -1123,29 +1001,7 @@
spec: true,
maxContextTokens: true,
})
.transform((obj) => {
const newObj: Partial<TConversation> = { ...obj };
if (newObj.temperature === anthropicSettings.temperature.default) {
delete newObj.temperature;
}
if (newObj.maxOutputTokens === anthropicSettings.legacy.maxOutputTokens.default) {
delete newObj.maxOutputTokens;
}
if (newObj.topP === anthropicSettings.topP.default) {
delete newObj.topP;
}
if (newObj.topK === anthropicSettings.topK.default) {
delete newObj.topK;
}
if (newObj.resendFiles === anthropicSettings.resendFiles.default) {
delete newObj.resendFiles;
}
if (newObj.promptCache === anthropicSettings.promptCache.default) {
delete newObj.promptCache;
}
return removeNullishValues(newObj);
})
.transform((obj) => removeNullishValues(obj))
.catch(() => ({}));
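
Note what the two compact schemas above no longer do: previously each deleted any value equal to its endpoint default before saving, whereas now they only strip nullish values, so an explicitly chosen value survives even when it happens to equal a default. `removeNullishValues` itself is defined elsewhere in the package; a minimal sketch consistent with its usage here:

function stripNullish<T extends Record<string, unknown>>(obj: T): Partial<T> {
  // Drop keys whose values are null or undefined; keep everything else.
  return Object.fromEntries(
    Object.entries(obj).filter(([, value]) => value != null),
  ) as Partial<T>;
}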
export const compactChatGPTSchema = tConversationSchema