import { EModelEndpoint, BedrockProviders } from 'librechat-data-provider';
import type { SettingsConfiguration, SettingDefinition } from 'librechat-data-provider';

// Base definitions
const baseDefinitions: Record<string, Partial<SettingDefinition>> = {
  model: {
    key: 'model',
    label: 'com_ui_model',
    labelCode: true,
    type: 'string',
    component: 'dropdown',
    optionType: 'model',
    selectPlaceholder: 'com_ui_select_model',
    searchPlaceholder: 'com_ui_select_search_model',
    searchPlaceholderCode: true,
    selectPlaceholderCode: true,
    columnSpan: 4,
  },
  temperature: {
    key: 'temperature',
    label: 'com_endpoint_temperature',
    labelCode: true,
    description: 'com_endpoint_openai_temp',
    descriptionCode: true,
    type: 'number',
    component: 'slider',
    optionType: 'model',
    columnSpan: 4,
  },
  topP: {
    key: 'topP',
    label: 'com_endpoint_top_p',
    labelCode: true,
    description: 'com_endpoint_anthropic_topp',
    descriptionCode: true,
    type: 'number',
    component: 'slider',
    optionType: 'model',
    columnSpan: 4,
  },
};

// Bedrock-specific definitions
const bedrock: Record<string, SettingDefinition> = {
  region: {
    key: 'region',
    type: 'string',
    label: 'com_ui_region',
    labelCode: true,
    component: 'combobox',
    optionType: 'conversation',
    selectPlaceholder: 'com_ui_select_region',
    searchPlaceholder: 'com_ui_select_search_region',
    searchPlaceholderCode: true,
    selectPlaceholderCode: true,
    columnSpan: 2,
  },
};

// Merges a base definition with provider-specific overrides.
const createDefinition = (
  base: Partial<SettingDefinition>,
  overrides: Partial<SettingDefinition>,
): SettingDefinition => {
  return { ...base, ...overrides } as SettingDefinition;
};

const librechat: Record<string, SettingDefinition> = {
  modelLabel: {
    key: 'modelLabel',
    label: 'com_endpoint_custom_name',
    labelCode: true,
    type: 'string',
    default: '',
    component: 'input',
    placeholder: 'com_endpoint_openai_custom_name_placeholder',
    placeholderCode: true,
    optionType: 'conversation',
  },
  maxContextTokens: {
    key: 'maxContextTokens',
    label: 'com_endpoint_context_tokens',
    labelCode: true,
    type: 'number',
    component: 'input',
    placeholder: 'com_endpoint_context_info',
    placeholderCode: true,
    optionType: 'model',
    columnSpan: 2,
  },
  resendFiles: {
    key: 'resendFiles',
    label: 'com_endpoint_plug_resend_files',
    labelCode: true,
    description: 'com_endpoint_openai_resend_files',
    descriptionCode: true,
    type: 'boolean',
    default: true,
    component: 'switch',
    optionType: 'conversation',
    showDefault: false,
    columnSpan: 2,
  },
  promptPrefix: {
    key: 'promptPrefix',
    label: 'com_endpoint_prompt_prefix',
    labelCode: true,
    type: 'string',
    default: '',
    component: 'textarea',
    placeholder: 'com_endpoint_openai_prompt_prefix_placeholder',
    placeholderCode: true,
    optionType: 'model',
  },
};

const anthropic: Record<string, SettingDefinition> = {
  system: {
    key: 'system',
    label: 'com_endpoint_prompt_prefix',
    labelCode: true,
    type: 'string',
    default: '',
    component: 'textarea',
    placeholder: 'com_endpoint_openai_prompt_prefix_placeholder',
    placeholderCode: true,
    optionType: 'model',
  },
  maxTokens: {
    key: 'maxTokens',
    label: 'com_endpoint_max_output_tokens',
    labelCode: true,
    type: 'number',
    component: 'input',
    placeholder: 'com_endpoint_anthropic_maxoutputtokens',
    placeholderCode: true,
    optionType: 'model',
    columnSpan: 2,
  },
  temperature: createDefinition(baseDefinitions.temperature, {
    default: 1,
    range: { min: 0, max: 1, step: 0.01 },
  }),
  topP: createDefinition(baseDefinitions.topP, {
    default: 0.999,
    range: { min: 0, max: 1, step: 0.01 },
  }),
  topK: {
    key: 'topK',
    label: 'com_endpoint_top_k',
    labelCode: true,
    description: 'com_endpoint_anthropic_topk',
    descriptionCode: true,
    type: 'number',
    range: { min: 0, max: 500, step: 1 },
    component: 'slider',
    optionType: 'model',
    columnSpan: 4,
  },
  stop: {
    key: 'stop',
    label: 'com_endpoint_stop',
    labelCode: true,
    description: 'com_endpoint_openai_stop',
    descriptionCode: true,
    placeholder: 'com_endpoint_stop_placeholder',
    placeholderCode: true,
    type: 'array',
    default: [],
    component: 'tags',
    optionType: 'conversation',
    minTags: 0,
    maxTags: 4,
  },
};

const mistral: Record<string, SettingDefinition> = {
  temperature: createDefinition(baseDefinitions.temperature, {
    default: 0.7,
    range: { min: 0, max: 1, step: 0.01 },
  }),
  topP: createDefinition(baseDefinitions.topP, {
    range: { min: 0, max: 1, step: 0.01 },
  }),
};

const cohere: Record<string, SettingDefinition> = {
  temperature: createDefinition(baseDefinitions.temperature, {
    default: 0.3,
    range: { min: 0, max: 1, step: 0.01 },
  }),
  topP: createDefinition(baseDefinitions.topP, {
    default: 0.75,
    range: { min: 0.01, max: 0.99, step: 0.01 },
  }),
};

const meta: Record<string, SettingDefinition> = {
  temperature: createDefinition(baseDefinitions.temperature, {
    default: 0.5,
    range: { min: 0, max: 1, step: 0.01 },
  }),
  topP: createDefinition(baseDefinitions.topP, {
    default: 0.9,
    range: { min: 0, max: 1, step: 0.01 },
  }),
};

// Settings configurations per Bedrock provider
const bedrockAnthropic: SettingsConfiguration = [
  librechat.modelLabel,
  anthropic.system,
  librechat.maxContextTokens,
  anthropic.maxTokens,
  anthropic.temperature,
  anthropic.topP,
  anthropic.topK,
  anthropic.stop,
  bedrock.region,
  librechat.resendFiles,
];

const bedrockMistral: SettingsConfiguration = [
  librechat.modelLabel,
  librechat.promptPrefix,
  librechat.maxContextTokens,
  anthropic.maxTokens,
  mistral.temperature,
  mistral.topP,
  bedrock.region,
  librechat.resendFiles,
];

const bedrockCohere: SettingsConfiguration = [
  librechat.modelLabel,
  librechat.promptPrefix,
  librechat.maxContextTokens,
  anthropic.maxTokens,
  cohere.temperature,
  cohere.topP,
  bedrock.region,
  librechat.resendFiles,
];

const bedrockGeneral: SettingsConfiguration = [
  librechat.modelLabel,
  librechat.promptPrefix,
  librechat.maxContextTokens,
  meta.temperature,
  meta.topP,
  bedrock.region,
  librechat.resendFiles,
];

// Two-column layouts consumed by presetSettings below
const bedrockAnthropicCol1: SettingsConfiguration = [
  baseDefinitions.model as SettingDefinition,
  librechat.modelLabel,
  anthropic.system,
  anthropic.stop,
];

const bedrockAnthropicCol2: SettingsConfiguration = [
  librechat.maxContextTokens,
  anthropic.maxTokens,
  anthropic.temperature,
  anthropic.topP,
  anthropic.topK,
  bedrock.region,
  librechat.resendFiles,
];

const bedrockMistralCol1: SettingsConfiguration = [
  baseDefinitions.model as SettingDefinition,
  librechat.modelLabel,
  librechat.promptPrefix,
];

const bedrockMistralCol2: SettingsConfiguration = [
  librechat.maxContextTokens,
  anthropic.maxTokens,
  mistral.temperature,
  mistral.topP,
  bedrock.region,
  librechat.resendFiles,
];

const bedrockCohereCol1: SettingsConfiguration = [
  baseDefinitions.model as SettingDefinition,
  librechat.modelLabel,
  librechat.promptPrefix,
];

const bedrockCohereCol2: SettingsConfiguration = [
  librechat.maxContextTokens,
  anthropic.maxTokens,
  cohere.temperature,
  cohere.topP,
  bedrock.region,
  librechat.resendFiles,
];

const bedrockGeneralCol1: SettingsConfiguration = [
  baseDefinitions.model as SettingDefinition,
  librechat.modelLabel,
  librechat.promptPrefix,
];

const bedrockGeneralCol2: SettingsConfiguration = [
  librechat.maxContextTokens,
  meta.temperature,
  meta.topP,
  bedrock.region,
  librechat.resendFiles,
];

export const settings: Record<string, SettingsConfiguration> = {
  [`${EModelEndpoint.bedrock}-${BedrockProviders.Anthropic}`]: bedrockAnthropic,
  [`${EModelEndpoint.bedrock}-${BedrockProviders.MistralAI}`]: bedrockMistral,
  [`${EModelEndpoint.bedrock}-${BedrockProviders.Cohere}`]: bedrockCohere,
  [`${EModelEndpoint.bedrock}-${BedrockProviders.Meta}`]: bedrockGeneral,
  [`${EModelEndpoint.bedrock}-${BedrockProviders.AI21}`]: bedrockGeneral,
  [`${EModelEndpoint.bedrock}-${BedrockProviders.Amazon}`]: bedrockGeneral,
};

export const presetSettings: Record<
  string,
  | {
      col1: SettingsConfiguration;
      col2: SettingsConfiguration;
    }
  | undefined
> = {
  [`${EModelEndpoint.bedrock}-${BedrockProviders.Anthropic}`]: {
    col1: bedrockAnthropicCol1,
    col2: bedrockAnthropicCol2,
  },
  [`${EModelEndpoint.bedrock}-${BedrockProviders.MistralAI}`]: {
    col1: bedrockMistralCol1,
    col2: bedrockMistralCol2,
  },
  [`${EModelEndpoint.bedrock}-${BedrockProviders.Cohere}`]: {
    col1: bedrockCohereCol1,
    col2: bedrockCohereCol2,
  },
  [`${EModelEndpoint.bedrock}-${BedrockProviders.Meta}`]: {
    col1: bedrockGeneralCol1,
    col2: bedrockGeneralCol2,
  },
  [`${EModelEndpoint.bedrock}-${BedrockProviders.AI21}`]: {
    col1: bedrockGeneralCol1,
    col2: bedrockGeneralCol2,
  },
  [`${EModelEndpoint.bedrock}-${BedrockProviders.Amazon}`]: {
    col1: bedrockGeneralCol1,
    col2: bedrockGeneralCol2,
  },
};
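
// Usage sketch (illustrative addition, not part of the original module): both maps
// above are keyed by the template `${EModelEndpoint.bedrock}-${provider}`, so a
// consumer resolves the Bedrock provider for the active model and indexes by that
// key. `getBedrockSettings` is a hypothetical helper name; deriving the provider
// string from a model ID is left to the caller.
export const getBedrockSettings = (provider: string): SettingsConfiguration | undefined =>
  settings[`${EModelEndpoint.bedrock}-${provider}`];

// e.g. getBedrockSettings(BedrockProviders.Anthropic) yields the Anthropic panel
// (system, maxTokens, temperature, topP, topK, stop, region, resendFiles, ...).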