Merge branch 'main' into feat/Multitenant-login-OIDC

Ruben Talstra 2025-05-22 10:49:49 +02:00 committed by GitHub
commit bba4184b2c
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
141 changed files with 3565 additions and 954 deletions

View file

@@ -1,6 +1,6 @@
{
"name": "librechat-data-provider",
- "version": "0.7.82",
+ "version": "0.7.83",
"description": "data services for librechat apps",
"main": "dist/index.js",
"module": "dist/index.es.js",

View file

@@ -52,6 +52,7 @@ export const excludedKeys = new Set([
'model',
'files',
'spec',
'disableParams',
]);
export enum SettingsViews {
@@ -278,6 +279,12 @@ export const endpointSchema = baseEndpointSchema.merge(
headers: z.record(z.any()).optional(),
addParams: z.record(z.any()).optional(),
dropParams: z.array(z.string()).optional(),
customParams: z
.object({
defaultParamsEndpoint: z.string().default('custom'),
paramDefinitions: z.array(z.record(z.any())).optional(),
})
.strict(),
customOrder: z.number().optional(),
directEndpoint: z.boolean().optional(),
titleMessageRole: z.string().optional(),
@@ -505,10 +512,28 @@ export const intefaceSchema = z
export type TInterfaceConfig = z.infer<typeof intefaceSchema>;
export type TBalanceConfig = z.infer<typeof balanceSchema>;
export const turnstileOptionsSchema = z
.object({
language: z.string().default('auto'),
size: z.enum(['normal', 'compact', 'flexible', 'invisible']).default('normal'),
})
.default({
language: 'auto',
size: 'normal',
});
export const turnstileSchema = z.object({
siteKey: z.string(),
options: turnstileOptionsSchema.optional(),
});
export type TTurnstileConfig = z.infer<typeof turnstileSchema>;
export type TStartupConfig = {
appTitle: string;
socialLogins?: string[];
interface?: TInterfaceConfig;
turnstile?: TTurnstileConfig;
balance?: TBalanceConfig;
discordLoginEnabled: boolean;
facebookLoginEnabled: boolean;
@@ -579,6 +604,7 @@ export const configSchema = z.object({
filteredTools: z.array(z.string()).optional(),
mcpServers: MCPServersSchema.optional(),
interface: intefaceSchema,
turnstile: turnstileSchema.optional(),
fileStrategy: fileSourceSchema.default(FileSources.local),
actions: z
.object({
@@ -895,6 +921,7 @@ export const visionModels = [
'llama-3-2-11b-vision',
'llama-3.2-90b-vision',
'llama-3-2-90b-vision',
'llama-4',
];
export enum VisionModes {
generative = 'generative',
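For orientation, a minimal sketch of how the new turnstileSchema could validate a turnstile config block. The package-root import and the sample values are assumptions for illustration, not part of this commit:

import { turnstileSchema } from 'librechat-data-provider';

// Hypothetical turnstile block, e.g. from librechat.yaml (siteKey is Cloudflare's public test key):
const parsed = turnstileSchema.safeParse({
  siteKey: '1x00000000000000000000AA',
  options: { size: 'flexible' },
});

if (parsed.success) {
  // 'language' falls back to its 'auto' default from turnstileOptionsSchema
  console.log(parsed.data.options?.language, parsed.data.options?.size); // 'auto' 'flexible'
}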

View file

@@ -222,6 +222,12 @@ export const fileConfigSchema = z.object({
endpoints: z.record(endpointFileConfigSchema).optional(),
serverFileSizeLimit: z.number().min(0).optional(),
avatarSizeLimit: z.number().min(0).optional(),
imageGeneration: z
.object({
percentage: z.number().min(0).max(100).optional(),
px: z.number().min(0).optional(),
})
.optional(),
});
/** Helper function to safely convert string patterns to RegExp objects */
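As a quick standalone illustration (a zod sketch restating the addition above, not code from the commit), the new imageGeneration block accepts a percentage between 0 and 100 and/or a non-negative pixel value:

import { z } from 'zod';

const imageGeneration = z
  .object({
    percentage: z.number().min(0).max(100).optional(),
    px: z.number().min(0).optional(),
  })
  .optional();

console.log(imageGeneration.safeParse({ percentage: 150 }).success); // false: exceeds 100
console.log(imageGeneration.safeParse({ px: 1024 }).success); // true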

View file

@@ -358,7 +358,7 @@ export function validateSettingDefinitions(settings: SettingsConfiguration): void {
// continue;
}
setting.includeInput =
- setting.type === SettingTypes.Number ? setting.includeInput ?? true : false; // Default to true if type is number
+ setting.type === SettingTypes.Number ? (setting.includeInput ?? true) : false; // Default to true if type is number
}
if (setting.component === ComponentTypes.Slider && setting.type === SettingTypes.Number) {
@@ -445,7 +445,8 @@ export function validateSettingDefinitions(settings: SettingsConfiguration): void {
// Validate optionType and conversation schema
if (setting.optionType !== OptionTypes.Custom) {
- const conversationSchema = tConversationSchema.shape[setting.key as keyof TConversation];
+ const conversationSchema =
+ tConversationSchema.shape[setting.key as keyof Omit<TConversation, 'disableParams'>];
if (!conversationSchema) {
errors.push({
code: ZodIssueCode.custom,
@@ -466,7 +467,7 @@ export function validateSettingDefinitions(settings: SettingsConfiguration): void {
}
/* Default value checks */
- if (setting.type === SettingTypes.Number && isNaN(setting.default as number)) {
+ if (setting.type === SettingTypes.Number && isNaN(setting.default as number) && setting.default != null) {
errors.push({
code: ZodIssueCode.custom,
message: `Invalid default value for setting ${setting.key}. Must be a number.`,
@@ -474,7 +475,7 @@ export function validateSettingDefinitions(settings: SettingsConfiguration): void {
});
}
- if (setting.type === SettingTypes.Boolean && typeof setting.default !== 'boolean') {
+ if (setting.type === SettingTypes.Boolean && typeof setting.default !== 'boolean' && setting.default != null) {
errors.push({
code: ZodIssueCode.custom,
message: `Invalid default value for setting ${setting.key}. Must be a boolean.`,
@@ -484,7 +485,7 @@ export function validateSettingDefinitions(settings: SettingsConfiguration): void {
if (
(setting.type === SettingTypes.String || setting.type === SettingTypes.Enum) &&
- typeof setting.default !== 'string'
+ typeof setting.default !== 'string' && setting.default != null
) {
errors.push({
code: ZodIssueCode.custom,

View file

@@ -36,3 +36,4 @@ import * as dataService from './data-service';
export * from './utils';
export * from './actions';
export { default as createPayload } from './createPayload';
export * from './parameterSettings';

View file

@@ -53,9 +53,10 @@ export const WebSocketOptionsSchema = BaseOptionsSchema.extend({
type: z.literal('websocket').optional(),
url: z
.string()
- .url()
+ .transform((val: string) => extractEnvVariable(val))
+ .pipe(z.string().url())
.refine(
- (val) => {
+ (val: string) => {
const protocol = new URL(val).protocol;
return protocol === 'ws:' || protocol === 'wss:';
},
@@ -70,9 +71,10 @@ export const SSEOptionsSchema = BaseOptionsSchema.extend({
headers: z.record(z.string(), z.string()).optional(),
url: z
.string()
- .url()
+ .transform((val: string) => extractEnvVariable(val))
+ .pipe(z.string().url())
.refine(
- (val) => {
+ (val: string) => {
const protocol = new URL(val).protocol;
return protocol !== 'ws:' && protocol !== 'wss:';
},
@@ -85,15 +87,19 @@ export const SSEOptionsSchema = BaseOptionsSchema.extend({
export const StreamableHTTPOptionsSchema = BaseOptionsSchema.extend({
type: z.literal('streamable-http'),
headers: z.record(z.string(), z.string()).optional(),
- url: z.string().url().refine(
- (val) => {
+ url: z
+ .string()
+ .transform((val: string) => extractEnvVariable(val))
+ .pipe(z.string().url())
+ .refine(
+ (val: string) => {
const protocol = new URL(val).protocol;
return protocol !== 'ws:' && protocol !== 'wss:';
},
{
message: 'Streamable HTTP URL must not start with ws:// or wss://',
},
- ),
+ ),
});
export const MCPOptionsSchema = z.union([
@@ -138,5 +144,9 @@ export function processMCPEnv(obj: Readonly<MCPOptions>, userId?: string): MCPOptions {
newObj.headers = processedHeaders;
}
if ('url' in newObj && newObj.url) {
newObj.url = extractEnvVariable(newObj.url);
}
return newObj;
}
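The motivation for the transform/pipe reordering above: a raw '${VAR}' placeholder is not a valid URL, so env substitution must run before .url() validation. A minimal sketch of the pattern, with a stand-in for extractEnvVariable whose exact semantics here are an assumption:

import { z } from 'zod';

// Stand-in: replaces ${NAME} with process.env.NAME (assumed behavior; the real
// helper lives in librechat-data-provider's utils).
const extractEnvVariable = (val: string): string =>
  val.replace(/\$\{(\w+)\}/g, (_match, name) => process.env[name] ?? '');

const urlSchema = z
  .string()
  .transform((val: string) => extractEnvVariable(val))
  .pipe(z.string().url());

process.env.MCP_HOST = 'https://mcp.example.com';
console.log(urlSchema.parse('${MCP_HOST}/sse')); // https://mcp.example.com/sse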

View file

@@ -0,0 +1,726 @@
import {
ImageDetail,
EModelEndpoint,
openAISettings,
googleSettings,
ReasoningEffort,
BedrockProviders,
anthropicSettings,
} from './types';
import { SettingDefinition, SettingsConfiguration } from './generate';
// Base definitions
const baseDefinitions: Record<string, SettingDefinition> = {
model: {
key: 'model',
label: 'com_ui_model',
labelCode: true,
type: 'string',
component: 'dropdown',
optionType: 'model',
selectPlaceholder: 'com_ui_select_model',
searchPlaceholder: 'com_ui_select_search_model',
searchPlaceholderCode: true,
selectPlaceholderCode: true,
columnSpan: 4,
},
temperature: {
key: 'temperature',
label: 'com_endpoint_temperature',
labelCode: true,
description: 'com_endpoint_openai_temp',
descriptionCode: true,
type: 'number',
component: 'slider',
optionType: 'model',
columnSpan: 4,
},
topP: {
key: 'topP',
label: 'com_endpoint_top_p',
labelCode: true,
description: 'com_endpoint_anthropic_topp',
descriptionCode: true,
type: 'number',
component: 'slider',
optionType: 'model',
columnSpan: 4,
},
stop: {
key: 'stop',
label: 'com_endpoint_stop',
labelCode: true,
description: 'com_endpoint_openai_stop',
descriptionCode: true,
placeholder: 'com_endpoint_stop_placeholder',
placeholderCode: true,
type: 'array',
default: [],
component: 'tags',
optionType: 'conversation',
minTags: 0,
maxTags: 4,
},
imageDetail: {
key: 'imageDetail',
label: 'com_endpoint_plug_image_detail',
labelCode: true,
description: 'com_endpoint_openai_detail',
descriptionCode: true,
type: 'enum',
default: ImageDetail.auto,
component: 'slider',
options: [ImageDetail.low, ImageDetail.auto, ImageDetail.high],
optionType: 'conversation',
columnSpan: 2,
},
};
const createDefinition = (
base: Partial<SettingDefinition>,
overrides: Partial<SettingDefinition>,
): SettingDefinition => {
return { ...base, ...overrides } as SettingDefinition;
};
const librechat: Record<string, SettingDefinition> = {
modelLabel: {
key: 'modelLabel',
label: 'com_endpoint_custom_name',
labelCode: true,
type: 'string',
default: '',
component: 'input',
placeholder: 'com_endpoint_openai_custom_name_placeholder',
placeholderCode: true,
optionType: 'conversation',
},
maxContextTokens: {
key: 'maxContextTokens',
label: 'com_endpoint_context_tokens',
labelCode: true,
type: 'number',
component: 'input',
placeholder: 'com_nav_theme_system',
placeholderCode: true,
description: 'com_endpoint_context_info',
descriptionCode: true,
optionType: 'model',
columnSpan: 2,
},
resendFiles: {
key: 'resendFiles',
label: 'com_endpoint_plug_resend_files',
labelCode: true,
description: 'com_endpoint_openai_resend_files',
descriptionCode: true,
type: 'boolean',
default: true,
component: 'switch',
optionType: 'conversation',
showDefault: false,
columnSpan: 2,
},
promptPrefix: {
key: 'promptPrefix',
label: 'com_endpoint_prompt_prefix',
labelCode: true,
type: 'string',
default: '',
component: 'textarea',
placeholder: 'com_endpoint_openai_prompt_prefix_placeholder',
placeholderCode: true,
optionType: 'model',
},
};
const openAIParams: Record<string, SettingDefinition> = {
chatGptLabel: {
...librechat.modelLabel,
key: 'chatGptLabel',
},
promptPrefix: librechat.promptPrefix,
temperature: createDefinition(baseDefinitions.temperature, {
default: openAISettings.temperature.default,
range: {
min: openAISettings.temperature.min,
max: openAISettings.temperature.max,
step: openAISettings.temperature.step,
},
}),
top_p: createDefinition(baseDefinitions.topP, {
key: 'top_p',
default: openAISettings.top_p.default,
range: {
min: openAISettings.top_p.min,
max: openAISettings.top_p.max,
step: openAISettings.top_p.step,
},
}),
frequency_penalty: {
key: 'frequency_penalty',
label: 'com_endpoint_frequency_penalty',
labelCode: true,
description: 'com_endpoint_openai_freq',
descriptionCode: true,
type: 'number',
default: openAISettings.frequency_penalty.default,
range: {
min: openAISettings.frequency_penalty.min,
max: openAISettings.frequency_penalty.max,
step: openAISettings.frequency_penalty.step,
},
component: 'slider',
optionType: 'model',
columnSpan: 4,
},
presence_penalty: {
key: 'presence_penalty',
label: 'com_endpoint_presence_penalty',
labelCode: true,
description: 'com_endpoint_openai_pres',
descriptionCode: true,
type: 'number',
default: openAISettings.presence_penalty.default,
range: {
min: openAISettings.presence_penalty.min,
max: openAISettings.presence_penalty.max,
step: openAISettings.presence_penalty.step,
},
component: 'slider',
optionType: 'model',
columnSpan: 4,
},
max_tokens: {
key: 'max_tokens',
label: 'com_endpoint_max_output_tokens',
labelCode: true,
type: 'number',
component: 'input',
description: 'com_endpoint_openai_max_tokens',
descriptionCode: true,
placeholder: 'com_nav_theme_system',
placeholderCode: true,
optionType: 'model',
columnSpan: 2,
},
reasoning_effort: {
key: 'reasoning_effort',
label: 'com_endpoint_reasoning_effort',
labelCode: true,
description: 'com_endpoint_openai_reasoning_effort',
descriptionCode: true,
type: 'enum',
default: ReasoningEffort.medium,
component: 'slider',
options: [ReasoningEffort.low, ReasoningEffort.medium, ReasoningEffort.high],
optionType: 'model',
columnSpan: 4,
},
};
const anthropic: Record<string, SettingDefinition> = {
maxOutputTokens: {
key: 'maxOutputTokens',
label: 'com_endpoint_max_output_tokens',
labelCode: true,
type: 'number',
component: 'input',
description: 'com_endpoint_anthropic_maxoutputtokens',
descriptionCode: true,
placeholder: 'com_nav_theme_system',
placeholderCode: true,
range: {
min: anthropicSettings.maxOutputTokens.min,
max: anthropicSettings.maxOutputTokens.max,
step: anthropicSettings.maxOutputTokens.step,
},
optionType: 'model',
columnSpan: 2,
},
temperature: createDefinition(baseDefinitions.temperature, {
default: anthropicSettings.temperature.default,
range: {
min: anthropicSettings.temperature.min,
max: anthropicSettings.temperature.max,
step: anthropicSettings.temperature.step,
},
}),
topP: createDefinition(baseDefinitions.topP, {
default: anthropicSettings.topP.default,
range: {
min: anthropicSettings.topP.min,
max: anthropicSettings.topP.max,
step: anthropicSettings.topP.step,
},
}),
topK: {
key: 'topK',
label: 'com_endpoint_top_k',
labelCode: true,
description: 'com_endpoint_anthropic_topk',
descriptionCode: true,
type: 'number',
default: anthropicSettings.topK.default,
range: {
min: anthropicSettings.topK.min,
max: anthropicSettings.topK.max,
step: anthropicSettings.topK.step,
},
component: 'slider',
optionType: 'model',
columnSpan: 4,
},
promptCache: {
key: 'promptCache',
label: 'com_endpoint_prompt_cache',
labelCode: true,
description: 'com_endpoint_anthropic_prompt_cache',
descriptionCode: true,
type: 'boolean',
default: anthropicSettings.promptCache.default,
component: 'switch',
optionType: 'conversation',
showDefault: false,
columnSpan: 2,
},
thinking: {
key: 'thinking',
label: 'com_endpoint_thinking',
labelCode: true,
description: 'com_endpoint_anthropic_thinking',
descriptionCode: true,
type: 'boolean',
default: anthropicSettings.thinking.default,
component: 'switch',
optionType: 'conversation',
showDefault: false,
columnSpan: 2,
},
thinkingBudget: {
key: 'thinkingBudget',
label: 'com_endpoint_thinking_budget',
labelCode: true,
description: 'com_endpoint_anthropic_thinking_budget',
descriptionCode: true,
type: 'number',
component: 'input',
default: anthropicSettings.thinkingBudget.default,
range: {
min: anthropicSettings.thinkingBudget.min,
max: anthropicSettings.thinkingBudget.max,
step: anthropicSettings.thinkingBudget.step,
},
optionType: 'conversation',
columnSpan: 2,
},
};
const bedrock: Record<string, SettingDefinition> = {
system: {
key: 'system',
label: 'com_endpoint_prompt_prefix',
labelCode: true,
type: 'string',
default: '',
component: 'textarea',
placeholder: 'com_endpoint_openai_prompt_prefix_placeholder',
placeholderCode: true,
optionType: 'model',
},
region: {
key: 'region',
type: 'string',
label: 'com_ui_region',
labelCode: true,
component: 'combobox',
optionType: 'conversation',
selectPlaceholder: 'com_ui_select_region',
searchPlaceholder: 'com_ui_select_search_region',
searchPlaceholderCode: true,
selectPlaceholderCode: true,
columnSpan: 2,
},
maxTokens: {
key: 'maxTokens',
label: 'com_endpoint_max_output_tokens',
labelCode: true,
type: 'number',
component: 'input',
placeholder: 'com_endpoint_anthropic_maxoutputtokens',
placeholderCode: true,
optionType: 'model',
columnSpan: 2,
},
temperature: createDefinition(baseDefinitions.temperature, {
default: 1,
range: { min: 0, max: 1, step: 0.01 },
}),
topK: createDefinition(anthropic.topK, {
range: { min: 0, max: 500, step: 1 },
}),
topP: createDefinition(baseDefinitions.topP, {
default: 0.999,
range: { min: 0, max: 1, step: 0.01 },
}),
};
const mistral: Record<string, SettingDefinition> = {
temperature: createDefinition(baseDefinitions.temperature, {
default: 0.7,
range: { min: 0, max: 1, step: 0.01 },
}),
topP: createDefinition(baseDefinitions.topP, {
range: { min: 0, max: 1, step: 0.01 },
}),
};
const cohere: Record<string, SettingDefinition> = {
temperature: createDefinition(baseDefinitions.temperature, {
default: 0.3,
range: { min: 0, max: 1, step: 0.01 },
}),
topP: createDefinition(baseDefinitions.topP, {
default: 0.75,
range: { min: 0.01, max: 0.99, step: 0.01 },
}),
};
const meta: Record<string, SettingDefinition> = {
temperature: createDefinition(baseDefinitions.temperature, {
default: 0.5,
range: { min: 0, max: 1, step: 0.01 },
}),
topP: createDefinition(baseDefinitions.topP, {
default: 0.9,
range: { min: 0, max: 1, step: 0.01 },
}),
};
const google: Record<string, SettingDefinition> = {
temperature: createDefinition(baseDefinitions.temperature, {
default: googleSettings.temperature.default,
range: {
min: googleSettings.temperature.min,
max: googleSettings.temperature.max,
step: googleSettings.temperature.step,
},
}),
topP: createDefinition(baseDefinitions.topP, {
default: googleSettings.topP.default,
range: {
min: googleSettings.topP.min,
max: googleSettings.topP.max,
step: googleSettings.topP.step,
},
}),
topK: {
key: 'topK',
label: 'com_endpoint_top_k',
labelCode: true,
description: 'com_endpoint_google_topk',
descriptionCode: true,
type: 'number',
default: googleSettings.topK.default,
range: {
min: googleSettings.topK.min,
max: googleSettings.topK.max,
step: googleSettings.topK.step,
},
component: 'slider',
optionType: 'model',
columnSpan: 4,
},
maxOutputTokens: {
key: 'maxOutputTokens',
label: 'com_endpoint_max_output_tokens',
labelCode: true,
type: 'number',
component: 'input',
description: 'com_endpoint_google_maxoutputtokens',
descriptionCode: true,
placeholder: 'com_nav_theme_system',
placeholderCode: true,
default: googleSettings.maxOutputTokens.default,
range: {
min: googleSettings.maxOutputTokens.min,
max: googleSettings.maxOutputTokens.max,
step: googleSettings.maxOutputTokens.step,
},
optionType: 'model',
columnSpan: 2,
},
};
const googleConfig: SettingsConfiguration = [
librechat.modelLabel,
librechat.promptPrefix,
librechat.maxContextTokens,
google.maxOutputTokens,
google.temperature,
google.topP,
google.topK,
librechat.resendFiles,
];
const googleCol1: SettingsConfiguration = [
baseDefinitions.model as SettingDefinition,
librechat.modelLabel,
librechat.promptPrefix,
];
const googleCol2: SettingsConfiguration = [
librechat.maxContextTokens,
google.maxOutputTokens,
google.temperature,
google.topP,
google.topK,
librechat.resendFiles,
];
const openAI: SettingsConfiguration = [
librechat.modelLabel,
librechat.promptPrefix,
librechat.maxContextTokens,
openAIParams.max_tokens,
openAIParams.temperature,
openAIParams.top_p,
openAIParams.frequency_penalty,
openAIParams.presence_penalty,
baseDefinitions.stop,
librechat.resendFiles,
baseDefinitions.imageDetail,
openAIParams.reasoning_effort,
];
const openAICol1: SettingsConfiguration = [
baseDefinitions.model as SettingDefinition,
librechat.modelLabel,
librechat.promptPrefix,
];
const openAICol2: SettingsConfiguration = [
librechat.maxContextTokens,
openAIParams.max_tokens,
openAIParams.temperature,
openAIParams.top_p,
openAIParams.frequency_penalty,
openAIParams.presence_penalty,
baseDefinitions.stop,
openAIParams.reasoning_effort,
librechat.resendFiles,
baseDefinitions.imageDetail,
];
const anthropicConfig: SettingsConfiguration = [
librechat.modelLabel,
librechat.promptPrefix,
librechat.maxContextTokens,
anthropic.maxOutputTokens,
anthropic.temperature,
anthropic.topP,
anthropic.topK,
librechat.resendFiles,
anthropic.promptCache,
anthropic.thinking,
anthropic.thinkingBudget,
];
const anthropicCol1: SettingsConfiguration = [
baseDefinitions.model as SettingDefinition,
librechat.modelLabel,
librechat.promptPrefix,
];
const anthropicCol2: SettingsConfiguration = [
librechat.maxContextTokens,
anthropic.maxOutputTokens,
anthropic.temperature,
anthropic.topP,
anthropic.topK,
librechat.resendFiles,
anthropic.promptCache,
anthropic.thinking,
anthropic.thinkingBudget,
];
const bedrockAnthropic: SettingsConfiguration = [
librechat.modelLabel,
bedrock.system,
librechat.maxContextTokens,
bedrock.maxTokens,
bedrock.temperature,
bedrock.topP,
bedrock.topK,
baseDefinitions.stop,
librechat.resendFiles,
bedrock.region,
anthropic.thinking,
anthropic.thinkingBudget,
];
const bedrockMistral: SettingsConfiguration = [
librechat.modelLabel,
librechat.promptPrefix,
librechat.maxContextTokens,
bedrock.maxTokens,
mistral.temperature,
mistral.topP,
librechat.resendFiles,
bedrock.region,
];
const bedrockCohere: SettingsConfiguration = [
librechat.modelLabel,
librechat.promptPrefix,
librechat.maxContextTokens,
bedrock.maxTokens,
cohere.temperature,
cohere.topP,
librechat.resendFiles,
bedrock.region,
];
const bedrockGeneral: SettingsConfiguration = [
librechat.modelLabel,
librechat.promptPrefix,
librechat.maxContextTokens,
meta.temperature,
meta.topP,
librechat.resendFiles,
bedrock.region,
];
const bedrockAnthropicCol1: SettingsConfiguration = [
baseDefinitions.model as SettingDefinition,
librechat.modelLabel,
bedrock.system,
baseDefinitions.stop,
];
const bedrockAnthropicCol2: SettingsConfiguration = [
librechat.maxContextTokens,
bedrock.maxTokens,
bedrock.temperature,
bedrock.topP,
bedrock.topK,
librechat.resendFiles,
bedrock.region,
anthropic.thinking,
anthropic.thinkingBudget,
];
const bedrockMistralCol1: SettingsConfiguration = [
baseDefinitions.model as SettingDefinition,
librechat.modelLabel,
librechat.promptPrefix,
];
const bedrockMistralCol2: SettingsConfiguration = [
librechat.maxContextTokens,
bedrock.maxTokens,
mistral.temperature,
mistral.topP,
librechat.resendFiles,
bedrock.region,
];
const bedrockCohereCol1: SettingsConfiguration = [
baseDefinitions.model as SettingDefinition,
librechat.modelLabel,
librechat.promptPrefix,
];
const bedrockCohereCol2: SettingsConfiguration = [
librechat.maxContextTokens,
bedrock.maxTokens,
cohere.temperature,
cohere.topP,
librechat.resendFiles,
bedrock.region,
];
const bedrockGeneralCol1: SettingsConfiguration = [
baseDefinitions.model as SettingDefinition,
librechat.modelLabel,
librechat.promptPrefix,
];
const bedrockGeneralCol2: SettingsConfiguration = [
librechat.maxContextTokens,
meta.temperature,
meta.topP,
librechat.resendFiles,
bedrock.region,
];
export const paramSettings: Record<string, SettingsConfiguration | undefined> = {
[EModelEndpoint.openAI]: openAI,
[EModelEndpoint.azureOpenAI]: openAI,
[EModelEndpoint.custom]: openAI,
[EModelEndpoint.anthropic]: anthropicConfig,
[`${EModelEndpoint.bedrock}-${BedrockProviders.Anthropic}`]: bedrockAnthropic,
[`${EModelEndpoint.bedrock}-${BedrockProviders.MistralAI}`]: bedrockMistral,
[`${EModelEndpoint.bedrock}-${BedrockProviders.Cohere}`]: bedrockCohere,
[`${EModelEndpoint.bedrock}-${BedrockProviders.Meta}`]: bedrockGeneral,
[`${EModelEndpoint.bedrock}-${BedrockProviders.AI21}`]: bedrockGeneral,
[`${EModelEndpoint.bedrock}-${BedrockProviders.Amazon}`]: bedrockGeneral,
[`${EModelEndpoint.bedrock}-${BedrockProviders.DeepSeek}`]: bedrockGeneral,
[EModelEndpoint.google]: googleConfig,
};
const openAIColumns = {
col1: openAICol1,
col2: openAICol2,
};
const bedrockGeneralColumns = {
col1: bedrockGeneralCol1,
col2: bedrockGeneralCol2,
};
export const presetSettings: Record<
string,
| {
col1: SettingsConfiguration;
col2: SettingsConfiguration;
}
| undefined
> = {
[EModelEndpoint.openAI]: openAIColumns,
[EModelEndpoint.azureOpenAI]: openAIColumns,
[EModelEndpoint.custom]: openAIColumns,
[EModelEndpoint.anthropic]: {
col1: anthropicCol1,
col2: anthropicCol2,
},
[`${EModelEndpoint.bedrock}-${BedrockProviders.Anthropic}`]: {
col1: bedrockAnthropicCol1,
col2: bedrockAnthropicCol2,
},
[`${EModelEndpoint.bedrock}-${BedrockProviders.MistralAI}`]: {
col1: bedrockMistralCol1,
col2: bedrockMistralCol2,
},
[`${EModelEndpoint.bedrock}-${BedrockProviders.Cohere}`]: {
col1: bedrockCohereCol1,
col2: bedrockCohereCol2,
},
[`${EModelEndpoint.bedrock}-${BedrockProviders.Meta}`]: bedrockGeneralColumns,
[`${EModelEndpoint.bedrock}-${BedrockProviders.AI21}`]: bedrockGeneralColumns,
[`${EModelEndpoint.bedrock}-${BedrockProviders.Amazon}`]: bedrockGeneralColumns,
[`${EModelEndpoint.bedrock}-${BedrockProviders.DeepSeek}`]: bedrockGeneralColumns,
[EModelEndpoint.google]: {
col1: googleCol1,
col2: googleCol2,
},
};
export const agentParamSettings: Record<string, SettingsConfiguration | undefined> = Object.entries(
presetSettings,
).reduce<Record<string, SettingsConfiguration | undefined>>((acc, [key, value]) => {
if (value) {
acc[key] = value.col2;
}
return acc;
}, {});
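With parameterSettings re-exported from the package index (see the index.ts hunk above), consumers can resolve a settings panel per endpoint; bedrock entries are keyed as `${endpoint}-${provider}`. A hedged usage sketch, assuming these symbols reach the package root:

import { EModelEndpoint, BedrockProviders, paramSettings } from 'librechat-data-provider';

// Bedrock endpoints are looked up by a composite key; fall back to the custom panel.
const key = `${EModelEndpoint.bedrock}-${BedrockProviders.Anthropic}`;
const settings = paramSettings[key] ?? paramSettings[EModelEndpoint.custom];
console.log(settings?.map((s) => s.key));
// e.g. ['modelLabel', 'system', 'maxContextTokens', 'maxTokens', ...]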

View file

@@ -745,6 +745,7 @@ export type TSetOption = (
export type TConversation = z.infer<typeof tConversationSchema> & {
presetOverride?: Partial<TPreset>;
disableParams?: boolean;
};
export const tSharedLinkSchema = z.object({

View file

@@ -10,6 +10,7 @@ import type {
TConversationTag,
TBanner,
} from './schemas';
import { SettingDefinition } from './generate';
export type TOpenAIMessage = OpenAI.Chat.ChatCompletionMessageParam;
export * from './schemas';
@@ -268,6 +269,10 @@ export type TConfig = {
disableBuilder?: boolean;
retrievalModels?: string[];
capabilities?: string[];
customParams?: {
defaultParamsEndpoint?: string;
paramDefinitions?: SettingDefinition[];
};
};
export type TEndpointsConfig =

View file

@@ -131,6 +131,7 @@ export type BatchFile = {
filepath: string;
embedded: boolean;
source: FileSources;
temp_file_id?: string;
};
export type DeleteFilesBody = {

View file

@@ -567,8 +567,14 @@ export class MCPConnection extends EventEmitter {
return this.connectionState;
}
- public isConnected(): boolean {
- return this.connectionState === 'connected';
+ public async isConnected(): Promise<boolean> {
+ try {
+ await this.client.ping();
+ return this.connectionState === 'connected';
+ } catch (error) {
+ this.logger?.error(`${this.getLogPrefix()} Ping failed:`, error);
+ return false;
+ }
}
public getLastError(): Error | null {
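Since isConnected() now pings the server and returns Promise<boolean>, every call site must await it — the MCPManager changes below do exactly that. A minimal caller sketch; the helper and structural type are illustrative, not from this commit:

// Structural type standing in for MCPConnection's new async signature.
type PingableConnection = { isConnected(): Promise<boolean> };

async function ensureConnected(connection: PingableConnection): Promise<void> {
  if (!(await connection.isConnected())) {
    throw new Error('MCP connection is not active');
  }
}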

View file

@@ -1,5 +1,7 @@
/* MCP */
export * from './manager';
/* Utilities */
export * from './utils';
/* Flow */
export * from './flow/manager';
/* types */

View file

@@ -71,7 +71,7 @@ export class MCPManager {
const connectionAttempt = this.initializeServer(connection, `[MCP][${serverName}]`);
await Promise.race([connectionAttempt, connectionTimeout]);
- if (connection.isConnected()) {
+ if (await connection.isConnected()) {
initializedServers.add(i);
this.connections.set(serverName, connection); // Store in app-level map
@@ -135,7 +135,7 @@
while (attempts < maxAttempts) {
try {
await connection.connect();
- if (connection.isConnected()) {
+ if (await connection.isConnected()) {
return;
}
throw new Error('Connection attempt succeeded but status is not connected');
@@ -200,7 +200,7 @@
}
connection = undefined; // Force creation of a new connection
} else if (connection) {
- if (connection.isConnected()) {
+ if (await connection.isConnected()) {
this.logger.debug(`[MCP][User: ${userId}][${serverName}] Reusing active connection`);
// Update timestamp on reuse
this.updateUserLastActivity(userId);
@@ -244,7 +244,7 @@
);
await Promise.race([connectionAttempt, connectionTimeout]);
- if (!connection.isConnected()) {
+ if (!(await connection.isConnected())) {
throw new Error('Failed to establish connection after initialization attempt.');
}
@@ -342,7 +342,7 @@
public async mapAvailableTools(availableTools: t.LCAvailableTools): Promise<void> {
for (const [serverName, connection] of this.connections.entries()) {
try {
- if (connection.isConnected() !== true) {
+ if ((await connection.isConnected()) !== true) {
this.logger.warn(
`[MCP][${serverName}] Connection not established. Skipping tool mapping.`,
);
@@ -375,7 +375,7 @@
for (const [serverName, connection] of this.connections.entries()) {
try {
- if (connection.isConnected() !== true) {
+ if ((await connection.isConnected()) !== true) {
this.logger.warn(
`[MCP][${serverName}] Connection not established. Skipping manifest loading.`,
);
@@ -443,7 +443,7 @@
}
}
- if (!connection.isConnected()) {
+ if (!(await connection.isConnected())) {
// This might happen if getUserConnection failed silently or app connection dropped
throw new McpError(
ErrorCode.InternalError, // Use InternalError for connection issues

View file

@@ -1,5 +1,13 @@
import type * as t from './types/mcp';
- const RECOGNIZED_PROVIDERS = new Set(['google', 'anthropic', 'openai', 'openrouter', 'xai', 'deepseek', 'ollama']);
+ const RECOGNIZED_PROVIDERS = new Set([
+ 'google',
+ 'anthropic',
+ 'openai',
+ 'openrouter',
+ 'xai',
+ 'deepseek',
+ 'ollama',
+ ]);
const CONTENT_ARRAY_PROVIDERS = new Set(['google', 'anthropic', 'openai']);
const imageFormatters: Record<string, undefined | t.ImageFormatter> = {
@@ -49,6 +57,12 @@ function parseAsString(result: t.MCPToolCallResponse): string {
if (item.resource.uri) {
resourceText.push(`Resource URI: ${item.resource.uri}`);
}
if (item.resource.name) {
resourceText.push(`Resource: ${item.resource.name}`);
}
if (item.resource.description) {
resourceText.push(`Description: ${item.resource.description}`);
}
if (item.resource.mimeType != null && item.resource.mimeType) {
resourceText.push(`Type: ${item.resource.mimeType}`);
}
@@ -133,6 +147,12 @@ export function formatToolContent(
if (item.resource.uri.length) {
resourceText.push(`Resource URI: ${item.resource.uri}`);
}
if (item.resource.name) {
resourceText.push(`Resource: ${item.resource.name}`);
}
if (item.resource.description) {
resourceText.push(`Description: ${item.resource.description}`);
}
if (item.resource.mimeType != null && item.resource.mimeType) {
resourceText.push(`Type: ${item.resource.mimeType}`);
}

View file

@@ -0,0 +1,28 @@
import { normalizeServerName } from './utils';
describe('normalizeServerName', () => {
it('should not modify server names that already match the pattern', () => {
const result = normalizeServerName('valid-server_name.123');
expect(result).toBe('valid-server_name.123');
});
it('should normalize server names with non-ASCII characters', () => {
const result = normalizeServerName('我的服务');
// Should generate a fallback name with a hash
expect(result).toMatch(/^server_\d+$/);
expect(result).toMatch(/^[a-zA-Z0-9_.-]+$/);
});
it('should normalize server names with special characters', () => {
const result = normalizeServerName('server@name!');
// The actual result doesn't have the trailing underscore after trimming
expect(result).toBe('server_name');
expect(result).toMatch(/^[a-zA-Z0-9_.-]+$/);
});
it('should trim leading and trailing underscores', () => {
const result = normalizeServerName('!server-name!');
expect(result).toBe('server-name');
expect(result).toMatch(/^[a-zA-Z0-9_.-]+$/);
});
});

packages/mcp/src/utils.ts (new file, 30 lines)
View file

@@ -0,0 +1,30 @@
/**
* Normalizes a server name to match the pattern ^[a-zA-Z0-9_.-]+$
* This is required for Azure OpenAI models with Tool Calling
*/
export function normalizeServerName(serverName: string): string {
// Check if the server name already matches the pattern
if (/^[a-zA-Z0-9_.-]+$/.test(serverName)) {
return serverName;
}
/** Replace non-matching characters with underscores.
This preserves the general structure while ensuring compatibility.
Trims leading/trailing underscores
*/
const normalized = serverName.replace(/[^a-zA-Z0-9_.-]/g, '_').replace(/^_+|_+$/g, '');
// If the result is empty (e.g., all characters were non-ASCII and got trimmed),
// generate a fallback name to ensure we always have a valid function name
if (!normalized) {
/** Hash of the original name to ensure uniqueness */
let hash = 0;
for (let i = 0; i < serverName.length; i++) {
hash = (hash << 5) - hash + serverName.charCodeAt(i);
hash |= 0; // Convert to 32bit integer
}
return `server_${Math.abs(hash)}`;
}
return normalized;
}
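A short usage sketch: Azure OpenAI tool calling requires function names matching ^[a-zA-Z0-9_.-]+$, so server names are normalized before being embedded in tool names. The 'search__' delimiter below is illustrative, not LibreChat's actual naming scheme:

import { normalizeServerName } from './utils';

console.log(normalizeServerName('server@name!')); // 'server_name'
console.log(normalizeServerName('我的服务')); // hash fallback, e.g. 'server_123456789'
const toolName = `search__${normalizeServerName('我的服务')}`; // safe for Azure tool calling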