diff --git a/packages/api/src/endpoints/anthropic/llm.ts b/packages/api/src/endpoints/anthropic/llm.ts
index 9f5074ca67..725a608959 100644
--- a/packages/api/src/endpoints/anthropic/llm.ts
+++ b/packages/api/src/endpoints/anthropic/llm.ts
@@ -23,6 +23,19 @@ export const knownAnthropicParams = new Set([
   'defaultHeaders',
 ]);
 
+/**
+ * Applies default parameters to the target object only if the field is undefined
+ * @param target - The target object to apply defaults to
+ * @param defaults - Record of default parameter values
+ */
+function applyDefaultParams(target: Record<string, unknown>, defaults: Record<string, unknown>) {
+  for (const [key, value] of Object.entries(defaults)) {
+    if (target[key] === undefined) {
+      target[key] = value;
+    }
+  }
+}
+
 /**
  * Generates configuration options for creating an Anthropic language model (LLM) instance.
  * @param apiKey - The API key for authentication with Anthropic.
@@ -105,7 +118,26 @@ function getLLMConfig(
     requestOptions.anthropicApiUrl = options.reverseProxyUrl;
   }
 
-  /** Handle addParams - only process Anthropic-native params, leave OpenAI params for transform */
+  /** Handle defaultParams first - only process Anthropic-native params if undefined */
+  if (options.defaultParams && typeof options.defaultParams === 'object') {
+    for (const [key, value] of Object.entries(options.defaultParams)) {
+      /** Handle web_search separately - don't add to config */
+      if (key === 'web_search') {
+        if (enableWebSearch === undefined && typeof value === 'boolean') {
+          enableWebSearch = value;
+        }
+        continue;
+      }
+
+      if (knownAnthropicParams.has(key)) {
+        /** Route known Anthropic params to requestOptions only if undefined */
+        applyDefaultParams(requestOptions as Record<string, unknown>, { [key]: value });
+      }
+      /** Leave other params for transform to handle - they might be OpenAI params */
+    }
+  }
+
+  /** Handle addParams - can override defaultParams */
   if (options.addParams && typeof options.addParams === 'object') {
     for (const [key, value] of Object.entries(options.addParams)) {
       /** Handle web_search separately - don't add to config */
diff --git a/packages/api/src/endpoints/google/llm.ts b/packages/api/src/endpoints/google/llm.ts
index 23303dd9b3..7934a03c55 100644
--- a/packages/api/src/endpoints/google/llm.ts
+++ b/packages/api/src/endpoints/google/llm.ts
@@ -32,6 +32,19 @@ export const knownGoogleParams = new Set([
   'authOptions',
 ]);
 
+/**
+ * Applies default parameters to the target object only if the field is undefined
+ * @param target - The target object to apply defaults to
+ * @param defaults - Record of default parameter values
+ */
+function applyDefaultParams(target: Record<string, unknown>, defaults: Record<string, unknown>) {
+  for (const [key, value] of Object.entries(defaults)) {
+    if (target[key] === undefined) {
+      target[key] = value;
+    }
+  }
+}
+
 function getThresholdMapping(model: string) {
   const gemini1Pattern = /gemini-(1\.0|1\.5|pro$|1\.0-pro|1\.5-pro|1\.5-flash-001)/;
   const restrictedPattern = /(gemini-(1\.5-flash-8b|2\.0|exp)|learnlm)/;
@@ -222,7 +235,26 @@ export function getGoogleConfig(
     };
   }
 
-  /** Handle addParams - only process Google-native params, leave OpenAI params for transform */
+  /** Handle defaultParams first - only process Google-native params if undefined */
+  if (options.defaultParams && typeof options.defaultParams === 'object') {
+    for (const [key, value] of Object.entries(options.defaultParams)) {
+      /** Handle web_search separately - don't add to config */
+      if (key === 'web_search') {
+        if (enableWebSearch === undefined && typeof value === 'boolean') {
+          enableWebSearch = value;
+        }
+        continue;
+      }
+
+      if (knownGoogleParams.has(key)) {
+        /** Route known Google params to llmConfig only if undefined */
+        applyDefaultParams(llmConfig as Record<string, unknown>, { [key]: value });
+      }
+      /** Leave other params for transform to handle - they might be OpenAI params */
+    }
+  }
+
+  /** Handle addParams - can override defaultParams */
   if (options.addParams && typeof options.addParams === 'object') {
     for (const [key, value] of Object.entries(options.addParams)) {
       /** Handle web_search separately - don't add to config */
diff --git a/packages/api/src/endpoints/openai/config.anthropic.spec.ts b/packages/api/src/endpoints/openai/config.anthropic.spec.ts
index 26cdc3a27c..7cc8240031 100644
--- a/packages/api/src/endpoints/openai/config.anthropic.spec.ts
+++ b/packages/api/src/endpoints/openai/config.anthropic.spec.ts
@@ -682,5 +682,241 @@ describe('getOpenAIConfig - Anthropic Compatibility', () => {
       });
       expect(result.tools).toEqual([]);
     });
+
+    describe('defaultParams Support via customParams', () => {
+      it('should apply defaultParams when fields are undefined', () => {
+        const apiKey = 'sk-defaults';
+        const result = getOpenAIConfig(apiKey, {
+          modelOptions: {
+            model: 'claude-3-5-sonnet-20241022',
+          },
+          customParams: {
+            defaultParamsEndpoint: 'anthropic',
+            paramDefinitions: [
+              { key: 'temperature', default: 0.7 },
+              { key: 'topP', default: 0.9 },
+              { key: 'maxRetries', default: 5 },
+            ],
+          },
+          reverseProxyUrl: 'https://api.anthropic.com',
+        });
+
+        expect(result.llmConfig.temperature).toBe(0.7);
+        expect(result.llmConfig.topP).toBe(0.9);
+        expect(result.llmConfig.maxRetries).toBe(5);
+      });
+
+      it('should not override existing modelOptions with defaultParams', () => {
+        const apiKey = 'sk-override';
+        const result = getOpenAIConfig(apiKey, {
+          modelOptions: {
+            model: 'claude-3-5-sonnet-20241022',
+            temperature: 0.9,
+          },
+          customParams: {
+            defaultParamsEndpoint: 'anthropic',
+            paramDefinitions: [
+              { key: 'temperature', default: 0.5 },
+              { key: 'topP', default: 0.8 },
+            ],
+          },
+          reverseProxyUrl: 'https://api.anthropic.com',
+        });
+
+        expect(result.llmConfig.temperature).toBe(0.9);
+        expect(result.llmConfig.topP).toBe(0.8);
+      });
+
+      it('should allow addParams to override defaultParams', () => {
+        const apiKey = 'sk-add-override';
+        const result = getOpenAIConfig(apiKey, {
+          modelOptions: {
+            model: 'claude-3-opus-20240229',
+          },
+          customParams: {
+            defaultParamsEndpoint: 'anthropic',
+            paramDefinitions: [
+              { key: 'temperature', default: 0.5 },
+              { key: 'topP', default: 0.7 },
+            ],
+          },
+          addParams: {
+            temperature: 0.8,
+            topP: 0.95,
+          },
+          reverseProxyUrl: 'https://api.anthropic.com',
+        });
+
+        expect(result.llmConfig.temperature).toBe(0.8);
+        expect(result.llmConfig.topP).toBe(0.95);
+      });
+
+      it('should handle defaultParams with web_search', () => {
+        const apiKey = 'sk-web-default';
+        const result = getOpenAIConfig(apiKey, {
+          modelOptions: {
+            model: 'claude-3-5-sonnet-latest',
+          },
+          customParams: {
+            defaultParamsEndpoint: 'anthropic',
+            paramDefinitions: [{ key: 'web_search', default: true }],
+          },
+          reverseProxyUrl: 'https://api.anthropic.com',
+        });
+
+        expect(result.tools).toEqual([
+          {
+            type: 'web_search_20250305',
+            name: 'web_search',
+          },
+        ]);
+      });
+
+      it('should allow addParams to override defaultParams web_search', () => {
+        const apiKey = 'sk-web-override';
+        const result = getOpenAIConfig(apiKey, {
+          modelOptions: {
+            model: 'claude-3-opus-20240229',
+          },
+          customParams: {
+            defaultParamsEndpoint: 'anthropic',
+            paramDefinitions: [{ key: 'web_search', default: true }],
+          },
+          addParams: {
+            web_search: false,
+          },
+          reverseProxyUrl: 'https://api.anthropic.com',
+        });
+
+        expect(result.tools).toEqual([]);
+      });
+
+      it('should handle dropParams overriding defaultParams', () => {
+        const apiKey = 'sk-drop';
+        const result = getOpenAIConfig(apiKey, {
+          modelOptions: {
+            model: 'claude-3-opus-20240229',
+          },
+          customParams: {
+            defaultParamsEndpoint: 'anthropic',
+            paramDefinitions: [
+              { key: 'temperature', default: 0.7 },
+              { key: 'topP', default: 0.9 },
+              { key: 'web_search', default: true },
+            ],
+          },
+          dropParams: ['topP', 'web_search'],
+          reverseProxyUrl: 'https://api.anthropic.com',
+        });
+
+        expect(result.llmConfig.temperature).toBe(0.7);
+        expect(result.llmConfig.topP).toBeUndefined();
+        expect(result.tools).toEqual([]);
+      });
+
+      it('should preserve order: defaultParams < addParams < modelOptions', () => {
+        const apiKey = 'sk-precedence';
+        const result = getOpenAIConfig(apiKey, {
+          modelOptions: {
+            model: 'claude-3-5-sonnet-20241022',
+            temperature: 0.9,
+          },
+          customParams: {
+            defaultParamsEndpoint: 'anthropic',
+            paramDefinitions: [
+              { key: 'temperature', default: 0.3 },
+              { key: 'topP', default: 0.5 },
+              { key: 'timeout', default: 60000 },
+            ],
+          },
+          addParams: {
+            topP: 0.8,
+          },
+          reverseProxyUrl: 'https://api.anthropic.com',
+        });
+
+        expect(result.llmConfig.temperature).toBe(0.9);
+        expect(result.llmConfig.topP).toBe(0.8);
+        expect(result.llmConfig.timeout).toBe(60000);
+      });
+
+      it('should handle Claude 3.7 with defaultParams and thinking disabled', () => {
+        const apiKey = 'sk-37-defaults';
+        const result = getOpenAIConfig(apiKey, {
+          modelOptions: {
+            model: 'claude-3.7-sonnet-20241022',
+            thinking: false,
+          },
+          customParams: {
+            defaultParamsEndpoint: 'anthropic',
+            paramDefinitions: [
+              { key: 'temperature', default: 0.7 },
+              { key: 'topP', default: 0.9 },
+              { key: 'topK', default: 50 },
+            ],
+          },
+          reverseProxyUrl: 'https://api.anthropic.com',
+        });
+
+        expect(result.llmConfig.temperature).toBe(0.7);
+        expect(result.llmConfig.topP).toBe(0.9);
+        expect(result.llmConfig.modelKwargs?.topK).toBe(50);
+      });
+
+      it('should handle empty paramDefinitions', () => {
+        const apiKey = 'sk-empty';
+        const result = getOpenAIConfig(apiKey, {
+          modelOptions: {
+            model: 'claude-3-opus-20240229',
+            temperature: 0.8,
+          },
+          customParams: {
+            defaultParamsEndpoint: 'anthropic',
+            paramDefinitions: [],
+          },
+          reverseProxyUrl: 'https://api.anthropic.com',
+        });
+
+        expect(result.llmConfig.temperature).toBe(0.8);
+      });
+
+      it('should handle missing paramDefinitions', () => {
+        const apiKey = 'sk-missing';
+        const result = getOpenAIConfig(apiKey, {
+          modelOptions: {
+            model: 'claude-3-opus-20240229',
+            temperature: 0.8,
+          },
+          customParams: {
+            defaultParamsEndpoint: 'anthropic',
+          },
+          reverseProxyUrl: 'https://api.anthropic.com',
+        });
+
+        expect(result.llmConfig.temperature).toBe(0.8);
+      });
+
+      it('should handle mixed Anthropic params in defaultParams', () => {
+        const apiKey = 'sk-mixed';
+        const result = getOpenAIConfig(apiKey, {
+          modelOptions: {
+            model: 'claude-3-5-sonnet-20241022',
+          },
+          customParams: {
+            defaultParamsEndpoint: 'anthropic',
+            paramDefinitions: [
+              { key: 'temperature', default: 0.7 },
+              { key: 'topP', default: 0.9 },
+              { key: 'maxRetries', default: 3 },
+            ],
+          },
+          reverseProxyUrl: 'https://api.anthropic.com',
+        });
+
+        expect(result.llmConfig.temperature).toBe(0.7);
+        expect(result.llmConfig.topP).toBe(0.9);
+        expect(result.llmConfig.maxRetries).toBe(3);
+      });
+    });
   });
 });
diff --git a/packages/api/src/endpoints/openai/config.google.spec.ts b/packages/api/src/endpoints/openai/config.google.spec.ts
index 8728d9bcb0..73b133b478 100644
--- a/packages/api/src/endpoints/openai/config.google.spec.ts
+++ b/packages/api/src/endpoints/openai/config.google.spec.ts
@@ -192,5 +192,196 @@ describe('getOpenAIConfig - Google Compatibility', () => {
         expect(result.llmConfig.modelKwargs?.topK).toBeUndefined();
       });
     });
+
+    describe('defaultParams Support via customParams', () => {
+      it('should apply defaultParams when fields are undefined', () => {
+        const apiKey = JSON.stringify({ GOOGLE_API_KEY: 'test-google-key' });
+        const result = getOpenAIConfig(apiKey, {
+          modelOptions: {
+            model: 'gemini-2.0-flash-exp',
+          },
+          customParams: {
+            defaultParamsEndpoint: 'google',
+            paramDefinitions: [
+              { key: 'temperature', default: 0.6 },
+              { key: 'topK', default: 40 },
+            ],
+          },
+          reverseProxyUrl: 'https://generativelanguage.googleapis.com/v1beta/openai',
+        });
+
+        expect(result.llmConfig.temperature).toBe(0.6);
+        expect(result.llmConfig.modelKwargs?.topK).toBe(40);
+      });
+
+      it('should not override existing modelOptions with defaultParams', () => {
+        const apiKey = JSON.stringify({ GOOGLE_API_KEY: 'test-google-key' });
+        const result = getOpenAIConfig(apiKey, {
+          modelOptions: {
+            model: 'gemini-2.0-flash-exp',
+            temperature: 0.9,
+          },
+          customParams: {
+            defaultParamsEndpoint: 'google',
+            paramDefinitions: [
+              { key: 'temperature', default: 0.5 },
+              { key: 'topK', default: 40 },
+            ],
+          },
+          reverseProxyUrl: 'https://generativelanguage.googleapis.com/v1beta/openai',
+        });
+
+        expect(result.llmConfig.temperature).toBe(0.9);
+        expect(result.llmConfig.modelKwargs?.topK).toBe(40);
+      });
+
+      it('should allow addParams to override defaultParams', () => {
+        const apiKey = JSON.stringify({ GOOGLE_API_KEY: 'test-google-key' });
+
+        const result = getOpenAIConfig(apiKey, {
+          modelOptions: {
+            model: 'gemini-2.0-flash-exp',
+          },
+          customParams: {
+            defaultParamsEndpoint: 'google',
+            paramDefinitions: [
+              { key: 'temperature', default: 0.5 },
+              { key: 'topK', default: 30 },
+            ],
+          },
+          addParams: {
+            temperature: 0.8,
+            topK: 50,
+          },
+          reverseProxyUrl: 'https://generativelanguage.googleapis.com/v1beta/openai',
+        });
+
+        expect(result.llmConfig.temperature).toBe(0.8);
+        expect(result.llmConfig.modelKwargs?.topK).toBe(50);
+      });
+
+      it('should handle defaultParams with web_search', () => {
+        const apiKey = JSON.stringify({ GOOGLE_API_KEY: 'test-google-key' });
+
+        const result = getOpenAIConfig(apiKey, {
+          modelOptions: {
+            model: 'gemini-2.0-flash-exp',
+          },
+          customParams: {
+            defaultParamsEndpoint: 'google',
+            paramDefinitions: [{ key: 'web_search', default: true }],
+          },
+          reverseProxyUrl: 'https://generativelanguage.googleapis.com/v1beta/openai',
+        });
+
+        expect(result.tools).toEqual([{ googleSearch: {} }]);
+      });
+
+      it('should allow addParams to override defaultParams web_search', () => {
+        const apiKey = JSON.stringify({ GOOGLE_API_KEY: 'test-google-key' });
+
+        const result = getOpenAIConfig(apiKey, {
+          modelOptions: {
+            model: 'gemini-2.0-flash-exp',
+          },
+          customParams: {
+            defaultParamsEndpoint: 'google',
+            paramDefinitions: [{ key: 'web_search', default: true }],
+          },
+          addParams: {
+            web_search: false,
+          },
+          reverseProxyUrl: 'https://generativelanguage.googleapis.com/v1beta/openai',
+        });
+
+        expect(result.tools).toEqual([]);
+      });
+
+      it('should handle dropParams overriding defaultParams', () => {
+        const apiKey = JSON.stringify({ GOOGLE_API_KEY: 'test-google-key' });
+
+        const result = getOpenAIConfig(apiKey, {
+          modelOptions: {
+            model: 'gemini-2.0-flash-exp',
+          },
+          customParams: {
+            defaultParamsEndpoint: 'google',
+            paramDefinitions: [
+              { key: 'temperature', default: 0.7 },
+              { key: 'topK', default: 40 },
+              { key: 'web_search', default: true },
+            ],
+          },
+          dropParams: ['topK', 'web_search'],
+          reverseProxyUrl: 'https://generativelanguage.googleapis.com/v1beta/openai',
+        });
+
+        expect(result.llmConfig.temperature).toBe(0.7);
+        expect(result.llmConfig.modelKwargs?.topK).toBeUndefined();
+        expect(result.tools).toEqual([]);
+      });
+
+      it('should preserve order: defaultParams < addParams < modelOptions', () => {
+        const apiKey = JSON.stringify({ GOOGLE_API_KEY: 'test-google-key' });
+
+        const result = getOpenAIConfig(apiKey, {
+          modelOptions: {
+            model: 'gemini-2.0-flash-exp',
+            temperature: 0.9,
+          },
+          customParams: {
+            defaultParamsEndpoint: 'google',
+            paramDefinitions: [
+              { key: 'temperature', default: 0.3 },
+              { key: 'topP', default: 0.5 },
+              { key: 'topK', default: 20 },
+            ],
+          },
+          addParams: {
+            topP: 0.8,
+          },
+          reverseProxyUrl: 'https://generativelanguage.googleapis.com/v1beta/openai',
+        });
+
+        expect(result.llmConfig.temperature).toBe(0.9);
+        expect(result.llmConfig.topP).toBe(0.8);
+        expect(result.llmConfig.modelKwargs?.topK).toBe(20);
+      });
+
+      it('should handle empty paramDefinitions', () => {
+        const apiKey = JSON.stringify({ GOOGLE_API_KEY: 'test-google-key' });
+
+        const result = getOpenAIConfig(apiKey, {
+          modelOptions: {
+            model: 'gemini-2.0-flash-exp',
+            temperature: 0.8,
+          },
+          customParams: {
+            defaultParamsEndpoint: 'google',
+            paramDefinitions: [],
+          },
+          reverseProxyUrl: 'https://generativelanguage.googleapis.com/v1beta/openai',
+        });
+
+        expect(result.llmConfig.temperature).toBe(0.8);
+      });
+
+      it('should handle missing paramDefinitions', () => {
+        const apiKey = JSON.stringify({ GOOGLE_API_KEY: 'test-google-key' });
+
+        const result = getOpenAIConfig(apiKey, {
+          modelOptions: {
+            model: 'gemini-2.0-flash-exp',
+            temperature: 0.8,
+          },
+          customParams: {
+            defaultParamsEndpoint: 'google',
+          },
+          reverseProxyUrl: 'https://generativelanguage.googleapis.com/v1beta/openai',
+        });
+
+        expect(result.llmConfig.temperature).toBe(0.8);
+      });
+    });
   });
 });
diff --git a/packages/api/src/endpoints/openai/config.spec.ts b/packages/api/src/endpoints/openai/config.spec.ts
index fcdf0fa83d..14516ee102 100644
--- a/packages/api/src/endpoints/openai/config.spec.ts
+++ b/packages/api/src/endpoints/openai/config.spec.ts
@@ -1651,5 +1651,210 @@ describe('getOpenAIConfig', () => {
         expect(result.llmConfig.modelKwargs).toEqual(largeModelKwargs);
       });
     });
+
+    describe('defaultParams Support via customParams', () => {
+      it('should apply defaultParams when fields are undefined', () => {
+        const result = getOpenAIConfig(mockApiKey, {
+          modelOptions: {
+            model: 'gpt-4o',
+          },
+          customParams: {
+            defaultParamsEndpoint: 'azureOpenAI',
+            paramDefinitions: [
+              { key: 'useResponsesApi', default: true },
+              { key: 'temperature', default: 0.5 },
+            ],
+          },
+        });
+
+        expect(result.llmConfig.useResponsesApi).toBe(true);
+        expect(result.llmConfig.temperature).toBe(0.5);
+      });
+
+      it('should not override existing modelOptions with defaultParams', () => {
+        const result = getOpenAIConfig(mockApiKey, {
+          modelOptions: {
+            model: 'gpt-5',
+            temperature: 0.9,
+          },
+          customParams: {
+            defaultParamsEndpoint: 'azureOpenAI',
+            paramDefinitions: [
+              { key: 'temperature', default: 0.5 },
+              { key: 'maxTokens', default: 1000 },
+            ],
+          },
+        });
+
+        expect(result.llmConfig.temperature).toBe(0.9);
+        expect(result.llmConfig.modelKwargs?.max_completion_tokens).toBe(1000);
+      });
+
+      it('should allow addParams to override defaultParams', () => {
+        const result = getOpenAIConfig(mockApiKey, {
+          modelOptions: {
+            model: 'gpt-4o',
+          },
+          customParams: {
+            defaultParamsEndpoint: 'azureOpenAI',
+            paramDefinitions: [
+              { key: 'useResponsesApi', default: true },
+              { key: 'temperature', default: 0.5 },
+            ],
+          },
+          addParams: {
+            useResponsesApi: false,
+            temperature: 0.8,
+          },
+        });
+
+        expect(result.llmConfig.useResponsesApi).toBe(false);
+        expect(result.llmConfig.temperature).toBe(0.8);
+      });
+
+      it('should handle defaultParams with unknown parameters', () => {
+        const result = getOpenAIConfig(mockApiKey, {
+          modelOptions: {
+            model: 'gpt-4o',
+          },
+          customParams: {
+            defaultParamsEndpoint: 'azureOpenAI',
+            paramDefinitions: [
+              { key: 'customParam1', default: 'defaultValue' },
+              { key: 'customParam2', default: 123 },
+            ],
+          },
+        });
+
+        expect(result.llmConfig.modelKwargs).toMatchObject({
+          customParam1: 'defaultValue',
+          customParam2: 123,
+        });
+      });
+
+      it('should handle defaultParams with web_search', () => {
+        const result = getOpenAIConfig(mockApiKey, {
+          modelOptions: {
+            model: 'gpt-4o',
+          },
+          customParams: {
+            defaultParamsEndpoint: 'openAI',
+            paramDefinitions: [{ key: 'web_search', default: true }],
+          },
+        });
+
+        expect(result.llmConfig.useResponsesApi).toBe(true);
+        expect(result.tools).toEqual([{ type: 'web_search' }]);
+      });
+
+      it('should allow addParams to override defaultParams web_search', () => {
+        const result = getOpenAIConfig(mockApiKey, {
+          modelOptions: {
+            model: 'gpt-4o',
+          },
+          customParams: {
+            defaultParamsEndpoint: 'openAI',
+            paramDefinitions: [{ key: 'web_search', default: true }],
+          },
+          addParams: {
+            web_search: false,
+          },
+        });
+
+        expect(result.tools).toEqual([]);
+      });
+
+      it('should apply defaultParams for Anthropic via customParams', () => {
+        const result = getOpenAIConfig('test-key', {
+          modelOptions: {
+            model: 'claude-3-5-sonnet-20241022',
+          },
+          customParams: {
+            defaultParamsEndpoint: 'anthropic',
+            paramDefinitions: [
+              { key: 'temperature', default: 0.7 },
+              { key: 'topK', default: 50 },
+            ],
+          },
+          reverseProxyUrl: 'https://api.anthropic.com',
+        });
+
+        expect(result.llmConfig.temperature).toBe(0.7);
+        expect(result.llmConfig.modelKwargs?.topK).toBe(50);
+      });
+
+      it('should apply defaultParams for Google via customParams', () => {
+        const credentials = JSON.stringify({ GOOGLE_API_KEY: 'test-google-key' });
+        const result = getOpenAIConfig(credentials, {
+          modelOptions: {
+            model: 'gemini-2.0-flash-exp',
+          },
+          customParams: {
+            defaultParamsEndpoint: 'google',
+            paramDefinitions: [
+              { key: 'temperature', default: 0.6 },
+              { key: 'topK', default: 40 },
+            ],
+          },
+          reverseProxyUrl: 'https://generativelanguage.googleapis.com/v1beta/openai',
+        });
+
+        expect(result.llmConfig.temperature).toBe(0.6);
+        expect(result.llmConfig.modelKwargs?.topK).toBe(40);
+      });
+
+      it('should handle empty paramDefinitions', () => {
+        const result = getOpenAIConfig(mockApiKey, {
+          modelOptions: {
+            model: 'gpt-4o',
+            temperature: 0.9,
+          },
+          customParams: {
+            defaultParamsEndpoint: 'azureOpenAI',
+            paramDefinitions: [],
+          },
+        });
+
+        expect(result.llmConfig.temperature).toBe(0.9);
+      });
+
+      it('should handle missing paramDefinitions', () => {
+        const result = getOpenAIConfig(mockApiKey, {
+          modelOptions: {
+            model: 'gpt-4o',
+            temperature: 0.9,
+          },
+          customParams: {
+            defaultParamsEndpoint: 'azureOpenAI',
+          },
+        });
+
+        expect(result.llmConfig.temperature).toBe(0.9);
+      });
+
+      it('should preserve order: defaultParams < addParams < modelOptions', () => {
+        const result = getOpenAIConfig(mockApiKey, {
+          modelOptions: {
+            model: 'gpt-5',
+            temperature: 0.9,
+          },
+          customParams: {
+            defaultParamsEndpoint: 'openAI',
+            paramDefinitions: [
+              { key: 'temperature', default: 0.3 },
+              { key: 'topP', default: 0.5 },
+              { key: 'maxTokens', default: 500 },
+            ],
+          },
+          addParams: {
+            topP: 0.8,
+          },
+        });
+
+        expect(result.llmConfig.temperature).toBe(0.9);
+        expect(result.llmConfig.topP).toBe(0.8);
+        expect(result.llmConfig.modelKwargs?.max_completion_tokens).toBe(500);
+      });
+    });
   });
 });
diff --git a/packages/api/src/endpoints/openai/config.ts b/packages/api/src/endpoints/openai/config.ts
index ac88c59aaa..c84d3b07c3 100644
--- a/packages/api/src/endpoints/openai/config.ts
+++ b/packages/api/src/endpoints/openai/config.ts
@@ -3,11 +3,11 @@
 import { Providers } from '@librechat/agents';
 import { KnownEndpoints, EModelEndpoint } from 'librechat-data-provider';
 import type * as t from '~/types';
 import { getLLMConfig as getAnthropicLLMConfig } from '~/endpoints/anthropic/llm';
+import { getOpenAILLMConfig, extractDefaultParams } from './llm';
 import { getGoogleConfig } from '~/endpoints/google/llm';
 import { transformToOpenAIConfig } from './transform';
 import { constructAzureURL } from '~/utils/azure';
 import { createFetch } from '~/utils/generators';
-import { getOpenAILLMConfig } from './llm';
 
 type Fetch = (input: string | URL | Request, init?: RequestInit) => Promise<Response>;
 
@@ -34,6 +34,9 @@ export function getOpenAIConfig(
     reverseProxyUrl: baseURL,
   } = options;
 
+  /** Extract default params from customParams.paramDefinitions */
+  const defaultParams = extractDefaultParams(options.customParams?.paramDefinitions);
+
   let llmConfig: t.OAIClientOptions;
   let tools: t.LLMConfigResult['tools'];
   const isAnthropic = options.customParams?.defaultParamsEndpoint === EModelEndpoint.anthropic;
@@ -59,6 +62,7 @@ export function getOpenAIConfig(
       reverseProxyUrl: baseURL,
       addParams,
       dropParams,
+      defaultParams,
     });
     /** Transform handles addParams/dropParams - it knows about OpenAI params */
     const transformed = transformToOpenAIConfig({
@@ -79,6 +83,7 @@
       authHeader: true,
       addParams,
       dropParams,
+      defaultParams,
     });
     /** Transform handles addParams/dropParams - it knows about OpenAI params */
     const transformed = transformToOpenAIConfig({
@@ -98,6 +103,7 @@
       streaming,
       addParams,
      dropParams,
+      defaultParams,
       modelOptions,
       useOpenRouter,
     });
diff --git a/packages/api/src/endpoints/openai/llm.ts b/packages/api/src/endpoints/openai/llm.ts
index ee2a3da330..c74bb93eac 100644
--- a/packages/api/src/endpoints/openai/llm.ts
+++ b/packages/api/src/endpoints/openai/llm.ts
@@ -1,5 +1,6 @@
 import { EModelEndpoint, removeNullishValues } from 'librechat-data-provider';
 import type { BindToolsInput } from '@langchain/core/language_models/chat_models';
+import type { SettingDefinition } from 'librechat-data-provider';
 import type { AzureOpenAIInput } from '@langchain/openai';
 import type { OpenAI } from 'openai';
 import type * as t from '~/types';
@@ -75,6 +76,44 @@ function hasReasoningParams({
   );
 }
 
+/**
+ * Extracts default parameters from customParams.paramDefinitions
+ * @param paramDefinitions - Array of parameter definitions with key and default values
+ * @returns Record of default parameters
+ */
+export function extractDefaultParams(
+  paramDefinitions?: Partial<SettingDefinition>[],
+): Record<string, unknown> {
+  if (!paramDefinitions || !Array.isArray(paramDefinitions)) {
+    return {};
+  }
+
+  const defaults: Record<string, unknown> = {};
+  for (let i = 0; i < paramDefinitions.length; i++) {
+    const param = paramDefinitions[i];
+    if (param.key !== undefined && param.default !== undefined) {
+      defaults[param.key] = param.default;
+    }
+  }
+  return defaults;
+}
+
+/**
+ * Applies default parameters to the target object only if the field is undefined
+ * @param target - The target object to apply defaults to
+ * @param defaults - Record of default parameter values
+ */
+export function applyDefaultParams(
+  target: Record<string, unknown>,
+  defaults: Record<string, unknown>,
+) {
+  for (const [key, value] of Object.entries(defaults)) {
+    if (target[key] === undefined) {
+      target[key] = value;
+    }
+  }
+}
+
 export function getOpenAILLMConfig({
   azure,
   apiKey,
@@ -83,6 +122,7 @@
   streaming,
   addParams,
   dropParams,
+  defaultParams,
   useOpenRouter,
   modelOptions: _modelOptions,
 }: {
@@ -93,6 +133,7 @@
   modelOptions: Partial;
   addParams?: Record<string, unknown>;
   dropParams?: string[];
+  defaultParams?: Record<string, unknown>;
   useOpenRouter?: boolean;
   azure?: false | t.AzureOptions;
 }): Pick & {
@@ -133,6 +174,30 @@
 
   let enableWebSearch = web_search;
 
+  /** Apply defaultParams first - only if fields are undefined */
+  if (defaultParams && typeof defaultParams === 'object') {
+    for (const [key, value] of Object.entries(defaultParams)) {
+      /** Handle web_search separately - don't add to config */
+      if (key === 'web_search') {
+        if (enableWebSearch === undefined && typeof value === 'boolean') {
+          enableWebSearch = value;
+        }
+        continue;
+      }
+
+      if (knownOpenAIParams.has(key)) {
+        applyDefaultParams(llmConfig as Record<string, unknown>, { [key]: value });
+      } else {
+        /** Apply to modelKwargs if not a known param */
+        if (modelKwargs[key] === undefined) {
+          modelKwargs[key] = value;
+          hasModelKwargs = true;
+        }
+      }
+    }
+  }
+
+  /** Apply addParams - can override defaultParams */
   if (addParams && typeof addParams === 'object') {
     for (const [key, value] of Object.entries(addParams)) {
       /** Handle web_search directly here instead of adding to modelKwargs or llmConfig */
diff --git a/packages/api/src/types/anthropic.ts b/packages/api/src/types/anthropic.ts
index 6374be494b..c57016ebf2 100644
--- a/packages/api/src/types/anthropic.ts
+++ b/packages/api/src/types/anthropic.ts
@@ -54,6 +54,8 @@ export interface AnthropicConfigOptions {
   proxy?: string | null;
   /** URL for a reverse proxy, if used */
   reverseProxyUrl?: string | null;
+  /** Default parameters to apply only if fields are undefined */
+  defaultParams?: Record<string, unknown>;
   /** Additional parameters to add to the configuration */
   addParams?: Record<string, unknown>;
   /** Parameters to drop/exclude from the configuration */
diff --git a/packages/api/src/types/google.ts b/packages/api/src/types/google.ts
index 1bc40f06e8..e0bf43f3de 100644
--- a/packages/api/src/types/google.ts
+++ b/packages/api/src/types/google.ts
@@ -19,6 +19,8 @@ export interface GoogleConfigOptions {
   proxy?: string;
   streaming?: boolean;
   authHeader?: boolean;
+  /** Default parameters to apply only if fields are undefined */
+  defaultParams?: Record<string, unknown>;
   addParams?: Record<string, unknown>;
   dropParams?: string[];
 }
diff --git a/packages/data-provider/src/types.ts b/packages/data-provider/src/types.ts
index c01db59c17..fd6190f50e 100644
--- a/packages/data-provider/src/types.ts
+++ b/packages/data-provider/src/types.ts
@@ -347,7 +347,7 @@ export type TConfig = {
   capabilities?: string[];
   customParams?: {
     defaultParamsEndpoint?: string;
-    paramDefinitions?: SettingDefinition[];
+    paramDefinitions?: Partial<SettingDefinition>[];
   };
 };
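
Reviewer note: the ordering this change establishes is that explicit settings win and customParams.paramDefinitions only fill fields that are still undefined. A minimal standalone TypeScript sketch of that behavior follows; the helper bodies mirror extractDefaultParams/applyDefaultParams from the diff above, while the ParamDefinition alias, the resolveParams wrapper, and the demo values are illustrative assumptions rather than code from the repository.

/** Shape assumed for one entry of customParams.paramDefinitions. */
type ParamDefinition = { key?: string; default?: unknown };

/** Collect `default` values keyed by `key` (mirrors extractDefaultParams above). */
function extractDefaultParams(paramDefinitions?: ParamDefinition[]): Record<string, unknown> {
  const defaults: Record<string, unknown> = {};
  for (const param of paramDefinitions ?? []) {
    if (param.key !== undefined && param.default !== undefined) {
      defaults[param.key] = param.default;
    }
  }
  return defaults;
}

/** Fill only fields that are still undefined (mirrors applyDefaultParams above). */
function applyDefaultParams(target: Record<string, unknown>, defaults: Record<string, unknown>) {
  for (const [key, value] of Object.entries(defaults)) {
    if (target[key] === undefined) {
      target[key] = value;
    }
  }
}

/** Illustrative wrapper: explicit options win, defaults only fill what is left undefined. */
function resolveParams(
  explicitOptions: Record<string, unknown>,
  paramDefinitions: ParamDefinition[],
): Record<string, unknown> {
  const config: Record<string, unknown> = { ...explicitOptions };
  applyDefaultParams(config, extractDefaultParams(paramDefinitions));
  return config;
}

// temperature stays 0.9 (explicit), topP is filled from its default (0.5):
console.log(
  resolveParams({ temperature: 0.9 }, [
    { key: 'temperature', default: 0.3 },
    { key: 'topP', default: 0.5 },
  ]),
); // { temperature: 0.9, topP: 0.5 }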