🔍 feat: Anthropic/Google Web Search Support via addParams / dropParams (#10456)

* feat: add support for known/add/drop parameters in Anthropic and Google LLM configurations

* ci: add tests for web search support for Anthropic and Google configurations with addParams and dropParams handling
This commit is contained in:
Danny Avila 2025-11-11 14:39:12 -05:00 committed by GitHub
parent 4685a063f5
commit 2b0fe036a8
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
7 changed files with 521 additions and 8 deletions

View file

@@ -4,6 +4,25 @@ import { anthropicSettings, removeNullishValues } from 'librechat-data-provider'
import type { AnthropicLLMConfigResult, AnthropicConfigOptions } from '~/types/anthropic';
import { checkPromptCacheSupport, getClaudeHeaders, configureReasoning } from './helpers';
/**
 * Anthropic-native parameter names that are routed directly onto the
 * Anthropic client request options. Anything not listed here is left for
 * the OpenAI transform to handle (it may be an OpenAI-style param).
 */
const ANTHROPIC_PARAM_NAMES = [
  'model',
  'temperature',
  'topP',
  'topK',
  'maxTokens',
  'maxOutputTokens',
  'stopSequences',
  'stop',
  'stream',
  'apiKey',
  'maxRetries',
  'timeout',
  'anthropicVersion',
  'anthropicApiUrl',
  'defaultHeaders',
] as const;

/** Known Anthropic parameters that map directly to the client config */
export const knownAnthropicParams = new Set<string>(ANTHROPIC_PARAM_NAMES);
/**
* Generates configuration options for creating an Anthropic language model (LLM) instance.
* @param apiKey - The API key for authentication with Anthropic.
@@ -39,6 +58,8 @@ function getLLMConfig(
const mergedOptions = Object.assign(defaultOptions, options.modelOptions);
let enableWebSearch = mergedOptions.web_search;
let requestOptions: AnthropicClientOptions & { stream?: boolean } = {
apiKey,
model: mergedOptions.model,
@@ -84,9 +105,45 @@ function getLLMConfig(
requestOptions.anthropicApiUrl = options.reverseProxyUrl;
}
/** Handle addParams - only process Anthropic-native params, leave OpenAI params for transform */
if (options.addParams && typeof options.addParams === 'object') {
for (const [key, value] of Object.entries(options.addParams)) {
/** Handle web_search separately - don't add to config */
if (key === 'web_search') {
if (typeof value === 'boolean') {
enableWebSearch = value;
}
continue;
}
if (knownAnthropicParams.has(key)) {
/** Route known Anthropic params to requestOptions */
(requestOptions as Record<string, unknown>)[key] = value;
}
/** Leave other params for transform to handle - they might be OpenAI params */
}
}
/** Handle dropParams - only drop from Anthropic config */
if (options.dropParams && Array.isArray(options.dropParams)) {
options.dropParams.forEach((param) => {
if (param === 'web_search') {
enableWebSearch = false;
return;
}
if (param in requestOptions) {
delete requestOptions[param as keyof AnthropicClientOptions];
}
if (requestOptions.invocationKwargs && param in requestOptions.invocationKwargs) {
delete (requestOptions.invocationKwargs as Record<string, unknown>)[param];
}
});
}
const tools = [];
if (mergedOptions.web_search) {
if (enableWebSearch) {
tools.push({
type: 'web_search_20250305',
name: 'web_search',

View file

@@ -5,6 +5,33 @@ import type { GoogleAIToolType } from '@langchain/google-common';
import type * as t from '~/types';
import { isEnabled } from '~/utils';
/**
 * Google/Vertex-native parameter names that are routed directly onto the
 * Google client config. Anything not listed here is left for the OpenAI
 * transform to handle (it may be an OpenAI-style param).
 */
const GOOGLE_PARAM_NAMES = [
  'model',
  'modelName',
  'temperature',
  'maxOutputTokens',
  'maxReasoningTokens',
  'topP',
  'topK',
  'seed',
  'presencePenalty',
  'frequencyPenalty',
  'stopSequences',
  'stop',
  'logprobs',
  'topLogprobs',
  'safetySettings',
  'responseModalities',
  'convertSystemMessageToHumanContent',
  'speechConfig',
  'streamUsage',
  'apiKey',
  'baseUrl',
  'location',
  'authOptions',
] as const;

/** Known Google/Vertex AI parameters that map directly to the client config */
export const knownGoogleParams = new Set<string>(GOOGLE_PARAM_NAMES);
function getThresholdMapping(model: string) {
const gemini1Pattern = /gemini-(1\.0|1\.5|pro$|1\.0-pro|1\.5-pro|1\.5-flash-001)/;
const restrictedPattern = /(gemini-(1\.5-flash-8b|2\.0|exp)|learnlm)/;
@@ -112,6 +139,8 @@ export function getGoogleConfig(
...modelOptions
} = options.modelOptions || {};
let enableWebSearch = web_search;
const llmConfig: GoogleClientOptions | VertexAIClientOptions = removeNullishValues({
...(modelOptions || {}),
model: modelOptions?.model ?? '',
@@ -193,9 +222,42 @@ export function getGoogleConfig(
};
}
/** Handle addParams - only process Google-native params, leave OpenAI params for transform */
if (options.addParams && typeof options.addParams === 'object') {
for (const [key, value] of Object.entries(options.addParams)) {
/** Handle web_search separately - don't add to config */
if (key === 'web_search') {
if (typeof value === 'boolean') {
enableWebSearch = value;
}
continue;
}
if (knownGoogleParams.has(key)) {
/** Route known Google params to llmConfig */
(llmConfig as Record<string, unknown>)[key] = value;
}
/** Leave other params for transform to handle - they might be OpenAI params */
}
}
/** Handle dropParams - only drop from Google config */
if (options.dropParams && Array.isArray(options.dropParams)) {
options.dropParams.forEach((param) => {
if (param === 'web_search') {
enableWebSearch = false;
return;
}
if (param in llmConfig) {
delete (llmConfig as Record<string, unknown>)[param];
}
});
}
const tools: GoogleAIToolType[] = [];
if (web_search) {
if (enableWebSearch) {
tools.push({ googleSearch: {} });
}

View file

@@ -548,4 +548,139 @@ describe('getOpenAIConfig - Anthropic Compatibility', () => {
});
});
});
/**
 * Web-search flag handling on the Anthropic-compatible path of getOpenAIConfig:
 * `web_search` in addParams/dropParams toggles the Anthropic web_search tool
 * rather than being copied into the LLM config.
 * NOTE(review): this excerpt comes from a diff view; original indentation was
 * stripped by the scrape.
 */
describe('Web Search Support via addParams', () => {
// web_search: true in addParams yields the Anthropic web_search_20250305 tool.
it('should enable web_search tool when web_search: true in addParams', () => {
const apiKey = 'sk-web-search';
const endpoint = 'Anthropic (Custom)';
const options = {
modelOptions: {
model: 'claude-3-5-sonnet-latest',
user: 'search-user',
},
customParams: {
defaultParamsEndpoint: 'anthropic',
},
addParams: {
web_search: true,
},
};
const result = getOpenAIConfig(apiKey, options, endpoint);
expect(result.tools).toEqual([
{
type: 'web_search_20250305',
name: 'web_search',
},
]);
expect(result.llmConfig).toMatchObject({
model: 'claude-3-5-sonnet-latest',
stream: true,
});
});
// addParams.web_search: false overrides modelOptions.web_search: true.
it('should disable web_search tool when web_search: false in addParams', () => {
const apiKey = 'sk-no-search';
const endpoint = 'Anthropic (Custom)';
const options = {
modelOptions: {
model: 'claude-3-opus-20240229',
web_search: true, // This should be overridden by addParams
},
customParams: {
defaultParamsEndpoint: 'anthropic',
},
addParams: {
web_search: false,
},
};
const result = getOpenAIConfig(apiKey, options, endpoint);
expect(result.tools).toEqual([]);
});
// dropParams: ['web_search'] disables the tool even when modelOptions enables it.
it('should disable web_search when in dropParams', () => {
const apiKey = 'sk-drop-search';
const endpoint = 'Anthropic (Custom)';
const options = {
modelOptions: {
model: 'claude-3-5-sonnet-latest',
web_search: true,
},
customParams: {
defaultParamsEndpoint: 'anthropic',
},
dropParams: ['web_search'],
};
const result = getOpenAIConfig(apiKey, options, endpoint);
expect(result.tools).toEqual([]);
});
// Mixed params: native/known params stay top-level, unknown params land in modelKwargs.
it('should handle web_search with mixed Anthropic and OpenAI params in addParams', () => {
const apiKey = 'sk-mixed';
const endpoint = 'Anthropic (Custom)';
const options = {
modelOptions: {
model: 'claude-3-opus-20240229',
user: 'mixed-user',
},
customParams: {
defaultParamsEndpoint: 'anthropic',
},
addParams: {
web_search: true,
temperature: 0.7, // Anthropic native
maxRetries: 3, // Known to BOTH Anthropic and OpenAI configs; stays at top level
customParam: 'custom', // Unknown param, should go to modelKwargs
},
};
const result = getOpenAIConfig(apiKey, options, endpoint);
expect(result.tools).toEqual([
{
type: 'web_search_20250305',
name: 'web_search',
},
]);
expect(result.llmConfig.temperature).toBe(0.7);
expect(result.llmConfig.maxRetries).toBe(3); // Known OpenAI param at top level
expect(result.llmConfig.modelKwargs).toMatchObject({
customParam: 'custom', // Unknown param in modelKwargs
metadata: { user_id: 'mixed-user' }, // From invocationKwargs
});
});
// Native Anthropic params pass through addParams without enabling any tool.
it('should handle Anthropic native params in addParams without web_search', () => {
const apiKey = 'sk-native';
const endpoint = 'Anthropic (Custom)';
const options = {
modelOptions: {
model: 'claude-3-opus-20240229',
},
customParams: {
defaultParamsEndpoint: 'anthropic',
},
addParams: {
temperature: 0.9,
topP: 0.95,
maxTokens: 4096,
},
};
const result = getOpenAIConfig(apiKey, options, endpoint);
expect(result.llmConfig).toMatchObject({
model: 'claude-3-opus-20240229',
temperature: 0.9,
topP: 0.95,
maxTokens: 4096,
});
expect(result.tools).toEqual([]);
});
});
});

View file

@@ -0,0 +1,196 @@
import { getOpenAIConfig } from './config';
/**
 * Web-search flag handling on the Google-compatible path of getOpenAIConfig:
 * `web_search` in addParams/dropParams toggles the googleSearch tool rather
 * than being copied into the LLM config; Google-only params land in
 * modelKwargs after the OpenAI transform.
 * NOTE(review): this excerpt comes from a diff view; original indentation was
 * stripped by the scrape.
 */
describe('getOpenAIConfig - Google Compatibility', () => {
describe('Google via Custom Endpoint', () => {
describe('Web Search Support via addParams', () => {
// web_search: true in addParams yields the googleSearch tool.
it('should enable googleSearch tool when web_search: true in addParams', () => {
const apiKey = JSON.stringify({ GOOGLE_API_KEY: 'test-google-key' });
const endpoint = 'Gemini (Custom)';
const options = {
modelOptions: {
model: 'gemini-2.0-flash-exp',
},
customParams: {
defaultParamsEndpoint: 'google',
},
addParams: {
web_search: true,
},
reverseProxyUrl: 'https://generativelanguage.googleapis.com/v1beta/openai',
};
const result = getOpenAIConfig(apiKey, options, endpoint);
expect(result.tools).toEqual([{ googleSearch: {} }]);
expect(result.llmConfig).toMatchObject({
model: 'gemini-2.0-flash-exp',
});
});
// addParams.web_search: false overrides modelOptions.web_search: true.
it('should disable googleSearch tool when web_search: false in addParams', () => {
const apiKey = JSON.stringify({ GOOGLE_API_KEY: 'test-google-key' });
const endpoint = 'Gemini (Custom)';
const options = {
modelOptions: {
model: 'gemini-2.0-flash-exp',
web_search: true, // Should be overridden by addParams
},
customParams: {
defaultParamsEndpoint: 'google',
},
addParams: {
web_search: false,
},
reverseProxyUrl: 'https://generativelanguage.googleapis.com/v1beta/openai',
};
const result = getOpenAIConfig(apiKey, options, endpoint);
expect(result.tools).toEqual([]);
});
// dropParams: ['web_search'] disables the tool even when modelOptions enables it.
it('should disable googleSearch when in dropParams', () => {
const apiKey = JSON.stringify({ GOOGLE_API_KEY: 'test-google-key' });
const endpoint = 'Gemini (Custom)';
const options = {
modelOptions: {
model: 'gemini-2.0-flash-exp',
web_search: true,
},
customParams: {
defaultParamsEndpoint: 'google',
},
dropParams: ['web_search'],
reverseProxyUrl: 'https://generativelanguage.googleapis.com/v1beta/openai',
};
const result = getOpenAIConfig(apiKey, options, endpoint);
expect(result.tools).toEqual([]);
});
// Mixed params: shared/known params stay top-level, Google-only and unknown params go to modelKwargs.
it('should handle web_search with mixed Google and OpenAI params in addParams', () => {
const apiKey = JSON.stringify({ GOOGLE_API_KEY: 'test-google-key' });
const endpoint = 'Gemini (Custom)';
const options = {
modelOptions: {
model: 'gemini-2.0-flash-exp',
},
customParams: {
defaultParamsEndpoint: 'google',
},
addParams: {
web_search: true,
temperature: 0.8, // Shared param (both Google and OpenAI)
topK: 40, // Google-only param, goes to modelKwargs
frequencyPenalty: 0.5, // Known to BOTH Google and OpenAI configs; stays at top level
customUnknown: 'test', // Unknown param, goes to modelKwargs
},
reverseProxyUrl: 'https://generativelanguage.googleapis.com/v1beta/openai',
};
const result = getOpenAIConfig(apiKey, options, endpoint);
expect(result.tools).toEqual([{ googleSearch: {} }]);
expect(result.llmConfig.temperature).toBe(0.8); // Shared param at top level
expect(result.llmConfig.frequencyPenalty).toBe(0.5); // Known OpenAI param at top level
expect(result.llmConfig.modelKwargs).toMatchObject({
topK: 40, // Google-specific in modelKwargs
customUnknown: 'test', // Unknown param in modelKwargs
});
});
// Native Google params pass through addParams without enabling any tool.
it('should handle Google native params in addParams without web_search', () => {
const apiKey = JSON.stringify({ GOOGLE_API_KEY: 'test-google-key' });
const endpoint = 'Gemini (Custom)';
const options = {
modelOptions: {
model: 'gemini-2.0-flash-exp',
},
customParams: {
defaultParamsEndpoint: 'google',
},
addParams: {
temperature: 0.9, // Shared param (both Google and OpenAI)
topP: 0.95, // Shared param (both Google and OpenAI)
topK: 50, // Google-only, goes to modelKwargs
maxOutputTokens: 8192, // Google-only, goes to modelKwargs
},
reverseProxyUrl: 'https://generativelanguage.googleapis.com/v1beta/openai',
};
const result = getOpenAIConfig(apiKey, options, endpoint);
expect(result.llmConfig).toMatchObject({
model: 'gemini-2.0-flash-exp',
temperature: 0.9, // Shared params at top level
topP: 0.95,
});
expect(result.llmConfig.modelKwargs).toMatchObject({
topK: 50, // Google-specific in modelKwargs
maxOutputTokens: 8192,
});
expect(result.tools).toEqual([]);
});
// dropParams removes Google-native params from the resulting config.
it('should drop Google native params with dropParams', () => {
const apiKey = JSON.stringify({ GOOGLE_API_KEY: 'test-google-key' });
const endpoint = 'Gemini (Custom)';
const options = {
modelOptions: {
model: 'gemini-2.0-flash-exp',
temperature: 0.7,
topK: 40,
topP: 0.9,
},
customParams: {
defaultParamsEndpoint: 'google',
},
dropParams: ['topK', 'topP'],
reverseProxyUrl: 'https://generativelanguage.googleapis.com/v1beta/openai',
};
const result = getOpenAIConfig(apiKey, options, endpoint);
expect(result.llmConfig.temperature).toBe(0.7);
expect((result.llmConfig as Record<string, unknown>).topK).toBeUndefined();
expect(result.llmConfig.topP).toBeUndefined();
});
// addParams and dropParams compose: added params survive, dropped params vanish everywhere.
it('should handle both addParams and dropParams for Google', () => {
const apiKey = JSON.stringify({ GOOGLE_API_KEY: 'test-google-key' });
const endpoint = 'Gemini (Custom)';
const options = {
modelOptions: {
model: 'gemini-2.0-flash-exp',
topK: 30, // Will be dropped
},
customParams: {
defaultParamsEndpoint: 'google',
},
addParams: {
web_search: true,
temperature: 0.8, // Shared param
maxOutputTokens: 4096, // Google-only param
},
dropParams: ['topK'],
reverseProxyUrl: 'https://generativelanguage.googleapis.com/v1beta/openai',
};
const result = getOpenAIConfig(apiKey, options, endpoint);
expect(result.tools).toEqual([{ googleSearch: {} }]);
expect(result.llmConfig).toMatchObject({
model: 'gemini-2.0-flash-exp',
temperature: 0.8,
});
expect(result.llmConfig.modelKwargs).toMatchObject({
maxOutputTokens: 4096, // Google-specific in modelKwargs
});
expect((result.llmConfig as Record<string, unknown>).topK).toBeUndefined();
// Verify topK is not in modelKwargs either
expect(result.llmConfig.modelKwargs?.topK).toBeUndefined();
});
});
});
});

View file

@@ -3,6 +3,7 @@ import { Providers } from '@librechat/agents';
import { KnownEndpoints, EModelEndpoint } from 'librechat-data-provider';
import type * as t from '~/types';
import { getLLMConfig as getAnthropicLLMConfig } from '~/endpoints/anthropic/llm';
import { getGoogleConfig } from '~/endpoints/google/llm';
import { transformToOpenAIConfig } from './transform';
import { constructAzureURL } from '~/utils/azure';
import { createFetch } from '~/utils/generators';
@@ -36,14 +37,18 @@ export function getOpenAIConfig(
let llmConfig: t.OAIClientOptions;
let tools: t.LLMConfigResult['tools'];
const isAnthropic = options.customParams?.defaultParamsEndpoint === EModelEndpoint.anthropic;
const isGoogle = options.customParams?.defaultParamsEndpoint === EModelEndpoint.google;
const useOpenRouter =
!isAnthropic &&
!isGoogle &&
((baseURL && baseURL.includes(KnownEndpoints.openrouter)) ||
(endpoint != null && endpoint.toLowerCase().includes(KnownEndpoints.openrouter)));
const isVercel =
(baseURL && baseURL.includes('ai-gateway.vercel.sh')) ||
(endpoint != null && endpoint.toLowerCase().includes(KnownEndpoints.vercel));
!isAnthropic &&
!isGoogle &&
((baseURL && baseURL.includes('ai-gateway.vercel.sh')) ||
(endpoint != null && endpoint.toLowerCase().includes(KnownEndpoints.vercel)));
let azure = options.azure;
let headers = options.headers;
@@ -51,7 +56,11 @@
const anthropicResult = getAnthropicLLMConfig(apiKey, {
modelOptions,
proxy: options.proxy,
reverseProxyUrl: baseURL,
addParams,
dropParams,
});
/** Transform handles addParams/dropParams - it knows about OpenAI params */
const transformed = transformToOpenAIConfig({
addParams,
dropParams,
@@ -63,6 +72,23 @@
if (transformed.configOptions?.defaultHeaders) {
headers = Object.assign(headers ?? {}, transformed.configOptions?.defaultHeaders);
}
} else if (isGoogle) {
const googleResult = getGoogleConfig(apiKey, {
modelOptions,
reverseProxyUrl: baseURL ?? undefined,
authHeader: true,
addParams,
dropParams,
});
/** Transform handles addParams/dropParams - it knows about OpenAI params */
const transformed = transformToOpenAIConfig({
addParams,
dropParams,
llmConfig: googleResult.llmConfig,
fromEndpoint: EModelEndpoint.google,
});
llmConfig = transformed.llmConfig;
tools = googleResult.tools;
} else {
const openaiResult = getOpenAILLMConfig({
azure,

View file

@@ -4,6 +4,7 @@ import type * as t from '~/types';
import { knownOpenAIParams } from './llm';
/** Anthropic-native param names excluded when transforming to an OpenAI-conformant config. */
const anthropicExcludeParams = new Set(['anthropicApiUrl']);
/** Google-native param names excluded when transforming to an OpenAI-conformant config. */
const googleExcludeParams = new Set(['safetySettings', 'location', 'baseUrl', 'customHeaders']);
/**
* Transforms a Non-OpenAI LLM config to an OpenAI-conformant config.
@@ -31,7 +32,14 @@ export function transformToOpenAIConfig({
let hasModelKwargs = false;
const isAnthropic = fromEndpoint === EModelEndpoint.anthropic;
const excludeParams = isAnthropic ? anthropicExcludeParams : new Set();
const isGoogle = fromEndpoint === EModelEndpoint.google;
let excludeParams = new Set<string>();
if (isAnthropic) {
excludeParams = anthropicExcludeParams;
} else if (isGoogle) {
excludeParams = googleExcludeParams;
}
for (const [key, value] of Object.entries(llmConfig)) {
if (value === undefined || value === null) {
@@ -49,6 +57,19 @@
modelKwargs = Object.assign({}, modelKwargs, value as Record<string, unknown>);
hasModelKwargs = true;
continue;
} else if (isGoogle && key === 'authOptions') {
// Handle Google authOptions
modelKwargs = Object.assign({}, modelKwargs, value as Record<string, unknown>);
hasModelKwargs = true;
continue;
} else if (
isGoogle &&
(key === 'thinkingConfig' || key === 'thinkingBudget' || key === 'includeThoughts')
) {
// Handle Google thinking configuration
modelKwargs = Object.assign({}, modelKwargs, { [key]: value });
hasModelKwargs = true;
continue;
}
if (knownOpenAIParams.has(key)) {
@@ -61,6 +82,11 @@
if (addParams && typeof addParams === 'object') {
for (const [key, value] of Object.entries(addParams)) {
/** Skip web_search - it's handled separately as a tool */
if (key === 'web_search') {
continue;
}
if (knownOpenAIParams.has(key)) {
(openAIConfig as Record<string, unknown>)[key] = value;
} else {
@@ -76,16 +102,23 @@
if (dropParams && Array.isArray(dropParams)) {
dropParams.forEach((param) => {
/** Skip web_search - handled separately */
if (param === 'web_search') {
return;
}
if (param in openAIConfig) {
delete openAIConfig[param as keyof t.OAIClientOptions];
}
if (openAIConfig.modelKwargs && param in openAIConfig.modelKwargs) {
delete openAIConfig.modelKwargs[param];
if (Object.keys(openAIConfig.modelKwargs).length === 0) {
delete openAIConfig.modelKwargs;
}
}
});
/** Clean up empty modelKwargs after dropParams processing */
if (openAIConfig.modelKwargs && Object.keys(openAIConfig.modelKwargs).length === 0) {
delete openAIConfig.modelKwargs;
}
}
return {

View file

@@ -54,6 +54,10 @@ export interface AnthropicConfigOptions {
proxy?: string | null;
/** URL for a reverse proxy, if used */
reverseProxyUrl?: string | null;
/** Additional parameters to add to the configuration */
addParams?: Record<string, unknown>;
/** Parameters to drop/exclude from the configuration */
dropParams?: string[];
}
/**