🔍 feat: Anthropic Web Search (#8281)

* chore: bump @librechat/agents to ^2.4.54 for anthropic web search support

* WIP: hardcoded web search tool usage

* feat: Implement web search functionality in Anthropic integration

- Updated parameters panel to include web search for Anthropic models.
- Updated necessary schemas to accommodate toggle functionality.

* chore: Set default web search option to false in anthropicSettings

* refactor: Rename webSearch to web_search for consistency across settings and schemas

* chore: bump @librechat/agents to v2.4.55

---------

Co-authored-by: Dustin Healy <dustinhealy1@gmail.com>
Danny Avila 2025-07-06 21:43:09 -04:00 committed by GitHub
parent 5b392f9cb0
commit e60c0cf201
9 changed files with 59 additions and 17 deletions

@@ -48,7 +48,7 @@
     "@langchain/google-genai": "^0.2.13",
     "@langchain/google-vertexai": "^0.2.13",
     "@langchain/textsplitters": "^0.1.0",
-    "@librechat/agents": "^2.4.52",
+    "@librechat/agents": "^2.4.55",
     "@librechat/api": "*",
     "@librechat/data-schemas": "*",
     "@node-saml/passport-saml": "^5.0.0",

@@ -149,7 +149,9 @@ const initializeAgent = async ({
   ) {
     throw new Error(`{ "type": "${ErrorTypes.GOOGLE_TOOL_CONFLICT}"}`);
   } else if (
-    (agent.provider === Providers.OPENAI || agent.provider === Providers.AZURE) &&
+    (agent.provider === Providers.OPENAI ||
+      agent.provider === Providers.AZURE ||
+      agent.provider === Providers.ANTHROPIC) &&
     options.tools?.length &&
     structuredTools?.length
   ) {
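
Read in isolation, the expanded condition can be sketched as follows (a minimal sketch assuming the `Providers` enum and the `agent`, `options`, and `structuredTools` values from the hunk above; what the branch body then does lies outside this hunk):

// Hedged sketch: Anthropic now joins OpenAI and Azure in the branch taken when both
// endpoint-level tools and structured tools are present on the agent run.
const WEB_TOOL_PROVIDERS = new Set<Providers>([
  Providers.OPENAI,
  Providers.AZURE,
  Providers.ANTHROPIC,
]);

const bothToolSetsPresent =
  WEB_TOOL_PROVIDERS.has(agent.provider) &&
  Boolean(options.tools?.length) &&
  Boolean(structuredTools?.length);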

@@ -78,7 +78,17 @@ function getLLMConfig(apiKey, options = {})
     requestOptions.anthropicApiUrl = options.reverseProxyUrl;
   }
 
+  const tools = [];
+
+  if (mergedOptions.web_search) {
+    tools.push({
+      type: 'web_search_20250305',
+      name: 'web_search',
+    });
+  }
+
   return {
+    tools,
     /** @type {AnthropicClientOptions} */
     llmConfig: removeNullishValues(requestOptions),
   };
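
For reference, the object pushed here matches Anthropic's server-side web search tool, which can be sketched against the SDK directly (a minimal sketch; the model id and `max_uses` cap are illustrative assumptions, not values from this commit):

// Hedged sketch: declaring the same web search tool that getLLMConfig now emits,
// but passed straight to @anthropic-ai/sdk.
import Anthropic from '@anthropic-ai/sdk';

const anthropic = new Anthropic({ apiKey: process.env.ANTHROPIC_API_KEY });

async function searchEnabledRequest(prompt: string) {
  return anthropic.messages.create({
    model: 'claude-3-7-sonnet-latest', // illustrative model id
    max_tokens: 1024,
    messages: [{ role: 'user', content: prompt }],
    // Same shape as in the diff above; max_uses is an optional per-request search cap.
    tools: [{ type: 'web_search_20250305', name: 'web_search', max_uses: 5 }],
  });
}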

@@ -160,6 +160,7 @@
   "com_endpoint_anthropic_thinking_budget": "Determines the max number of tokens Claude is allowed use for its internal reasoning process. Larger budgets can improve response quality by enabling more thorough analysis for complex problems, although Claude may not use the entire budget allocated, especially at ranges above 32K. This setting must be lower than \"Max Output Tokens.\"",
   "com_endpoint_anthropic_topk": "Top-k changes how the model selects tokens for output. A top-k of 1 means the selected token is the most probable among all tokens in the model's vocabulary (also called greedy decoding), while a top-k of 3 means that the next token is selected from among the 3 most probable tokens (using temperature).",
   "com_endpoint_anthropic_topp": "Top-p changes how the model selects tokens for output. Tokens are selected from most K (see topK parameter) probable to least until the sum of their probabilities equals the top-p value.",
+  "com_endpoint_anthropic_use_web_search": "Enable web search functionality using Anthropic's built-in search capabilities. This allows the model to search the web for up-to-date information and provide more accurate, current responses.",
   "com_endpoint_assistant": "Assistant",
   "com_endpoint_assistant_model": "Assistant Model",
   "com_endpoint_assistant_placeholder": "Please select an Assistant from the right-hand Side Panel",

package-lock.json (generated)

@@ -64,7 +64,7 @@
         "@langchain/google-genai": "^0.2.13",
         "@langchain/google-vertexai": "^0.2.13",
         "@langchain/textsplitters": "^0.1.0",
-        "@librechat/agents": "^2.4.52",
+        "@librechat/agents": "^2.4.55",
         "@librechat/api": "*",
         "@librechat/data-schemas": "*",
         "@node-saml/passport-saml": "^5.0.0",
@@ -17898,12 +17898,12 @@
       }
     },
     "node_modules/@langchain/anthropic": {
-      "version": "0.3.23",
-      "resolved": "https://registry.npmjs.org/@langchain/anthropic/-/anthropic-0.3.23.tgz",
-      "integrity": "sha512-lwp43HUcCM0bJqJEwBwutskvV85G3R3rQDW5XNCntPDzelW+fCmlsm40P7dg7uG/3uOtDGhj4eDMapKpbPvtlA==",
+      "version": "0.3.24",
+      "resolved": "https://registry.npmjs.org/@langchain/anthropic/-/anthropic-0.3.24.tgz",
+      "integrity": "sha512-Gi1TwXu5vkCxUMToiXaiwTTWq9v3WMyU3ldB/VEWjzbkr3nKF5kcp+HLqhvV7WWOFVTTNgG+pzfq8JALecq5MA==",
       "license": "MIT",
       "dependencies": {
-        "@anthropic-ai/sdk": "^0.52.0",
+        "@anthropic-ai/sdk": "^0.56.0",
         "fast-xml-parser": "^4.4.1"
       },
       "engines": {
@@ -17914,9 +17914,9 @@
       }
     },
     "node_modules/@langchain/anthropic/node_modules/@anthropic-ai/sdk": {
-      "version": "0.52.0",
-      "resolved": "https://registry.npmjs.org/@anthropic-ai/sdk/-/sdk-0.52.0.tgz",
-      "integrity": "sha512-d4c+fg+xy9e46c8+YnrrgIQR45CZlAi7PwdzIfDXDM6ACxEZli1/fxhURsq30ZpMZy6LvSkr41jGq5aF5TD7rQ==",
+      "version": "0.56.0",
+      "resolved": "https://registry.npmjs.org/@anthropic-ai/sdk/-/sdk-0.56.0.tgz",
+      "integrity": "sha512-SLCB8M8+VMg1cpCucnA1XWHGWqVSZtIWzmOdDOEu3eTFZMB+A0sGZ1ESO5MHDnqrNTXz3safMrWx9x4rMZSOqA==",
       "license": "MIT",
       "bin": {
         "anthropic-ai-sdk": "bin/cli"
@@ -19343,12 +19343,12 @@
       }
     },
     "node_modules/@librechat/agents": {
-      "version": "2.4.52",
-      "resolved": "https://registry.npmjs.org/@librechat/agents/-/agents-2.4.52.tgz",
-      "integrity": "sha512-E0CbuXZEIx3J3MjiZ7wDQuDIMaeGPMkSkcm2foOE2PmneAGiGpIjTgvxa9UjJUUWQku191fydZXr9dE826N1MA==",
+      "version": "2.4.55",
+      "resolved": "https://registry.npmjs.org/@librechat/agents/-/agents-2.4.55.tgz",
+      "integrity": "sha512-PaEwR/jQP1dkzrEL6YAMUtRIizPdSymU3/VDHdnxrKB3rS1hirb1QZx8kZ3bsRugPdoR1N3Vgerpw0m0uBMB5w==",
       "license": "MIT",
       "dependencies": {
-        "@langchain/anthropic": "^0.3.23",
+        "@langchain/anthropic": "^0.3.24",
         "@langchain/aws": "^0.1.11",
         "@langchain/community": "^0.3.47",
         "@langchain/core": "^0.3.62",
@@ -46494,7 +46494,7 @@
         "typescript": "^5.0.4"
       },
       "peerDependencies": {
-        "@librechat/agents": "^2.4.52",
+        "@librechat/agents": "^2.4.55",
         "@librechat/data-schemas": "*",
         "@modelcontextprotocol/sdk": "^1.13.3",
         "axios": "^1.8.2",

@@ -69,7 +69,7 @@
     "registry": "https://registry.npmjs.org/"
   },
   "peerDependencies": {
-    "@librechat/agents": "^2.4.52",
+    "@librechat/agents": "^2.4.55",
     "@librechat/data-schemas": "*",
     "@modelcontextprotocol/sdk": "^1.13.3",
     "axios": "^1.8.2",

@@ -381,6 +381,19 @@ const anthropic: Record<string, SettingDefinition> = {
     optionType: 'conversation',
     columnSpan: 2,
   },
+  web_search: {
+    key: 'web_search',
+    label: 'com_ui_web_search',
+    labelCode: true,
+    description: 'com_endpoint_anthropic_use_web_search',
+    descriptionCode: true,
+    type: 'boolean',
+    default: anthropicSettings.web_search.default,
+    component: 'switch',
+    optionType: 'conversation',
+    showDefault: false,
+    columnSpan: 2,
+  },
 };
 
 const bedrock: Record<string, SettingDefinition> = {
@@ -649,6 +662,7 @@ const anthropicConfig: SettingsConfiguration = [
   anthropic.promptCache,
   anthropic.thinking,
   anthropic.thinkingBudget,
+  anthropic.web_search,
 ];
 
 const anthropicCol1: SettingsConfiguration = [
@@ -667,6 +681,7 @@ const anthropicCol2: SettingsConfiguration = [
   anthropic.promptCache,
   anthropic.thinking,
   anthropic.thinkingBudget,
+  anthropic.web_search,
 ];
 
 const bedrockAnthropic: SettingsConfiguration = [
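
For readability, the subset of `SettingDefinition` fields the new toggle exercises can be restated as a standalone type (a sketch only; the real type in librechat-data-provider carries more fields than shown here):

// Hedged sketch of the fields used by the web_search switch above.
type WebSearchSwitchDefinition = {
  key: 'web_search';
  label: string;              // i18n key ('com_ui_web_search'), resolved because labelCode is true
  labelCode: boolean;
  description: string;        // i18n key ('com_endpoint_anthropic_use_web_search')
  descriptionCode: boolean;
  type: 'boolean';
  default: boolean;           // anthropicSettings.web_search.default, i.e. false
  component: 'switch';
  optionType: 'conversation'; // conversation-level option (see the tConversationSchema change below)
  showDefault: boolean;
  columnSpan: number;
};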

@@ -352,6 +352,9 @@ export const anthropicSettings = {
       default: LEGACY_ANTHROPIC_MAX_OUTPUT,
     },
   },
+  web_search: {
+    default: false as const,
+  },
 };
 
 export const agentsSettings = {
@@ -634,7 +637,7 @@ export const tConversationSchema = z.object({
   reasoning_summary: eReasoningSummarySchema.optional().nullable(),
   /* OpenAI: use Responses API */
   useResponsesApi: z.boolean().optional(),
-  /* OpenAI: use Responses API with Web Search */
+  /* OpenAI Responses API / Anthropic API */
   web_search: z.boolean().optional(),
   /* Google: use Search Grounding */
   grounding: z.boolean().optional(),
@@ -742,6 +745,8 @@ export const tQueryParamsSchema = tConversationSchema
     useResponsesApi: true,
     /** @endpoints google */
     grounding: true,
+    /** @endpoints openAI, anthropic */
+    web_search: true,
     /** @endpoints google, anthropic, bedrock */
     topP: true,
     /** @endpoints google, anthropic */
@@ -1117,6 +1122,7 @@ export const anthropicBaseSchema = tConversationSchema.pick({
   greeting: true,
   spec: true,
   maxContextTokens: true,
+  web_search: true,
 });
 
 export const anthropicSchema = anthropicBaseSchema
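
A minimal sketch of exercising the updated schemas (assuming the package is consumed as `librechat-data-provider` and that the picked query-param fields remain optional):

// Hedged sketch: web_search now validates as an optional boolean on conversations
// and is picked into the Anthropic and query-param schemas.
import { tConversationSchema, anthropicSettings } from 'librechat-data-provider';

const result = tConversationSchema.partial().safeParse({ web_search: true });
console.log(result.success);                        // true
console.log(anthropicSettings.web_search.default);  // false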

@@ -134,6 +134,14 @@ export const conversationPreset = {
   useResponsesApi: {
     type: Boolean,
   },
+  /** OpenAI Responses API / Anthropic API */
+  web_search: {
+    type: Boolean,
+  },
+  /** Google */
+  grounding: {
+    type: Boolean,
+  },
   /** Reasoning models only */
   reasoning_effort: {
     type: String,
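
Since `web_search` and `grounding` are plain Booleans on the preset shape, they round-trip through Mongoose without extra handling; a hedged sketch (the export path, model name, and `endpoint` field are assumptions, not guaranteed by this diff):

// Hedged sketch: persisting the new boolean fields on a preset document.
import mongoose from 'mongoose';
import { conversationPreset } from '@librechat/data-schemas'; // export path assumed

const PresetSchema = new mongoose.Schema({ ...conversationPreset });
const Preset = mongoose.model('Preset', PresetSchema);

async function saveAnthropicPreset() {
  return Preset.create({ endpoint: 'anthropic', web_search: true, grounding: false });
}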