feat: add User-Agent header for Anthropic API calls

This commit is contained in:
Mike Lambert 2026-02-08 14:09:31 -05:00
parent 9054ca9c15
commit 7064eea5b8
3 changed files with 57 additions and 9 deletions

View file

@@ -1,4 +1,4 @@
import { AnthropicEffort } from 'librechat-data-provider';
import { AnthropicEffort, Constants } from 'librechat-data-provider';
import type * as t from '~/types';
import { getLLMConfig } from './llm';
@@ -20,6 +20,15 @@ describe('getLLMConfig', () => {
expect(result.llmConfig).toHaveProperty('maxTokens');
});
it('should include User-Agent header in defaultHeaders', () => {
const result = getLLMConfig('test-api-key', { modelOptions: {} });
const defaultHeaders = result.llmConfig.clientOptions?.defaultHeaders as
| Record<string, string>
| undefined;
expect(defaultHeaders).toBeDefined();
expect(defaultHeaders?.['User-Agent']).toBe(`LibreChat/${Constants.VERSION}`);
});
it('should include proxy settings when provided', () => {
const result = getLLMConfig('test-api-key', {
modelOptions: {},
@@ -128,7 +137,8 @@ describe('getLLMConfig', () => {
};
const result = getLLMConfig('test-key', { modelOptions });
const clientOptions = result.llmConfig.clientOptions;
expect(clientOptions?.defaultHeaders).toBeUndefined();
const defaultHeaders = clientOptions?.defaultHeaders as Record<string, string> | undefined;
expect(defaultHeaders?.['anthropic-beta']).toBeUndefined();
expect(result.llmConfig.promptCache).toBe(true);
});
@@ -144,7 +154,8 @@ describe('getLLMConfig', () => {
const modelOptions = { model, promptCache: true };
const result = getLLMConfig('test-key', { modelOptions });
const clientOptions = result.llmConfig.clientOptions;
expect(clientOptions?.defaultHeaders).toBeUndefined();
const defaultHeaders = clientOptions?.defaultHeaders as Record<string, string> | undefined;
expect(defaultHeaders?.['anthropic-beta']).toBeUndefined();
expect(result.llmConfig.promptCache).toBe(true);
});
});
@@ -306,6 +317,7 @@ describe('getLLMConfig', () => {
// claude-3-5-sonnet supports prompt caching and should get the max-tokens header and promptCache boolean
expect(result.llmConfig.clientOptions?.defaultHeaders).toEqual({
'User-Agent': `LibreChat/${Constants.VERSION}`,
'anthropic-beta': 'max-tokens-3-5-sonnet-2024-07-15',
});
expect(result.llmConfig.promptCache).toBe(true);
@@ -516,6 +528,7 @@ describe('getLLMConfig', () => {
expect(result.llmConfig).not.toHaveProperty('topK');
// Should have appropriate headers for Claude-3.7 with prompt cache
expect(result.llmConfig.clientOptions?.defaultHeaders).toEqual({
'User-Agent': `LibreChat/${Constants.VERSION}`,
'anthropic-beta': 'token-efficient-tools-2025-02-19,output-128k-2025-02-19',
});
// Should pass promptCache boolean
@@ -1358,13 +1371,15 @@ describe('getLLMConfig', () => {
modelOptions: { model, promptCache },
});
const headers = result.llmConfig.clientOptions?.defaultHeaders;
const headers = result.llmConfig.clientOptions?.defaultHeaders as
| Record<string, string>
| undefined;
if (shouldHaveHeaders) {
expect(headers).toBeDefined();
expect((headers as Record<string, string>)['anthropic-beta']).toBeDefined();
expect(headers?.['anthropic-beta']).toBeDefined();
} else {
expect(headers).toBeUndefined();
expect(headers?.['anthropic-beta']).toBeUndefined();
}
if (shouldHavePromptCache) {

View file

@@ -1,7 +1,12 @@
import { Dispatcher, ProxyAgent } from 'undici';
import { logger } from '@librechat/data-schemas';
import { AnthropicClientOptions } from '@librechat/agents';
import { anthropicSettings, removeNullishValues, AuthKeys } from 'librechat-data-provider';
import {
anthropicSettings,
removeNullishValues,
AuthKeys,
Constants,
} from 'librechat-data-provider';
import type {
AnthropicLLMConfigResult,
AnthropicConfigOptions,
@@ -189,8 +194,11 @@ function getLLMConfig(
}
const headers = getClaudeHeaders(requestOptions.model ?? '', supportsCacheControl);
if (headers && requestOptions.clientOptions) {
requestOptions.clientOptions.defaultHeaders = headers;
if (requestOptions.clientOptions) {
requestOptions.clientOptions.defaultHeaders = {
'User-Agent': `LibreChat/${Constants.VERSION}`,
...(headers || {}),
};
}
if (options.proxy && requestOptions.clientOptions) {
@@ -274,6 +282,7 @@ function getLLMConfig(
}
requestOptions.clientOptions.defaultHeaders = {
'User-Agent': `LibreChat/${Constants.VERSION}`,
...requestOptions.clientOptions.defaultHeaders,
'anthropic-beta': 'web-search-2025-03-05',
};

View file

@@ -45,6 +45,7 @@ describe('getOpenAIConfig - Anthropic Compatibility', () => {
configOptions: {
baseURL: 'http://host.docker.internal:4000/v1',
defaultHeaders: {
'User-Agent': expect.stringContaining('LibreChat/'),
'anthropic-beta': 'context-1m-2025-08-07',
},
},
@@ -94,6 +95,7 @@ describe('getOpenAIConfig - Anthropic Compatibility', () => {
configOptions: {
baseURL: 'http://localhost:4000/v1',
defaultHeaders: {
'User-Agent': expect.stringContaining('LibreChat/'),
'anthropic-beta': 'token-efficient-tools-2025-02-19,output-128k-2025-02-19',
},
},
@@ -142,6 +144,7 @@ describe('getOpenAIConfig - Anthropic Compatibility', () => {
configOptions: {
baseURL: 'http://localhost:4000/v1',
defaultHeaders: {
'User-Agent': expect.stringContaining('LibreChat/'),
'anthropic-beta': 'token-efficient-tools-2025-02-19,output-128k-2025-02-19',
},
},
@@ -184,6 +187,7 @@ describe('getOpenAIConfig - Anthropic Compatibility', () => {
configOptions: {
baseURL: 'https://api.anthropic.proxy.com/v1',
defaultHeaders: {
'User-Agent': expect.stringContaining('LibreChat/'),
'anthropic-beta': 'max-tokens-3-5-sonnet-2024-07-15',
},
},
@@ -228,6 +232,7 @@ describe('getOpenAIConfig - Anthropic Compatibility', () => {
configOptions: {
baseURL: 'http://custom.proxy/v1',
defaultHeaders: {
'User-Agent': expect.stringContaining('LibreChat/'),
'Custom-Header': 'custom-value',
Authorization: 'Bearer custom-token',
},
@@ -270,6 +275,9 @@ describe('getOpenAIConfig - Anthropic Compatibility', () => {
},
configOptions: {
baseURL: 'http://litellm:4000/v1',
defaultHeaders: {
'User-Agent': expect.stringContaining('LibreChat/'),
},
},
tools: [],
});
@@ -314,6 +322,9 @@ describe('getOpenAIConfig - Anthropic Compatibility', () => {
},
configOptions: {
baseURL: 'http://proxy.litellm/v1',
defaultHeaders: {
'User-Agent': expect.stringContaining('LibreChat/'),
},
},
tools: [],
});
@@ -353,6 +364,9 @@ describe('getOpenAIConfig - Anthropic Compatibility', () => {
},
configOptions: {
baseURL: 'http://litellm/v1',
defaultHeaders: {
'User-Agent': expect.stringContaining('LibreChat/'),
},
},
tools: [],
});
@@ -392,6 +406,9 @@ describe('getOpenAIConfig - Anthropic Compatibility', () => {
},
configOptions: {
baseURL: 'http://litellm/v1',
defaultHeaders: {
'User-Agent': expect.stringContaining('LibreChat/'),
},
},
tools: [
{
@@ -439,6 +456,9 @@ describe('getOpenAIConfig - Anthropic Compatibility', () => {
},
configOptions: {
baseURL: 'http://litellm/v1',
defaultHeaders: {
'User-Agent': expect.stringContaining('LibreChat/'),
},
},
tools: [],
});
@@ -487,6 +507,9 @@ describe('getOpenAIConfig - Anthropic Compatibility', () => {
},
configOptions: {
baseURL: 'http://litellm/v1',
defaultHeaders: {
'User-Agent': expect.stringContaining('LibreChat/'),
},
},
tools: [],
});
@@ -538,6 +561,7 @@ describe('getOpenAIConfig - Anthropic Compatibility', () => {
configOptions: {
baseURL: 'http://litellm/v1',
defaultHeaders: {
'User-Agent': expect.stringContaining('LibreChat/'),
'anthropic-beta': 'max-tokens-3-5-sonnet-2024-07-15',
},
},