LibreChat/packages/api/src/endpoints/openai/config.anthropic.spec.ts

import { getOpenAIConfig } from './config';
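// Each test pins the expected output of getOpenAIConfig for a custom endpoint
// that opts into Anthropic defaults via customParams.defaultParamsEndpoint:
// Anthropic-specific settings (maxTokens, thinking, beta headers, metadata)
// are applied first, then transformed back into an OpenAI-style client config.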
describe('getOpenAIConfig - Anthropic Compatibility', () => {
describe('Anthropic via LiteLLM', () => {
it('should handle basic Anthropic configuration with defaultParamsEndpoint', () => {
const apiKey = 'sk-xxxx';
const endpoint = 'Anthropic (via LiteLLM)';
const options = {
modelOptions: {
model: 'claude-sonnet-4',
user: 'some_user_id',
},
reverseProxyUrl: 'http://host.docker.internal:4000/v1',
proxy: '',
headers: {},
addParams: undefined,
dropParams: undefined,
customParams: {
defaultParamsEndpoint: 'anthropic',
paramDefinitions: [],
},
endpoint: 'Anthropic (via LiteLLM)',
endpointType: 'custom',
};
const result = getOpenAIConfig(apiKey, options, endpoint);
expect(result).toEqual({
llmConfig: {
apiKey: 'sk-xxxx',
model: 'claude-sonnet-4',
stream: true,
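// claude-sonnet-4 implies a 64K output-token cap and thinking on by default (2000-token budget)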
maxTokens: 64000,
modelKwargs: {
metadata: {
user_id: 'some_user_id',
},
thinking: {
type: 'enabled',
budget_tokens: 2000,
},
},
},
configOptions: {
baseURL: 'http://host.docker.internal:4000/v1',
defaultHeaders: {
'anthropic-beta': 'prompt-caching-2024-07-31,context-1m-2025-08-07',
},
},
tools: [],
});
});
it('should handle Claude 3.7 model with thinking enabled', () => {
const apiKey = 'sk-yyyy';
const endpoint = 'Anthropic (via LiteLLM)';
const options = {
modelOptions: {
model: 'claude-3.7-sonnet-20241022',
user: 'user123',
temperature: 0.7,
thinking: true,
thinkingBudget: 3000,
},
reverseProxyUrl: 'http://localhost:4000/v1',
customParams: {
defaultParamsEndpoint: 'anthropic',
},
endpoint: 'Anthropic (via LiteLLM)',
endpointType: 'custom',
};
const result = getOpenAIConfig(apiKey, options, endpoint);
expect(result).toEqual({
llmConfig: {
apiKey: 'sk-yyyy',
model: 'claude-3.7-sonnet-20241022',
stream: true,
temperature: 0.7,
maxTokens: 8192,
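// user becomes metadata.user_id; the explicit thinkingBudget of 3000 replaces the 2000 default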
modelKwargs: {
metadata: {
user_id: 'user123',
},
thinking: {
type: 'enabled',
budget_tokens: 3000,
},
},
},
configOptions: {
baseURL: 'http://localhost:4000/v1',
defaultHeaders: {
'anthropic-beta':
'token-efficient-tools-2025-02-19,output-128k-2025-02-19,prompt-caching-2024-07-31',
},
},
tools: [],
});
});
it('should handle Claude 3.7 model with thinking disabled (topP and topK included)', () => {
const apiKey = 'sk-yyyy';
const endpoint = 'Anthropic (via LiteLLM)';
const options = {
modelOptions: {
model: 'claude-3.7-sonnet-20241022',
user: 'user123',
temperature: 0.7,
topP: 0.9,
topK: 50,
thinking: false,
},
reverseProxyUrl: 'http://localhost:4000/v1',
customParams: {
defaultParamsEndpoint: 'anthropic',
},
endpoint: 'Anthropic (via LiteLLM)',
endpointType: 'custom',
};
const result = getOpenAIConfig(apiKey, options, endpoint);
expect(result).toEqual({
llmConfig: {
apiKey: 'sk-yyyy',
model: 'claude-3.7-sonnet-20241022',
stream: true,
temperature: 0.7,
topP: 0.9,
maxTokens: 8192,
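// thinking disabled: topP stays on llmConfig, while topK is forwarded via modelKwargs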
modelKwargs: {
metadata: {
user_id: 'user123',
},
topK: 50,
},
},
configOptions: {
baseURL: 'http://localhost:4000/v1',
defaultHeaders: {
'anthropic-beta':
'token-efficient-tools-2025-02-19,output-128k-2025-02-19,prompt-caching-2024-07-31',
},
},
tools: [],
});
});
it('should handle Claude 3.5 sonnet with special headers', () => {
const apiKey = 'sk-zzzz';
const endpoint = 'Anthropic (via LiteLLM)';
const options = {
modelOptions: {
model: 'claude-3.5-sonnet-20240620',
user: 'user456',
maxOutputTokens: 4096,
},
reverseProxyUrl: 'https://api.anthropic.proxy.com/v1',
customParams: {
defaultParamsEndpoint: 'anthropic',
},
endpoint: 'Anthropic (via LiteLLM)',
endpointType: 'custom',
};
const result = getOpenAIConfig(apiKey, options, endpoint);
expect(result).toEqual({
llmConfig: {
apiKey: 'sk-zzzz',
model: 'claude-3.5-sonnet-20240620',
stream: true,
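// maxOutputTokens is mapped onto the OpenAI-style maxTokens field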
maxTokens: 4096,
modelKwargs: {
metadata: {
user_id: 'user456',
},
},
},
configOptions: {
baseURL: 'https://api.anthropic.proxy.com/v1',
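// claude-3.5-sonnet additionally gets the max-tokens-3-5-sonnet beta flag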
defaultHeaders: {
'anthropic-beta': 'max-tokens-3-5-sonnet-2024-07-15,prompt-caching-2024-07-31',
},
},
tools: [],
});
});
it('should apply anthropic-beta headers based on model pattern', () => {
const apiKey = 'sk-custom';
const endpoint = 'Anthropic (via LiteLLM)';
const options = {
modelOptions: {
model: 'claude-3-sonnet',
},
reverseProxyUrl: 'http://custom.proxy/v1',
headers: {
'Custom-Header': 'custom-value',
Authorization: 'Bearer custom-token',
},
customParams: {
defaultParamsEndpoint: 'anthropic',
},
endpoint: 'Anthropic (via LiteLLM)',
endpointType: 'custom',
};
const result = getOpenAIConfig(apiKey, options, endpoint);
expect(result).toEqual({
llmConfig: {
apiKey: 'sk-custom',
model: 'claude-3-sonnet',
stream: true,
maxTokens: 8192,
modelKwargs: {
metadata: {
user_id: undefined,
},
},
},
configOptions: {
baseURL: 'http://custom.proxy/v1',
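// user-supplied headers are merged with the computed anthropic-beta header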
defaultHeaders: {
'Custom-Header': 'custom-value',
Authorization: 'Bearer custom-token',
'anthropic-beta': 'prompt-caching-2024-07-31',
},
},
tools: [],
});
});
it('should handle models that do not match Claude patterns', () => {
const apiKey = 'sk-other';
const endpoint = 'Anthropic (via LiteLLM)';
const options = {
modelOptions: {
model: 'gpt-4-turbo',
user: 'userGPT',
temperature: 0.8,
},
reverseProxyUrl: 'http://litellm:4000/v1',
customParams: {
defaultParamsEndpoint: 'anthropic',
},
endpoint: 'Anthropic (via LiteLLM)',
endpointType: 'custom',
};
const result = getOpenAIConfig(apiKey, options, endpoint);
expect(result).toEqual({
llmConfig: {
apiKey: 'sk-other',
model: 'gpt-4-turbo',
stream: true,
temperature: 0.8,
maxTokens: 8192,
modelKwargs: {
metadata: {
user_id: 'userGPT',
},
},
},
configOptions: {
baseURL: 'http://litellm:4000/v1',
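// non-Claude model names get no anthropic-beta header at all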
},
tools: [],
});
});
it('should handle dropParams correctly in Anthropic path', () => {
const apiKey = 'sk-drop';
const endpoint = 'Anthropic (via LiteLLM)';
const options = {
modelOptions: {
model: 'claude-3-opus-20240229',
user: 'userDrop',
temperature: 0.5,
maxOutputTokens: 2048,
topP: 0.9,
topK: 40,
},
reverseProxyUrl: 'http://proxy.litellm/v1',
dropParams: ['temperature', 'topK', 'metadata'],
customParams: {
defaultParamsEndpoint: 'anthropic',
},
endpoint: 'Anthropic (via LiteLLM)',
endpointType: 'custom',
};
const result = getOpenAIConfig(apiKey, options, endpoint);
expect(result).toEqual({
llmConfig: {
apiKey: 'sk-drop',
model: 'claude-3-opus-20240229',
stream: true,
topP: 0.9,
maxTokens: 2048,
// temperature is dropped
// modelKwargs.topK is dropped
// modelKwargs.metadata is dropped completely
},
configOptions: {
baseURL: 'http://proxy.litellm/v1',
defaultHeaders: {
'anthropic-beta': 'prompt-caching-2024-07-31',
},
},
tools: [],
});
});
it('should handle empty user string', () => {
const apiKey = 'sk-edge';
const endpoint = 'Anthropic (via LiteLLM)';
const options = {
modelOptions: {
model: 'claude-2.1',
user: '',
temperature: 0,
},
reverseProxyUrl: 'http://litellm/v1',
customParams: {
defaultParamsEndpoint: 'anthropic',
},
endpoint: 'Anthropic (via LiteLLM)',
endpointType: 'custom',
};
const result = getOpenAIConfig(apiKey, options, endpoint);
expect(result).toEqual({
llmConfig: {
apiKey: 'sk-edge',
model: 'claude-2.1',
stream: true,
temperature: 0,
maxTokens: 8192,
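// an empty user string is still forwarded as metadata.user_id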
modelKwargs: {
metadata: {
user_id: '',
},
},
},
configOptions: {
baseURL: 'http://litellm/v1',
},
tools: [],
});
});
it('should handle web_search tool', () => {
const apiKey = 'sk-search';
const endpoint = 'Anthropic (via LiteLLM)';
const options = {
modelOptions: {
model: 'claude-3-opus-20240229',
user: 'searchUser',
web_search: true,
},
reverseProxyUrl: 'http://litellm/v1',
customParams: {
defaultParamsEndpoint: 'anthropic',
},
endpoint: 'Anthropic (via LiteLLM)',
endpointType: 'custom',
};
const result = getOpenAIConfig(apiKey, options, endpoint);
expect(result).toEqual({
llmConfig: {
apiKey: 'sk-search',
model: 'claude-3-opus-20240229',
stream: true,
maxTokens: 8192,
modelKwargs: {
metadata: {
user_id: 'searchUser',
},
},
},
configOptions: {
baseURL: 'http://litellm/v1',
defaultHeaders: {
'anthropic-beta': 'prompt-caching-2024-07-31',
},
},
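// web_search: true expands into Anthropic's web_search tool definition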
tools: [
{
type: 'web_search_20250305',
name: 'web_search',
},
],
});
});
it('should properly transform Anthropic config with invocationKwargs', () => {
const apiKey = 'sk-test';
const endpoint = 'Anthropic (via LiteLLM)';
const options = {
modelOptions: {
model: 'claude-3.5-haiku-20241022',
user: 'testUser',
topP: 0.9,
topK: 40,
},
reverseProxyUrl: 'http://litellm/v1',
customParams: {
defaultParamsEndpoint: 'anthropic',
},
endpoint: 'Anthropic (via LiteLLM)',
endpointType: 'custom',
};
const result = getOpenAIConfig(apiKey, options, endpoint);
expect(result).toEqual({
llmConfig: {
apiKey: 'sk-test',
model: 'claude-3.5-haiku-20241022',
stream: true,
topP: 0.9,
maxTokens: 8192,
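// topK has no top-level OpenAI equivalent, so it travels in modelKwargs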
modelKwargs: {
metadata: {
user_id: 'testUser',
},
topK: 40,
},
},
configOptions: {
baseURL: 'http://litellm/v1',
defaultHeaders: {
'anthropic-beta': 'prompt-caching-2024-07-31',
},
},
tools: [],
});
});
it('should handle addParams with Anthropic defaults', () => {
const apiKey = 'sk-add';
const endpoint = 'Anthropic (via LiteLLM)';
const options = {
modelOptions: {
model: 'claude-3-opus-20240229',
user: 'addUser',
temperature: 0.7,
},
reverseProxyUrl: 'http://litellm/v1',
addParams: {
customParam1: 'value1',
customParam2: 42,
frequencyPenalty: 0.5, // Known OpenAI param
},
customParams: {
defaultParamsEndpoint: 'anthropic',
},
endpoint: 'Anthropic (via LiteLLM)',
endpointType: 'custom',
};
const result = getOpenAIConfig(apiKey, options, endpoint);
expect(result).toEqual({
llmConfig: {
apiKey: 'sk-add',
model: 'claude-3-opus-20240229',
stream: true,
temperature: 0.7,
frequencyPenalty: 0.5, // Known param added to main config
maxTokens: 8192,
modelKwargs: {
metadata: {
user_id: 'addUser',
},
customParam1: 'value1', // Unknown params added to modelKwargs
customParam2: 42,
},
},
configOptions: {
baseURL: 'http://litellm/v1',
defaultHeaders: {
'anthropic-beta': 'prompt-caching-2024-07-31',
},
},
tools: [],
});
});
it('should handle both addParams and dropParams together', () => {
const apiKey = 'sk-both';
const endpoint = 'Anthropic (via LiteLLM)';
const options = {
modelOptions: {
model: 'claude-3.5-sonnet-20240620',
user: 'bothUser',
temperature: 0.6,
topP: 0.9,
topK: 40,
},
reverseProxyUrl: 'http://litellm/v1',
addParams: {
customParam: 'customValue',
maxRetries: 3, // Known OpenAI param
},
dropParams: ['temperature', 'topK'], // Drop one known and one unknown param
customParams: {
defaultParamsEndpoint: 'anthropic',
},
endpoint: 'Anthropic (via LiteLLM)',
endpointType: 'custom',
};
const result = getOpenAIConfig(apiKey, options, endpoint);
expect(result).toEqual({
llmConfig: {
apiKey: 'sk-both',
model: 'claude-3.5-sonnet-20240620',
stream: true,
topP: 0.9,
maxRetries: 3,
maxTokens: 8192,
modelKwargs: {
metadata: {
user_id: 'bothUser',
},
customParam: 'customValue',
// topK is dropped
},
},
configOptions: {
baseURL: 'http://litellm/v1',
defaultHeaders: {
'anthropic-beta': 'max-tokens-3-5-sonnet-2024-07-15,prompt-caching-2024-07-31',
},
},
tools: [],
});
});
});
});