Mirror of https://github.com/danny-avila/LibreChat.git, synced 2025-12-17 00:40:14 +01:00.
* chore: update peer dependency for @librechat/agents to version 2.4.41 * 🔧 chore: proxy handling in OpenAI endpoint to use undici * 🔧 chore: update @anthropic-ai/sdk to version 0.52.0 and refactor proxy handling to use undici * 🔧 chore: update globIgnores in vite.config.ts to exclude index.html from caching * 🔧 ci: update proxy handling in getLLMConfig to use fetchOptions and ProxyAgent * 🔧 chore: refactor proxy handling in Anthropic and OpenAI clients to use fetchOptions * refactor: agent initialization to streamline model parameters and resendFiles handling * chore: update @google/generative-ai to version 0.24.0
157 lines
4.8 KiB
JavaScript
const { anthropicSettings } = require('librechat-data-provider');
|
|
const { getLLMConfig } = require('~/server/services/Endpoints/anthropic/llm');
|
|
|
|
// Stub out https-proxy-agent so no real proxy agent is constructed during tests.
// NOTE(review): the proxy assertions below check undici's ProxyAgent, not
// HttpsProxyAgent — this mock looks stale after the undici refactor; confirm
// whether the implementation still imports 'https-proxy-agent' and drop the
// mock if it does not.
jest.mock('https-proxy-agent', () => ({
  HttpsProxyAgent: jest.fn().mockImplementation((proxy) => ({ proxy })),
}));
|
/**
 * Unit tests for getLLMConfig (Anthropic endpoint).
 *
 * Covers: default config shape, proxy wiring via undici's ProxyAgent,
 * reverse-proxy baseURL passthrough, topK/topP inclusion rules across
 * Claude model families (notably the Claude-3.7 "thinking" exclusion),
 * maxOutputTokens mapping, and promptCache handling.
 */
describe('getLLMConfig', () => {
  it('should create a basic configuration with default values', () => {
    const result = getLLMConfig('test-api-key', { modelOptions: {} });

    expect(result.llmConfig).toHaveProperty('apiKey', 'test-api-key');
    expect(result.llmConfig).toHaveProperty('model', anthropicSettings.model.default);
    expect(result.llmConfig).toHaveProperty('stream', true);
    expect(result.llmConfig).toHaveProperty('maxTokens');
  });

  it('should include proxy settings when provided', () => {
    const result = getLLMConfig('test-api-key', {
      modelOptions: {},
      proxy: 'http://proxy:8080',
    });

    // Proxying is routed through undici: the client options must carry a
    // fetchOptions.dispatcher that is an undici ProxyAgent instance.
    expect(result.llmConfig.clientOptions).toHaveProperty('fetchOptions');
    expect(result.llmConfig.clientOptions.fetchOptions).toHaveProperty('dispatcher');
    expect(result.llmConfig.clientOptions.fetchOptions.dispatcher).toBeDefined();
    expect(result.llmConfig.clientOptions.fetchOptions.dispatcher.constructor.name).toBe(
      'ProxyAgent',
    );
  });

  it('should include reverse proxy URL when provided', () => {
    const result = getLLMConfig('test-api-key', {
      modelOptions: {},
      reverseProxyUrl: 'http://reverse-proxy',
    });

    expect(result.llmConfig.clientOptions).toHaveProperty('baseURL', 'http://reverse-proxy');
  });

  it('should include topK and topP for non-Claude-3.7 models', () => {
    const result = getLLMConfig('test-api-key', {
      modelOptions: {
        model: 'claude-3-opus',
        topK: 10,
        topP: 0.9,
      },
    });

    expect(result.llmConfig).toHaveProperty('topK', 10);
    expect(result.llmConfig).toHaveProperty('topP', 0.9);
  });

  it('should include topK and topP for Claude-3.5 models', () => {
    const result = getLLMConfig('test-api-key', {
      modelOptions: {
        model: 'claude-3-5-sonnet',
        topK: 10,
        topP: 0.9,
      },
    });

    expect(result.llmConfig).toHaveProperty('topK', 10);
    expect(result.llmConfig).toHaveProperty('topP', 0.9);
  });

  it('should NOT include topK and topP for Claude-3-7 models (hyphen notation)', () => {
    const result = getLLMConfig('test-api-key', {
      modelOptions: {
        model: 'claude-3-7-sonnet',
        topK: 10,
        topP: 0.9,
      },
    });

    expect(result.llmConfig).not.toHaveProperty('topK');
    expect(result.llmConfig).not.toHaveProperty('topP');
  });

  it('should NOT include topK and topP for Claude-3.7 models (decimal notation)', () => {
    const result = getLLMConfig('test-api-key', {
      modelOptions: {
        model: 'claude-3.7-sonnet',
        topK: 10,
        topP: 0.9,
      },
    });

    expect(result.llmConfig).not.toHaveProperty('topK');
    expect(result.llmConfig).not.toHaveProperty('topP');
  });

  it('should handle custom maxOutputTokens', () => {
    const result = getLLMConfig('test-api-key', {
      modelOptions: {
        model: 'claude-3-opus',
        maxOutputTokens: 2048,
      },
    });

    // maxOutputTokens (public option name) maps to the SDK's maxTokens field.
    expect(result.llmConfig).toHaveProperty('maxTokens', 2048);
  });

  it('should handle promptCache setting', () => {
    const result = getLLMConfig('test-api-key', {
      modelOptions: {
        model: 'claude-3-5-sonnet',
        promptCache: true,
      },
    });

    // We're not checking specific header values since that depends on the actual helper function
    // Just verifying that the promptCache setting is processed
    expect(result.llmConfig).toBeDefined();
  });

  it('should include topK and topP for Claude-3.7 models when thinking is not enabled', () => {
    // Original file asserted the identical `thinking: false` case twice in a
    // row; the duplicate (with a misleading "null/undefined" comment) has been
    // removed. Both model-name notations are still covered below.

    // Hyphen notation with thinking explicitly disabled
    const result = getLLMConfig('test-api-key', {
      modelOptions: {
        model: 'claude-3-7-sonnet',
        topK: 10,
        topP: 0.9,
        thinking: false,
      },
    });

    expect(result.llmConfig).toHaveProperty('topK', 10);
    expect(result.llmConfig).toHaveProperty('topP', 0.9);

    // Decimal notation with thinking explicitly disabled
    const result2 = getLLMConfig('test-api-key', {
      modelOptions: {
        model: 'claude-3.7-sonnet',
        topK: 10,
        topP: 0.9,
        thinking: false,
      },
    });

    expect(result2.llmConfig).toHaveProperty('topK', 10);
    expect(result2.llmConfig).toHaveProperty('topP', 0.9);
  });
});
|