refactor: port llm.spec.js over to typescript

Dustin Healy 2025-09-01 18:15:36 -07:00 committed by Danny Avila
parent f1dab7f924
commit 5b63aceda9
No known key found for this signature in database
GPG key ID: BF31EEB2C5CA0956
4 changed files with 29 additions and 19 deletions

View file

@@ -1,4 +1,4 @@
-const { getLLMConfig } = require('./llm');
+import { getLLMConfig } from './llm';
 
 jest.mock('https-proxy-agent', () => ({
   HttpsProxyAgent: jest.fn().mockImplementation((proxy) => ({ proxy })),
@@ -25,9 +25,9 @@ describe('getLLMConfig', () => {
     });
     expect(result.llmConfig.clientOptions).toHaveProperty('fetchOptions');
-    expect(result.llmConfig.clientOptions.fetchOptions).toHaveProperty('dispatcher');
-    expect(result.llmConfig.clientOptions.fetchOptions.dispatcher).toBeDefined();
-    expect(result.llmConfig.clientOptions.fetchOptions.dispatcher.constructor.name).toBe(
+    expect(result.llmConfig.clientOptions?.fetchOptions).toHaveProperty('dispatcher');
+    expect(result.llmConfig.clientOptions?.fetchOptions?.dispatcher).toBeDefined();
+    expect(result.llmConfig.clientOptions?.fetchOptions?.dispatcher.constructor.name).toBe(
       'ProxyAgent',
     );
   });
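
The optional chaining added in this hunk is what the TypeScript port needs to compile: in the typed config, clientOptions and fetchOptions are optional properties, so dereferencing them without ?. is an error under strictNullChecks. A minimal sketch of the rule, with stand-in types rather than the project's real ones:

    // Stand-in shape: both levels optional, as in the typed config.
    interface SketchClientOptions {
      fetchOptions?: { dispatcher?: unknown };
    }
    const opts: SketchClientOptions | undefined = {};
    // opts.fetchOptions.dispatcher;                   // error: possibly 'undefined'
    const dispatcher = opts?.fetchOptions?.dispatcher; // ok: short-circuits to undefined
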
@@ -93,9 +93,10 @@ describe('getLLMConfig', () => {
     };
     const result = getLLMConfig('test-key', { modelOptions });
     const clientOptions = result.llmConfig.clientOptions;
-    expect(clientOptions.defaultHeaders).toBeDefined();
-    expect(clientOptions.defaultHeaders).toHaveProperty('anthropic-beta');
-    expect(clientOptions.defaultHeaders['anthropic-beta']).toBe(
+    expect(clientOptions?.defaultHeaders).toBeDefined();
+    expect(clientOptions?.defaultHeaders).toHaveProperty('anthropic-beta');
+    const defaultHeaders = clientOptions?.defaultHeaders as Record<string, string>;
+    expect(defaultHeaders['anthropic-beta']).toBe(
       'prompt-caching-2024-07-31,context-1m-2025-08-07',
     );
   });
@@ -111,9 +112,10 @@ describe('getLLMConfig', () => {
     const modelOptions = { model, promptCache: true };
     const result = getLLMConfig('test-key', { modelOptions });
     const clientOptions = result.llmConfig.clientOptions;
-    expect(clientOptions.defaultHeaders).toBeDefined();
-    expect(clientOptions.defaultHeaders).toHaveProperty('anthropic-beta');
-    expect(clientOptions.defaultHeaders['anthropic-beta']).toBe(
+    expect(clientOptions?.defaultHeaders).toBeDefined();
+    expect(clientOptions?.defaultHeaders).toHaveProperty('anthropic-beta');
+    const defaultHeaders = clientOptions?.defaultHeaders as Record<string, string>;
+    expect(defaultHeaders['anthropic-beta']).toBe(
       'prompt-caching-2024-07-31,context-1m-2025-08-07',
     );
   });
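
The as Record<string, string> assertion in these two hunks serves a similar purpose: defaultHeaders is presumably typed as possibly undefined, with values that may not be plain strings, so indexing through the optional chain would yield string | undefined. The test first proves the header exists via expect(), then narrows the type for the equality check. A sketch of that narrowing, with an assumed header type:

    // Assumed loose type; the real client-options type may differ.
    const raw: Record<string, string | undefined> | undefined = {
      'anthropic-beta': 'prompt-caching-2024-07-31',
    };
    const defaultHeaders = raw as Record<string, string>;
    const beta: string = defaultHeaders['anthropic-beta']; // string, not string | undefined
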
@@ -254,9 +256,9 @@ describe('getLLMConfig', () => {
     });
     expect(result.llmConfig.clientOptions).toHaveProperty('fetchOptions');
-    expect(result.llmConfig.clientOptions.fetchOptions).toHaveProperty('dispatcher');
-    expect(result.llmConfig.clientOptions.fetchOptions.dispatcher).toBeDefined();
-    expect(result.llmConfig.clientOptions.fetchOptions.dispatcher.constructor.name).toBe(
+    expect(result.llmConfig.clientOptions?.fetchOptions).toHaveProperty('dispatcher');
+    expect(result.llmConfig.clientOptions?.fetchOptions?.dispatcher).toBeDefined();
+    expect(result.llmConfig.clientOptions?.fetchOptions?.dispatcher.constructor.name).toBe(
       'ProxyAgent',
     );
     expect(result.llmConfig.clientOptions).toHaveProperty('baseURL', 'https://reverse-proxy.com');
@@ -272,7 +274,7 @@ describe('getLLMConfig', () => {
     });
 
     // claude-3-5-sonnet supports prompt caching and should get the appropriate headers
-    expect(result.llmConfig.clientOptions.defaultHeaders).toEqual({
+    expect(result.llmConfig.clientOptions?.defaultHeaders).toEqual({
       'anthropic-beta': 'max-tokens-3-5-sonnet-2024-07-15,prompt-caching-2024-07-31',
     });
   });

View file

@@ -28,7 +28,7 @@ import { checkPromptCacheSupport, getClaudeHeaders, configureReasoning } from '.
  * @returns {Object} Configuration options for creating an Anthropic LLM instance, with null and undefined values removed.
  */
 function getLLMConfig(
-  apiKey: string,
+  apiKey?: string,
   options: AnthropicConfigOptions = {} as AnthropicConfigOptions,
 ): AnthropicLLMConfigResult {
   const systemOptions = {
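
Making apiKey optional loosens only the signature: apiKey?: string lets callers omit the argument entirely, and inside the function the parameter is typed string | undefined. A minimal sketch of what the change permits (hypothetical helper, not the real implementation):

    function makeConfig(apiKey?: string): { apiKey?: string } {
      // apiKey is string | undefined here; include it only when provided.
      return apiKey !== undefined ? { apiKey } : {};
    }
    makeConfig('test-key'); // compiles before and after the change
    makeConfig();           // compiles only once the parameter is optional
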

View file

@ -1,6 +1,7 @@
import { z } from 'zod';
import { AnthropicClientOptions } from '@librechat/agents';
import { Dispatcher } from 'undici';
import { anthropicSchema } from 'librechat-data-provider';
import { AnthropicClientOptions } from '@librechat/agents';
export type AnthropicParameters = z.infer<typeof anthropicSchema>;
@@ -22,7 +23,11 @@ export interface AnthropicConfigOptions {
  */
 export interface AnthropicLLMConfigResult {
   /** Configuration options for creating an Anthropic LLM instance */
-  llmConfig: AnthropicClientOptions;
+  llmConfig: AnthropicClientOptions & {
+    clientOptions?: {
+      fetchOptions?: { dispatcher: Dispatcher };
+    };
+  };
   /** Array of tools to be used */
   tools: Array<{
     type: string;
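
This intersection is what lets the ported spec read the dispatcher without casting: AnthropicClientOptions apparently does not declare a fetchOptions.dispatcher path itself, so the result type widens llmConfig with an optional, fully typed route to undici's Dispatcher. A reduced sketch of the pattern, with a stand-in base type:

    import type { Dispatcher } from 'undici';

    type Base = { apiKey?: string }; // stand-in for AnthropicClientOptions
    type LLMConfig = Base & {
      clientOptions?: {
        fetchOptions?: { dispatcher: Dispatcher };
      };
    };
    // The intersection keeps every Base member and adds the typed path:
    declare const cfg: LLMConfig;
    const d: Dispatcher | undefined = cfg.clientOptions?.fetchOptions?.dispatcher;
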

View file

@@ -619,14 +619,14 @@ export const tConversationSchema = z.object({
   userLabel: z.string().optional(),
   model: z.string().nullable().optional(),
   promptPrefix: z.string().nullable().optional(),
-  temperature: z.number().optional(),
+  temperature: z.number().nullable().optional(),
   topP: z.number().optional(),
   topK: z.number().optional(),
   top_p: z.number().optional(),
   frequency_penalty: z.number().optional(),
   presence_penalty: z.number().optional(),
   parentMessageId: z.string().optional(),
-  maxOutputTokens: coerceNumber.optional(),
+  maxOutputTokens: coerceNumber.nullable().optional(),
   maxContextTokens: coerceNumber.optional(),
   max_tokens: coerceNumber.optional(),
   /* Anthropic */
@@ -634,6 +634,7 @@ export const tConversationSchema = z.object({
   system: z.string().optional(),
   thinking: z.boolean().optional(),
   thinkingBudget: coerceNumber.optional(),
+  stream: z.boolean().optional(),
   /* artifacts */
   artifacts: z.string().optional(),
   /* google */
@@ -1152,6 +1153,8 @@ export const anthropicBaseSchema = tConversationSchema.pick({
   maxContextTokens: true,
   web_search: true,
   fileTokenLimit: true,
+  stop: true,
+  stream: true,
 });
 
 export const anthropicSchema = anthropicBaseSchema
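
The schema changes mirror the looser runtime values: in zod, .optional() alone still rejects null, while .nullable().optional() accepts the value, null, or undefined, and stop/stream are newly picked into the Anthropic base schema. The difference in one sketch (zod v3 behavior):

    import { z } from 'zod';

    const optionalOnly = z.number().optional();                // number | undefined
    const nullableOptional = z.number().nullable().optional(); // number | null | undefined

    optionalOnly.safeParse(null).success;          // false: null is rejected
    nullableOptional.safeParse(null).success;      // true: null now passes
    nullableOptional.safeParse(undefined).success; // true for both variants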