diff --git a/api/server/services/Endpoints/anthropic/llm.spec.js b/api/server/services/Endpoints/anthropic/llm.spec.js index 5037ab6424..14f5ec3a0c 100644 --- a/api/server/services/Endpoints/anthropic/llm.spec.js +++ b/api/server/services/Endpoints/anthropic/llm.spec.js @@ -211,13 +211,13 @@ describe('getLLMConfig', () => { it('should handle empty modelOptions', () => { expect(() => { getLLMConfig('test-api-key', {}); - }).toThrow("Cannot read properties of undefined (reading 'thinking')"); + }).toThrow('No modelOptions provided'); }); it('should handle no options parameter', () => { expect(() => { getLLMConfig('test-api-key'); - }).toThrow("Cannot read properties of undefined (reading 'thinking')"); + }).toThrow('No modelOptions provided'); }); it('should handle temperature, stop sequences, and stream settings', () => { diff --git a/packages/api/src/endpoints/anthropic/llm.js b/packages/api/src/endpoints/anthropic/llm.ts similarity index 60% rename from packages/api/src/endpoints/anthropic/llm.js rename to packages/api/src/endpoints/anthropic/llm.ts index cae9348488..5519ea7c5f 100644 --- a/packages/api/src/endpoints/anthropic/llm.js +++ b/packages/api/src/endpoints/anthropic/llm.ts @@ -1,6 +1,12 @@ -const { ProxyAgent } = require('undici'); -const { anthropicSettings, removeNullishValues } = require('librechat-data-provider'); -const { checkPromptCacheSupport, getClaudeHeaders, configureReasoning } = require('./helpers'); +import { ProxyAgent } from 'undici'; +import { AnthropicClientOptions } from '@librechat/agents'; +import { anthropicSettings, removeNullishValues } from 'librechat-data-provider'; +import type { + AnthropicConfigOptions, + AnthropicLLMConfigResult, + AnthropicParameters, +} from '~/types/anthropic'; +import { checkPromptCacheSupport, getClaudeHeaders, configureReasoning } from './helpers'; /** * Generates configuration options for creating an Anthropic language model (LLM) instance. 
@@ -21,25 +27,42 @@ const { checkPromptCacheSupport, getClaudeHeaders, configureReasoning } = requir * * @returns {Object} Configuration options for creating an Anthropic LLM instance, with null and undefined values removed. */ -function getLLMConfig(apiKey, options = {}) { +function getLLMConfig( + apiKey: string, + options: AnthropicConfigOptions = {} as AnthropicConfigOptions, +): AnthropicLLMConfigResult { const systemOptions = { - thinking: options.modelOptions.thinking ?? anthropicSettings.thinking.default, - promptCache: options.modelOptions.promptCache ?? anthropicSettings.promptCache.default, - thinkingBudget: options.modelOptions.thinkingBudget ?? anthropicSettings.thinkingBudget.default, + thinking: options.modelOptions?.thinking ?? anthropicSettings.thinking.default, + promptCache: options.modelOptions?.promptCache ?? anthropicSettings.promptCache.default, + thinkingBudget: + options.modelOptions?.thinkingBudget ?? anthropicSettings.thinkingBudget.default, }; - for (let key in systemOptions) { - delete options.modelOptions[key]; + + /** Couldn't figure out a way to still loop through the object while deleting the overlapping keys when porting this + * over from javascript, so for now they are being deleted manually until a better way presents itself. 
+ */ + if (options.modelOptions) { + delete options.modelOptions.thinking; + delete options.modelOptions.promptCache; + delete options.modelOptions.thinkingBudget; + } else { + throw new Error('No modelOptions provided'); } + const defaultOptions = { model: anthropicSettings.model.default, maxOutputTokens: anthropicSettings.maxOutputTokens.default, stream: true, }; - const mergedOptions = Object.assign(defaultOptions, options.modelOptions); + const mergedOptions = Object.assign( + defaultOptions, + options.modelOptions, + ) as typeof defaultOptions & + Partial<AnthropicParameters> & { stop?: string[]; web_search?: boolean }; /** @type {AnthropicClientOptions} */ - let requestOptions = { + let requestOptions: AnthropicClientOptions & { stream?: boolean } = { apiKey, model: mergedOptions.model, stream: mergedOptions.stream, @@ -66,20 +89,20 @@ function getLLMConfig(apiKey, options = {}) { } const supportsCacheControl = - systemOptions.promptCache === true && checkPromptCacheSupport(requestOptions.model); - const headers = getClaudeHeaders(requestOptions.model, supportsCacheControl); - if (headers) { + systemOptions.promptCache === true && checkPromptCacheSupport(requestOptions.model ?? ''); + const headers = getClaudeHeaders(requestOptions.model ?? 
'', supportsCacheControl); + if (headers && requestOptions.clientOptions) { requestOptions.clientOptions.defaultHeaders = headers; } - if (options.proxy) { + if (options.proxy && requestOptions.clientOptions) { const proxyAgent = new ProxyAgent(options.proxy); requestOptions.clientOptions.fetchOptions = { dispatcher: proxyAgent, }; } - if (options.reverseProxyUrl) { + if (options.reverseProxyUrl && requestOptions.clientOptions) { requestOptions.clientOptions.baseURL = options.reverseProxyUrl; requestOptions.anthropicApiUrl = options.reverseProxyUrl; } @@ -96,7 +119,9 @@ function getLLMConfig(apiKey, options = {}) { return { tools, /** @type {AnthropicClientOptions} */ - llmConfig: removeNullishValues(requestOptions), + llmConfig: removeNullishValues( + requestOptions as Record<string, unknown>, + ) as AnthropicClientOptions, }; } diff --git a/packages/api/src/types/anthropic.ts b/packages/api/src/types/anthropic.ts new file mode 100644 index 0000000000..bbef7dd4b5 --- /dev/null +++ b/packages/api/src/types/anthropic.ts @@ -0,0 +1,31 @@ +import { z } from 'zod'; +import { AnthropicClientOptions } from '@librechat/agents'; +import { anthropicSchema } from 'librechat-data-provider'; + +export type AnthropicParameters = z.infer<typeof anthropicSchema>; + +/** + * Configuration options for the getLLMConfig function + */ +export interface AnthropicConfigOptions { + modelOptions?: Partial<AnthropicParameters>; + /** The user ID for tracking and personalization */ + userId?: string; + /** Proxy server URL */ + proxy?: string; + /** URL for a reverse proxy, if used */ + reverseProxyUrl?: string; +} + +/** + * Return type for getLLMConfig function + */ +export interface AnthropicLLMConfigResult { + /** Configuration options for creating an Anthropic LLM instance */ + llmConfig: AnthropicClientOptions; + /** Array of tools to be used */ + tools: Array<{ + type: string; + name?: string; + }>; +}