🎭 feat: Override Custom Endpoint Schema with Specified Params Endpoint (#11788)

* 🔧 refactor: Simplify payload parsing and enhance getSaveOptions logic

- Removed unused bedrockInputSchema from payloadParser, streamlining the function.
- Updated payloadParser to handle optional chaining for model parameters.
- Enhanced getSaveOptions to ensure runOptions defaults to an empty object if parsing fails, improving robustness.
- Adjusted the assignment of maxContextTokens to use the instance variable for consistency.

* 🔧 fix: Update maxContextTokens assignment logic in initializeAgent function

- Enhanced the maxContextTokens assignment to allow for user-defined values, ensuring it defaults to a calculated value only when not provided or invalid. This change improves flexibility in agent initialization.

* 🧪 test: Add unit tests for initializeAgent function

- Introduced comprehensive unit tests for the initializeAgent function, focusing on maxContextTokens behavior.
- Tests cover scenarios for user-defined values, fallback calculations, and edge cases such as zero and negative values, enhancing overall test coverage and reliability of agent initialization logic.

* refactor: default params Endpoint Configuration Handling

- Integrated `getEndpointsConfig` to fetch endpoint configurations, allowing for dynamic handling of `defaultParamsEndpoint`.
- Updated `buildEndpointOption` to pass `defaultParamsEndpoint` to `parseCompactConvo`, ensuring correct parameter handling based on endpoint type.
- Added comprehensive unit tests for `buildDefaultConvo` and `cleanupPreset` to validate behavior with `defaultParamsEndpoint`, covering various scenarios and edge cases.
- Refactored related hooks and utility functions to support the new configuration structure, improving overall flexibility and maintainability.

* refactor: Centralize defaultParamsEndpoint retrieval

- Introduced `getDefaultParamsEndpoint` function to streamline the retrieval of `defaultParamsEndpoint` across various hooks and middleware.
- Updated multiple files to utilize the new function, enhancing code consistency and maintainability.
- Removed redundant logic for fetching `defaultParamsEndpoint`, simplifying the codebase.
This commit is contained in:
Danny Avila 2026-02-13 23:04:51 -05:00 committed by GitHub
parent 6cc6ee3207
commit 467df0f07a
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
19 changed files with 1234 additions and 45 deletions

View file

@@ -0,0 +1,202 @@
import { EModelEndpoint } from 'librechat-data-provider';
import type { TConversation } from 'librechat-data-provider';
import buildDefaultConvo from '../buildDefaultConvo';
// Mock localStorage access so buildDefaultConvo sees no previously
// selected model/tools and no stored conversation setup — tests supply
// their own setup via `lastConversationSetup`.
jest.mock('../localStorage', () => ({
  getLocalStorageItems: jest.fn(() => ({
    lastSelectedModel: {},
    lastSelectedTools: [],
    lastConversationSetup: {},
  })),
}));
// Minimal conversation fixture; individual tests spread endpoint,
// model, and parameter fields on top of it.
const baseConversation: TConversation = {
  conversationId: 'test-convo-id',
  title: 'Test Conversation',
  createdAt: '2024-01-01T00:00:00Z',
  updatedAt: '2024-01-01T00:00:00Z',
  endpoint: null,
};
describe('buildDefaultConvo - defaultParamsEndpoint', () => {
  describe('custom endpoint with defaultParamsEndpoint: anthropic', () => {
    // Models served by the custom "AnthropicClaude" endpoint.
    const availableModels = ['anthropic/claude-opus-4.5', 'anthropic/claude-sonnet-4'];

    it('should preserve maxOutputTokens from model spec preset', () => {
      const convo = buildDefaultConvo({
        models: availableModels,
        conversation: baseConversation,
        endpoint: 'AnthropicClaude' as EModelEndpoint,
        lastConversationSetup: {
          ...baseConversation,
          endpoint: 'AnthropicClaude' as EModelEndpoint,
          endpointType: EModelEndpoint.custom,
          model: 'anthropic/claude-opus-4.5',
          temperature: 0.7,
          maxOutputTokens: 8192,
          topP: 0.9,
          maxContextTokens: 50000,
        },
        defaultParamsEndpoint: EModelEndpoint.anthropic,
      });

      // Anthropic-schema fields survive parsing for this custom endpoint.
      expect(convo.maxOutputTokens).toBe(8192);
      expect(convo.topP).toBe(0.9);
      expect(convo.temperature).toBe(0.7);
      expect(convo.maxContextTokens).toBe(50000);
      expect(convo.model).toBe('anthropic/claude-opus-4.5');
    });

    it('should strip maxOutputTokens without defaultParamsEndpoint', () => {
      const convo = buildDefaultConvo({
        models: availableModels,
        conversation: baseConversation,
        endpoint: 'AnthropicClaude' as EModelEndpoint,
        lastConversationSetup: {
          ...baseConversation,
          endpoint: 'AnthropicClaude' as EModelEndpoint,
          endpointType: EModelEndpoint.custom,
          model: 'anthropic/claude-opus-4.5',
          temperature: 0.7,
          maxOutputTokens: 8192,
        },
      });

      // Without defaultParamsEndpoint the default (OpenAI) schema applies
      // and drops the anthropic-only field.
      expect(convo.maxOutputTokens).toBeUndefined();
      expect(convo.temperature).toBe(0.7);
    });

    it('should strip OpenAI-specific fields when using anthropic params', () => {
      const convo = buildDefaultConvo({
        models: availableModels,
        conversation: baseConversation,
        endpoint: 'AnthropicClaude' as EModelEndpoint,
        lastConversationSetup: {
          ...baseConversation,
          endpoint: 'AnthropicClaude' as EModelEndpoint,
          endpointType: EModelEndpoint.custom,
          model: 'anthropic/claude-opus-4.5',
          max_tokens: 4096,
          top_p: 0.9,
          presence_penalty: 0.5,
          frequency_penalty: 0.3,
        },
        defaultParamsEndpoint: EModelEndpoint.anthropic,
      });

      expect(convo.max_tokens).toBeUndefined();
      expect(convo.top_p).toBeUndefined();
      expect(convo.presence_penalty).toBeUndefined();
      expect(convo.frequency_penalty).toBeUndefined();
    });
  });

  describe('custom endpoint without defaultParamsEndpoint (OpenAI default)', () => {
    const availableModels = ['gpt-4o', 'gpt-4.1'];

    it('should preserve OpenAI fields and strip anthropic fields', () => {
      const convo = buildDefaultConvo({
        models: availableModels,
        conversation: baseConversation,
        endpoint: 'MyOpenRouterEndpoint' as EModelEndpoint,
        lastConversationSetup: {
          ...baseConversation,
          endpoint: 'MyOpenRouterEndpoint' as EModelEndpoint,
          endpointType: EModelEndpoint.custom,
          model: 'gpt-4o',
          temperature: 0.7,
          max_tokens: 4096,
          top_p: 0.9,
          maxOutputTokens: 8192,
        },
      });

      expect(convo.max_tokens).toBe(4096);
      expect(convo.top_p).toBe(0.9);
      expect(convo.temperature).toBe(0.7);
      expect(convo.maxOutputTokens).toBeUndefined();
    });
  });

  describe('custom endpoint with defaultParamsEndpoint: google', () => {
    const availableModels = ['gemini-pro', 'gemini-1.5-pro'];

    it('should preserve Google-specific fields', () => {
      const convo = buildDefaultConvo({
        models: availableModels,
        conversation: baseConversation,
        endpoint: 'MyGoogleEndpoint' as EModelEndpoint,
        lastConversationSetup: {
          ...baseConversation,
          endpoint: 'MyGoogleEndpoint' as EModelEndpoint,
          endpointType: EModelEndpoint.custom,
          model: 'gemini-pro',
          temperature: 0.7,
          maxOutputTokens: 8192,
          topP: 0.9,
          topK: 40,
        },
        defaultParamsEndpoint: EModelEndpoint.google,
      });

      expect(convo.maxOutputTokens).toBe(8192);
      expect(convo.topP).toBe(0.9);
      expect(convo.topK).toBe(40);
    });
  });

  describe('cross-endpoint field isolation', () => {
    it('should not carry bedrock region to a custom endpoint', () => {
      const convo = buildDefaultConvo({
        models: ['gpt-4o'],
        conversation: baseConversation,
        endpoint: 'MyChatEndpoint' as EModelEndpoint,
        lastConversationSetup: {
          ...baseConversation,
          endpoint: 'MyChatEndpoint' as EModelEndpoint,
          endpointType: EModelEndpoint.custom,
          model: 'gpt-4o',
          temperature: 0.7,
          region: 'us-east-1',
        },
      });

      expect(convo.region).toBeUndefined();
      expect(convo.temperature).toBe(0.7);
    });

    it('should not carry bedrock region even with anthropic defaultParamsEndpoint', () => {
      const convo = buildDefaultConvo({
        models: ['claude-3-opus'],
        conversation: baseConversation,
        endpoint: 'MyChatEndpoint' as EModelEndpoint,
        lastConversationSetup: {
          ...baseConversation,
          endpoint: 'MyChatEndpoint' as EModelEndpoint,
          endpointType: EModelEndpoint.custom,
          model: 'claude-3-opus',
          region: 'us-east-1',
          maxOutputTokens: 8192,
        },
        defaultParamsEndpoint: EModelEndpoint.anthropic,
      });

      expect(convo.region).toBeUndefined();
      expect(convo.maxOutputTokens).toBe(8192);
    });
  });
});

View file

@@ -0,0 +1,119 @@
import { EModelEndpoint } from 'librechat-data-provider';
import cleanupPreset from '../cleanupPreset';
/**
 * Integration tests for cleanupPreset — NO mocks.
* Uses the real parseConvo to verify actual schema behavior
* with defaultParamsEndpoint for custom endpoints.
*/
describe('cleanupPreset - real parsing with defaultParamsEndpoint', () => {
  it('should preserve maxOutputTokens when defaultParamsEndpoint is anthropic', () => {
    const rawPreset = {
      presetId: 'test-id',
      title: 'Claude Opus',
      endpoint: 'AnthropicClaude',
      endpointType: EModelEndpoint.custom,
      model: 'anthropic/claude-opus-4.5',
      temperature: 0.7,
      maxOutputTokens: 8192,
      topP: 0.9,
      maxContextTokens: 50000,
    };

    const cleaned = cleanupPreset({
      preset: rawPreset,
      defaultParamsEndpoint: EModelEndpoint.anthropic,
    });

    // Anthropic-schema fields are retained for the custom endpoint.
    expect(cleaned.maxOutputTokens).toBe(8192);
    expect(cleaned.topP).toBe(0.9);
    expect(cleaned.temperature).toBe(0.7);
    expect(cleaned.maxContextTokens).toBe(50000);
    expect(cleaned.model).toBe('anthropic/claude-opus-4.5');
  });

  it('should strip maxOutputTokens without defaultParamsEndpoint (OpenAI schema)', () => {
    const rawPreset = {
      presetId: 'test-id',
      title: 'GPT Custom',
      endpoint: 'MyOpenRouter',
      endpointType: EModelEndpoint.custom,
      model: 'gpt-4o',
      temperature: 0.7,
      maxOutputTokens: 8192,
      max_tokens: 4096,
    };

    const cleaned = cleanupPreset({ preset: rawPreset });

    // Default OpenAI schema keeps max_tokens, drops maxOutputTokens.
    expect(cleaned.maxOutputTokens).toBeUndefined();
    expect(cleaned.max_tokens).toBe(4096);
    expect(cleaned.temperature).toBe(0.7);
  });

  it('should strip OpenAI-specific fields when using anthropic params', () => {
    const rawPreset = {
      presetId: 'test-id',
      title: 'Claude Custom',
      endpoint: 'AnthropicClaude',
      endpointType: EModelEndpoint.custom,
      model: 'anthropic/claude-3-opus',
      max_tokens: 4096,
      top_p: 0.9,
      presence_penalty: 0.5,
      frequency_penalty: 0.3,
      temperature: 0.7,
    };

    const cleaned = cleanupPreset({
      preset: rawPreset,
      defaultParamsEndpoint: EModelEndpoint.anthropic,
    });

    expect(cleaned.max_tokens).toBeUndefined();
    expect(cleaned.top_p).toBeUndefined();
    expect(cleaned.presence_penalty).toBeUndefined();
    expect(cleaned.frequency_penalty).toBeUndefined();
    expect(cleaned.temperature).toBe(0.7);
  });

  it('should not carry bedrock region to custom endpoint', () => {
    const rawPreset = {
      presetId: 'test-id',
      title: 'Custom',
      endpoint: 'MyEndpoint',
      endpointType: EModelEndpoint.custom,
      model: 'gpt-4o',
      temperature: 0.7,
      region: 'us-east-1',
    };

    const cleaned = cleanupPreset({ preset: rawPreset });

    expect(cleaned.region).toBeUndefined();
    expect(cleaned.temperature).toBe(0.7);
  });

  it('should preserve Google-specific fields when defaultParamsEndpoint is google', () => {
    const rawPreset = {
      presetId: 'test-id',
      title: 'Gemini Custom',
      endpoint: 'MyGoogleEndpoint',
      endpointType: EModelEndpoint.custom,
      model: 'gemini-pro',
      temperature: 0.7,
      maxOutputTokens: 8192,
      topP: 0.9,
      topK: 40,
    };

    const cleaned = cleanupPreset({
      preset: rawPreset,
      defaultParamsEndpoint: EModelEndpoint.google,
    });

    expect(cleaned.maxOutputTokens).toBe(8192);
    expect(cleaned.topP).toBe(0.9);
    expect(cleaned.topK).toBe(40);
  });
});

View file

@@ -1,12 +1,9 @@
import { EModelEndpoint } from 'librechat-data-provider';
import { EModelEndpoint, parseConvo } from 'librechat-data-provider';
import cleanupPreset from '../cleanupPreset';
import type { TPreset } from 'librechat-data-provider';
// Mock parseConvo since we're focusing on testing the chatGptLabel migration logic
jest.mock('librechat-data-provider', () => ({
...jest.requireActual('librechat-data-provider'),
parseConvo: jest.fn((input) => {
// Return a simplified mock that passes through most properties
const { conversation } = input;
return {
...conversation,
@@ -221,4 +218,41 @@ describe('cleanupPreset', () => {
expect(result.presetId).toBeNull();
});
});
describe('defaultParamsEndpoint threading', () => {
it('should pass defaultParamsEndpoint to parseConvo', () => {
const preset = {
...basePreset,
endpoint: 'MyCustomEndpoint',
endpointType: EModelEndpoint.custom,
};
cleanupPreset({
preset,
defaultParamsEndpoint: EModelEndpoint.anthropic,
});
expect(parseConvo).toHaveBeenCalledWith(
expect.objectContaining({
defaultParamsEndpoint: EModelEndpoint.anthropic,
}),
);
});
it('should pass undefined defaultParamsEndpoint when not provided', () => {
const preset = {
...basePreset,
endpoint: 'MyCustomEndpoint',
endpointType: EModelEndpoint.custom,
};
cleanupPreset({ preset });
expect(parseConvo).toHaveBeenCalledWith(
expect.objectContaining({
defaultParamsEndpoint: undefined,
}),
);
});
});
});

View file

@@ -14,11 +14,13 @@ const buildDefaultConvo = ({
conversation,
endpoint = null,
lastConversationSetup,
defaultParamsEndpoint,
}: {
models: string[];
conversation: TConversation;
endpoint?: EModelEndpoint | null;
lastConversationSetup: TConversation | null;
defaultParamsEndpoint?: string | null;
}): TConversation => {
const { lastSelectedModel, lastSelectedTools } = getLocalStorageItems();
const endpointType = lastConversationSetup?.endpointType ?? conversation.endpointType;
@@ -49,6 +51,7 @@
possibleValues: {
models: possibleModels,
},
defaultParamsEndpoint,
});
const defaultConvo = {

View file

@@ -4,9 +4,10 @@ import type { TPreset } from 'librechat-data-provider';
type UIPreset = Partial<TPreset> & { presetOverride?: Partial<TPreset> };
type TCleanupPreset = {
preset?: UIPreset;
defaultParamsEndpoint?: string | null;
};
const cleanupPreset = ({ preset: _preset }: TCleanupPreset): TPreset => {
const cleanupPreset = ({ preset: _preset, defaultParamsEndpoint }: TCleanupPreset): TPreset => {
const { endpoint, endpointType } = _preset ?? ({} as UIPreset);
if (endpoint == null || endpoint === '') {
console.error(`Unknown endpoint ${endpoint}`, _preset);
@@ -35,8 +36,13 @@ const cleanupPreset = ({ preset: _preset }: TCleanupPreset): TPreset => {
delete preset.chatGptLabel;
}
/* @ts-ignore: endpoint can be a custom defined name */
const parsedPreset = parseConvo({ endpoint, endpointType, conversation: preset });
const parsedPreset = parseConvo({
/* @ts-ignore: endpoint can be a custom defined name */
endpoint,
endpointType,
conversation: preset,
defaultParamsEndpoint,
});
return {
presetId: _preset?.presetId ?? null,