🎭 feat: Override Custom Endpoint Schema with Specified Params Endpoint (#11788)

* 🔧 refactor: Simplify payload parsing and enhance getSaveOptions logic

- Removed the bedrock-specific `bedrockInputSchema` parsing branch from payloadParser, streamlining the function.
- Updated payloadParser to use optional chaining when reading model parameters from the request body.
- Enhanced getSaveOptions so that runOptions defaults to an empty object if parsing fails or returns nothing, improving robustness.
- Adjusted the maxContextTokens assignment to use the instance variable for consistency.

* 🔧 fix: Update maxContextTokens assignment logic in initializeAgent function

- Updated the maxContextTokens assignment to honor user-defined values, defaulting to a calculated value only when none is provided or the provided value is invalid (zero or negative). This change improves flexibility in agent initialization.
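
A condensed sketch of the new resolution order, with variable names taken from the initializeAgent diff near the end of this changeset:

  // a positive user-defined value wins; otherwise fall back to the calculated default
  maxContextTokens =
    maxContextTokens != null && maxContextTokens > 0
      ? maxContextTokens
      : Math.round((agentMaxContextNum - maxOutputTokensNum) * 0.9);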

* 🧪 test: Add unit tests for initializeAgent function

- Introduced comprehensive unit tests for the initializeAgent function, focusing on maxContextTokens behavior.
- Tests cover scenarios for user-defined values, fallback calculations, and edge cases such as zero and negative values, enhancing overall test coverage and reliability of agent initialization logic.

* refactor: Handle `defaultParamsEndpoint` in endpoint configuration

- Integrated `getEndpointsConfig` to fetch endpoint configurations, allowing for dynamic handling of `defaultParamsEndpoint`.
- Updated `buildEndpointOption` to pass `defaultParamsEndpoint` to `parseCompactConvo`, ensuring correct parameter handling based on endpoint type.
- Added comprehensive unit tests for `buildDefaultConvo` and `cleanupPreset` to validate behavior with `defaultParamsEndpoint`, covering various scenarios and edge cases.
- Refactored related hooks and utility functions to support the new configuration structure, improving overall flexibility and maintainability.
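
The override only affects endpoints that have no schema of their own. A sketch of the selection logic, condensed from the parseCompactConvo diff at the end of this changeset:

  let schema = compactEndpointSchemas[endpoint];
  if (!schema && !endpointType) {
    throw new Error(`Unknown endpoint: ${endpoint}`);
  } else if (!schema) {
    // prefer the configured defaultParamsEndpoint schema, then fall back to endpointType
    const overrideSchema = defaultParamsEndpoint
      ? compactEndpointSchemas[defaultParamsEndpoint]
      : undefined;
    schema = overrideSchema ?? (endpointType ? compactEndpointSchemas[endpointType] : undefined);
  }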

* refactor: Centralize defaultParamsEndpoint retrieval

- Introduced `getDefaultParamsEndpoint` function to streamline the retrieval of `defaultParamsEndpoint` across various hooks and middleware.
- Updated multiple files to utilize the new function, enhancing code consistency and maintainability.
- Removed redundant logic for fetching `defaultParamsEndpoint`, simplifying the codebase.
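
Call sites now reduce to a two-line pattern, as in the chat hooks below:

  const defaultParamsEndpoint = getDefaultParamsEndpoint(endpointsConfig, endpoint);
  const convo = parseCompactConvo({ endpoint, endpointType, conversation, defaultParamsEndpoint });
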
Danny Avila 2026-02-13 23:04:51 -05:00 committed by GitHub
parent 6cc6ee3207
commit 467df0f07a
19 changed files with 1234 additions and 45 deletions

@@ -39,7 +39,6 @@ const {
PermissionTypes,
isAgentsEndpoint,
isEphemeralAgentId,
bedrockInputSchema,
removeNullishValues,
} = require('librechat-data-provider');
const { spendTokens, spendStructuredTokens } = require('~/models/spendTokens');
@@ -69,17 +68,11 @@ const omitTitleOptions = new Set([
* @param {Agent} agent
* @param {string} endpoint
*/
const payloadParser = ({ req, agent, endpoint }) => {
const payloadParser = ({ req, endpoint }) => {
if (isAgentsEndpoint(endpoint)) {
return { model: undefined };
} else if (endpoint === EModelEndpoint.bedrock) {
const parsedValues = bedrockInputSchema.parse(agent.model_parameters);
if (parsedValues.thinking == null) {
parsedValues.thinking = false;
}
return parsedValues;
return;
}
return req.body.endpointOption.model_parameters;
return req.body?.endpointOption?.model_parameters;
};
function createTokenCounter(encoding) {
@@ -296,14 +289,9 @@ class AgentClient extends BaseClient {
checkVisionRequest() {}
getSaveOptions() {
// TODO:
// would need to be override settings; otherwise, model needs to be undefined
// model: this.override.model,
// instructions: this.override.instructions,
// additional_instructions: this.override.additional_instructions,
let runOptions = {};
try {
runOptions = payloadParser(this.options);
runOptions = payloadParser(this.options) ?? {};
} catch (error) {
logger.error(
'[api/server/controllers/agents/client.js #getSaveOptions] Error parsing options',
@@ -314,14 +302,14 @@ class AgentClient extends BaseClient {
return removeNullishValues(
Object.assign(
{
spec: this.options.spec,
iconURL: this.options.iconURL,
endpoint: this.options.endpoint,
agent_id: this.options.agent.id,
modelLabel: this.options.modelLabel,
maxContextTokens: this.options.maxContextTokens,
resendFiles: this.options.resendFiles,
imageDetail: this.options.imageDetail,
spec: this.options.spec,
iconURL: this.options.iconURL,
maxContextTokens: this.maxContextTokens,
},
// TODO: PARSE OPTIONS BY PROVIDER, MAY CONTAIN SENSITIVE DATA
runOptions,

@@ -5,9 +5,11 @@ const {
EModelEndpoint,
isAgentsEndpoint,
parseCompactConvo,
getDefaultParamsEndpoint,
} = require('librechat-data-provider');
const azureAssistants = require('~/server/services/Endpoints/azureAssistants');
const assistants = require('~/server/services/Endpoints/assistants');
const { getEndpointsConfig } = require('~/server/services/Config');
const agents = require('~/server/services/Endpoints/agents');
const { updateFilesUsage } = require('~/models');
@@ -19,9 +21,24 @@ const buildFunction = {
async function buildEndpointOption(req, res, next) {
const { endpoint, endpointType } = req.body;
let endpointsConfig;
try {
endpointsConfig = await getEndpointsConfig(req);
} catch (error) {
logger.error('Error fetching endpoints config in buildEndpointOption', error);
}
const defaultParamsEndpoint = getDefaultParamsEndpoint(endpointsConfig, endpoint);
let parsedBody;
try {
parsedBody = parseCompactConvo({ endpoint, endpointType, conversation: req.body });
parsedBody = parseCompactConvo({
endpoint,
endpointType,
conversation: req.body,
defaultParamsEndpoint,
});
} catch (error) {
logger.error(`Error parsing compact conversation for endpoint ${endpoint}`, error);
logger.debug({
@@ -55,6 +72,7 @@ async function buildEndpointOption(req, res, next) {
endpoint,
endpointType,
conversation: currentModelSpec.preset,
defaultParamsEndpoint,
});
if (currentModelSpec.iconURL != null && currentModelSpec.iconURL !== '') {
parsedBody.iconURL = currentModelSpec.iconURL;

@@ -0,0 +1,237 @@
/**
* Wrap parseCompactConvo: the REAL function runs, but jest can observe
* calls and return values. Must be declared before require('./buildEndpointOption')
* so the destructured reference in the middleware captures the wrapper.
*/
jest.mock('librechat-data-provider', () => {
const actual = jest.requireActual('librechat-data-provider');
return {
...actual,
parseCompactConvo: jest.fn((...args) => actual.parseCompactConvo(...args)),
};
});
const { EModelEndpoint, parseCompactConvo } = require('librechat-data-provider');
const mockBuildOptions = jest.fn((_endpoint, parsedBody) => ({
...parsedBody,
endpoint: _endpoint,
}));
jest.mock('~/server/services/Endpoints/azureAssistants', () => ({
buildOptions: mockBuildOptions,
}));
jest.mock('~/server/services/Endpoints/assistants', () => ({
buildOptions: mockBuildOptions,
}));
jest.mock('~/server/services/Endpoints/agents', () => ({
buildOptions: mockBuildOptions,
}));
jest.mock('~/models', () => ({
updateFilesUsage: jest.fn(),
}));
const mockGetEndpointsConfig = jest.fn();
jest.mock('~/server/services/Config', () => ({
getEndpointsConfig: (...args) => mockGetEndpointsConfig(...args),
}));
jest.mock('@librechat/api', () => ({
handleError: jest.fn(),
}));
const buildEndpointOption = require('./buildEndpointOption');
const createReq = (body, config = {}) => ({
body,
config,
baseUrl: '/api/chat',
});
const createRes = () => ({
status: jest.fn().mockReturnThis(),
json: jest.fn().mockReturnThis(),
});
describe('buildEndpointOption - defaultParamsEndpoint parsing', () => {
beforeEach(() => {
jest.clearAllMocks();
});
it('should pass defaultParamsEndpoint to parseCompactConvo and preserve maxOutputTokens', async () => {
mockGetEndpointsConfig.mockResolvedValue({
AnthropicClaude: {
type: EModelEndpoint.custom,
customParams: {
defaultParamsEndpoint: EModelEndpoint.anthropic,
},
},
});
const req = createReq(
{
endpoint: 'AnthropicClaude',
endpointType: EModelEndpoint.custom,
model: 'anthropic/claude-opus-4.5',
temperature: 0.7,
maxOutputTokens: 8192,
topP: 0.9,
maxContextTokens: 50000,
},
{ modelSpecs: null },
);
await buildEndpointOption(req, createRes(), jest.fn());
expect(parseCompactConvo).toHaveBeenCalledWith(
expect.objectContaining({
defaultParamsEndpoint: EModelEndpoint.anthropic,
}),
);
const parsedResult = parseCompactConvo.mock.results[0].value;
expect(parsedResult.maxOutputTokens).toBe(8192);
expect(parsedResult.topP).toBe(0.9);
expect(parsedResult.temperature).toBe(0.7);
expect(parsedResult.maxContextTokens).toBe(50000);
});
it('should strip maxOutputTokens when no defaultParamsEndpoint is configured', async () => {
mockGetEndpointsConfig.mockResolvedValue({
MyOpenRouter: {
type: EModelEndpoint.custom,
},
});
const req = createReq(
{
endpoint: 'MyOpenRouter',
endpointType: EModelEndpoint.custom,
model: 'gpt-4o',
temperature: 0.7,
maxOutputTokens: 8192,
max_tokens: 4096,
},
{ modelSpecs: null },
);
await buildEndpointOption(req, createRes(), jest.fn());
expect(parseCompactConvo).toHaveBeenCalledWith(
expect.objectContaining({
defaultParamsEndpoint: undefined,
}),
);
const parsedResult = parseCompactConvo.mock.results[0].value;
expect(parsedResult.maxOutputTokens).toBeUndefined();
expect(parsedResult.max_tokens).toBe(4096);
expect(parsedResult.temperature).toBe(0.7);
});
it('should strip bedrock region from custom endpoint without defaultParamsEndpoint', async () => {
mockGetEndpointsConfig.mockResolvedValue({
MyEndpoint: {
type: EModelEndpoint.custom,
},
});
const req = createReq(
{
endpoint: 'MyEndpoint',
endpointType: EModelEndpoint.custom,
model: 'gpt-4o',
temperature: 0.7,
region: 'us-east-1',
},
{ modelSpecs: null },
);
await buildEndpointOption(req, createRes(), jest.fn());
const parsedResult = parseCompactConvo.mock.results[0].value;
expect(parsedResult.region).toBeUndefined();
expect(parsedResult.temperature).toBe(0.7);
});
it('should pass defaultParamsEndpoint when re-parsing enforced model spec', async () => {
mockGetEndpointsConfig.mockResolvedValue({
AnthropicClaude: {
type: EModelEndpoint.custom,
customParams: {
defaultParamsEndpoint: EModelEndpoint.anthropic,
},
},
});
const modelSpec = {
name: 'claude-opus-4.5',
preset: {
endpoint: 'AnthropicClaude',
endpointType: EModelEndpoint.custom,
model: 'anthropic/claude-opus-4.5',
temperature: 0.7,
maxOutputTokens: 8192,
maxContextTokens: 50000,
},
};
const req = createReq(
{
endpoint: 'AnthropicClaude',
endpointType: EModelEndpoint.custom,
spec: 'claude-opus-4.5',
model: 'anthropic/claude-opus-4.5',
},
{
modelSpecs: {
enforce: true,
list: [modelSpec],
},
},
);
await buildEndpointOption(req, createRes(), jest.fn());
const enforcedCall = parseCompactConvo.mock.calls[1];
expect(enforcedCall[0]).toEqual(
expect.objectContaining({
defaultParamsEndpoint: EModelEndpoint.anthropic,
}),
);
const enforcedResult = parseCompactConvo.mock.results[1].value;
expect(enforcedResult.maxOutputTokens).toBe(8192);
expect(enforcedResult.temperature).toBe(0.7);
expect(enforcedResult.maxContextTokens).toBe(50000);
});
it('should fall back to OpenAI schema when getEndpointsConfig fails', async () => {
mockGetEndpointsConfig.mockRejectedValue(new Error('Config unavailable'));
const req = createReq(
{
endpoint: 'AnthropicClaude',
endpointType: EModelEndpoint.custom,
model: 'anthropic/claude-opus-4.5',
temperature: 0.7,
maxOutputTokens: 8192,
max_tokens: 4096,
},
{ modelSpecs: null },
);
await buildEndpointOption(req, createRes(), jest.fn());
expect(parseCompactConvo).toHaveBeenCalledWith(
expect.objectContaining({
defaultParamsEndpoint: undefined,
}),
);
const parsedResult = parseCompactConvo.mock.results[0].value;
expect(parsedResult.maxOutputTokens).toBeUndefined();
expect(parsedResult.max_tokens).toBe(4096);
});
});

@@ -1,7 +1,12 @@
import { useCallback } from 'react';
import { useRecoilValue } from 'recoil';
import { useGetModelsQuery } from 'librechat-data-provider/react-query';
import { getEndpointField, LocalStorageKeys, isAssistantsEndpoint } from 'librechat-data-provider';
import {
getEndpointField,
LocalStorageKeys,
isAssistantsEndpoint,
getDefaultParamsEndpoint,
} from 'librechat-data-provider';
import type { TEndpointsConfig, EModelEndpoint, TConversation } from 'librechat-data-provider';
import type { AssistantListItem, NewConversationParams } from '~/common';
import useAssistantListMap from '~/hooks/Assistants/useAssistantListMap';
@@ -84,11 +89,13 @@ export default function useAddedResponse() {
}
const models = modelsConfig?.[defaultEndpoint ?? ''] ?? [];
const defaultParamsEndpoint = getDefaultParamsEndpoint(endpointsConfig, defaultEndpoint);
newConversation = buildDefaultConvo({
conversation: newConversation,
lastConversationSetup: preset as TConversation,
endpoint: defaultEndpoint ?? ('' as EModelEndpoint),
models,
defaultParamsEndpoint,
});
if (preset?.title != null && preset.title !== '') {

@@ -13,6 +13,7 @@ import {
parseCompactConvo,
replaceSpecialVars,
isAssistantsEndpoint,
getDefaultParamsEndpoint,
} from 'librechat-data-provider';
import type {
TMessage,
@@ -173,12 +174,14 @@ export default function useChatFunctions({
const startupConfig = queryClient.getQueryData<TStartupConfig>([QueryKeys.startupConfig]);
const endpointType = getEndpointField(endpointsConfig, endpoint, 'type');
const iconURL = conversation?.iconURL;
const defaultParamsEndpoint = getDefaultParamsEndpoint(endpointsConfig, endpoint);
/** This becomes part of the `endpointOption` */
const convo = parseCompactConvo({
endpoint: endpoint as EndpointSchemaKey,
endpointType: endpointType as EndpointSchemaKey,
conversation: conversation ?? {},
defaultParamsEndpoint,
});
const { modelDisplayLabel } = endpointsConfig?.[endpoint ?? ''] ?? {};

@@ -1,5 +1,5 @@
import { excludedKeys } from 'librechat-data-provider';
import { useGetModelsQuery } from 'librechat-data-provider/react-query';
import { excludedKeys, getDefaultParamsEndpoint } from 'librechat-data-provider';
import type {
TEndpointsConfig,
TModelsConfig,
@@ -47,11 +47,14 @@ const useDefaultConvo = () => {
}
}
const defaultParamsEndpoint = getDefaultParamsEndpoint(endpointsConfig, endpoint);
const defaultConvo = buildDefaultConvo({
conversation: conversation as TConversation,
endpoint,
lastConversationSetup: preset as TConversation,
models,
defaultParamsEndpoint,
});
if (!cleanOutput) {

@@ -1,7 +1,12 @@
import { useRecoilValue } from 'recoil';
import { useCallback, useRef, useEffect } from 'react';
import { useGetModelsQuery } from 'librechat-data-provider/react-query';
import { getEndpointField, LocalStorageKeys, isAssistantsEndpoint } from 'librechat-data-provider';
import {
getEndpointField,
LocalStorageKeys,
isAssistantsEndpoint,
getDefaultParamsEndpoint,
} from 'librechat-data-provider';
import type {
TEndpointsConfig,
EModelEndpoint,
@@ -117,11 +122,13 @@ const useGenerateConvo = ({
}
const models = modelsConfig?.[defaultEndpoint ?? ''] ?? [];
const defaultParamsEndpoint = getDefaultParamsEndpoint(endpointsConfig, defaultEndpoint);
conversation = buildDefaultConvo({
conversation,
lastConversationSetup: preset as TConversation,
endpoint: defaultEndpoint ?? ('' as EModelEndpoint),
models,
defaultParamsEndpoint,
});
if (preset?.title != null && preset.title !== '') {

@@ -2,7 +2,13 @@ import { useCallback } from 'react';
import { useSetRecoilState } from 'recoil';
import { useNavigate } from 'react-router-dom';
import { useQueryClient } from '@tanstack/react-query';
import { QueryKeys, Constants, dataService, getEndpointField } from 'librechat-data-provider';
import {
QueryKeys,
Constants,
dataService,
getEndpointField,
getDefaultParamsEndpoint,
} from 'librechat-data-provider';
import type {
TEndpointsConfig,
TStartupConfig,
@@ -106,11 +112,13 @@ const useNavigateToConvo = (index = 0) => {
const models = modelsConfig?.[defaultEndpoint ?? ''] ?? [];
const defaultParamsEndpoint = getDefaultParamsEndpoint(endpointsConfig, defaultEndpoint);
convo = buildDefaultConvo({
models,
conversation,
endpoint: defaultEndpoint,
lastConversationSetup: conversation,
defaultParamsEndpoint,
});
}
clearAllConversations(true);

@@ -14,6 +14,7 @@ import {
LocalStorageKeys,
isEphemeralAgentId,
isAssistantsEndpoint,
getDefaultParamsEndpoint,
} from 'librechat-data-provider';
import type {
TPreset,
@@ -191,11 +192,13 @@ const useNewConvo = (index = 0) => {
}
const models = modelsConfig?.[defaultEndpoint] ?? [];
const defaultParamsEndpoint = getDefaultParamsEndpoint(endpointsConfig, defaultEndpoint);
conversation = buildDefaultConvo({
conversation,
lastConversationSetup: activePreset as TConversation,
endpoint: defaultEndpoint,
models,
defaultParamsEndpoint,
});
}

@@ -0,0 +1,202 @@
import { EModelEndpoint } from 'librechat-data-provider';
import type { TConversation } from 'librechat-data-provider';
import buildDefaultConvo from '../buildDefaultConvo';
jest.mock('../localStorage', () => ({
getLocalStorageItems: jest.fn(() => ({
lastSelectedModel: {},
lastSelectedTools: [],
lastConversationSetup: {},
})),
}));
const baseConversation: TConversation = {
conversationId: 'test-convo-id',
title: 'Test Conversation',
createdAt: '2024-01-01T00:00:00Z',
updatedAt: '2024-01-01T00:00:00Z',
endpoint: null,
};
describe('buildDefaultConvo - defaultParamsEndpoint', () => {
describe('custom endpoint with defaultParamsEndpoint: anthropic', () => {
const models = ['anthropic/claude-opus-4.5', 'anthropic/claude-sonnet-4'];
it('should preserve maxOutputTokens from model spec preset', () => {
const preset: TConversation = {
...baseConversation,
endpoint: 'AnthropicClaude' as EModelEndpoint,
endpointType: EModelEndpoint.custom,
model: 'anthropic/claude-opus-4.5',
temperature: 0.7,
maxOutputTokens: 8192,
topP: 0.9,
maxContextTokens: 50000,
};
const result = buildDefaultConvo({
models,
conversation: baseConversation,
endpoint: 'AnthropicClaude' as EModelEndpoint,
lastConversationSetup: preset,
defaultParamsEndpoint: EModelEndpoint.anthropic,
});
expect(result.maxOutputTokens).toBe(8192);
expect(result.topP).toBe(0.9);
expect(result.temperature).toBe(0.7);
expect(result.maxContextTokens).toBe(50000);
expect(result.model).toBe('anthropic/claude-opus-4.5');
});
it('should strip maxOutputTokens without defaultParamsEndpoint', () => {
const preset: TConversation = {
...baseConversation,
endpoint: 'AnthropicClaude' as EModelEndpoint,
endpointType: EModelEndpoint.custom,
model: 'anthropic/claude-opus-4.5',
temperature: 0.7,
maxOutputTokens: 8192,
};
const result = buildDefaultConvo({
models,
conversation: baseConversation,
endpoint: 'AnthropicClaude' as EModelEndpoint,
lastConversationSetup: preset,
});
expect(result.maxOutputTokens).toBeUndefined();
expect(result.temperature).toBe(0.7);
});
it('should strip OpenAI-specific fields when using anthropic params', () => {
const preset: TConversation = {
...baseConversation,
endpoint: 'AnthropicClaude' as EModelEndpoint,
endpointType: EModelEndpoint.custom,
model: 'anthropic/claude-opus-4.5',
max_tokens: 4096,
top_p: 0.9,
presence_penalty: 0.5,
frequency_penalty: 0.3,
};
const result = buildDefaultConvo({
models,
conversation: baseConversation,
endpoint: 'AnthropicClaude' as EModelEndpoint,
lastConversationSetup: preset,
defaultParamsEndpoint: EModelEndpoint.anthropic,
});
expect(result.max_tokens).toBeUndefined();
expect(result.top_p).toBeUndefined();
expect(result.presence_penalty).toBeUndefined();
expect(result.frequency_penalty).toBeUndefined();
});
});
describe('custom endpoint without defaultParamsEndpoint (OpenAI default)', () => {
const models = ['gpt-4o', 'gpt-4.1'];
it('should preserve OpenAI fields and strip anthropic fields', () => {
const preset: TConversation = {
...baseConversation,
endpoint: 'MyOpenRouterEndpoint' as EModelEndpoint,
endpointType: EModelEndpoint.custom,
model: 'gpt-4o',
temperature: 0.7,
max_tokens: 4096,
top_p: 0.9,
maxOutputTokens: 8192,
};
const result = buildDefaultConvo({
models,
conversation: baseConversation,
endpoint: 'MyOpenRouterEndpoint' as EModelEndpoint,
lastConversationSetup: preset,
});
expect(result.max_tokens).toBe(4096);
expect(result.top_p).toBe(0.9);
expect(result.temperature).toBe(0.7);
expect(result.maxOutputTokens).toBeUndefined();
});
});
describe('custom endpoint with defaultParamsEndpoint: google', () => {
const models = ['gemini-pro', 'gemini-1.5-pro'];
it('should preserve Google-specific fields', () => {
const preset: TConversation = {
...baseConversation,
endpoint: 'MyGoogleEndpoint' as EModelEndpoint,
endpointType: EModelEndpoint.custom,
model: 'gemini-pro',
temperature: 0.7,
maxOutputTokens: 8192,
topP: 0.9,
topK: 40,
};
const result = buildDefaultConvo({
models,
conversation: baseConversation,
endpoint: 'MyGoogleEndpoint' as EModelEndpoint,
lastConversationSetup: preset,
defaultParamsEndpoint: EModelEndpoint.google,
});
expect(result.maxOutputTokens).toBe(8192);
expect(result.topP).toBe(0.9);
expect(result.topK).toBe(40);
});
});
describe('cross-endpoint field isolation', () => {
it('should not carry bedrock region to a custom endpoint', () => {
const preset: TConversation = {
...baseConversation,
endpoint: 'MyChatEndpoint' as EModelEndpoint,
endpointType: EModelEndpoint.custom,
model: 'gpt-4o',
temperature: 0.7,
region: 'us-east-1',
};
const result = buildDefaultConvo({
models: ['gpt-4o'],
conversation: baseConversation,
endpoint: 'MyChatEndpoint' as EModelEndpoint,
lastConversationSetup: preset,
});
expect(result.region).toBeUndefined();
expect(result.temperature).toBe(0.7);
});
it('should not carry bedrock region even with anthropic defaultParamsEndpoint', () => {
const preset: TConversation = {
...baseConversation,
endpoint: 'MyChatEndpoint' as EModelEndpoint,
endpointType: EModelEndpoint.custom,
model: 'claude-3-opus',
region: 'us-east-1',
maxOutputTokens: 8192,
};
const result = buildDefaultConvo({
models: ['claude-3-opus'],
conversation: baseConversation,
endpoint: 'MyChatEndpoint' as EModelEndpoint,
lastConversationSetup: preset,
defaultParamsEndpoint: EModelEndpoint.anthropic,
});
expect(result.region).toBeUndefined();
expect(result.maxOutputTokens).toBe(8192);
});
});
});

@@ -0,0 +1,119 @@
import { EModelEndpoint } from 'librechat-data-provider';
import cleanupPreset from '../cleanupPreset';
/**
* Integration tests for cleanupPreset: NO mocks.
* Uses the real parseConvo to verify actual schema behavior
* with defaultParamsEndpoint for custom endpoints.
*/
describe('cleanupPreset - real parsing with defaultParamsEndpoint', () => {
it('should preserve maxOutputTokens when defaultParamsEndpoint is anthropic', () => {
const preset = {
presetId: 'test-id',
title: 'Claude Opus',
endpoint: 'AnthropicClaude',
endpointType: EModelEndpoint.custom,
model: 'anthropic/claude-opus-4.5',
temperature: 0.7,
maxOutputTokens: 8192,
topP: 0.9,
maxContextTokens: 50000,
};
const result = cleanupPreset({
preset,
defaultParamsEndpoint: EModelEndpoint.anthropic,
});
expect(result.maxOutputTokens).toBe(8192);
expect(result.topP).toBe(0.9);
expect(result.temperature).toBe(0.7);
expect(result.maxContextTokens).toBe(50000);
expect(result.model).toBe('anthropic/claude-opus-4.5');
});
it('should strip maxOutputTokens without defaultParamsEndpoint (OpenAI schema)', () => {
const preset = {
presetId: 'test-id',
title: 'GPT Custom',
endpoint: 'MyOpenRouter',
endpointType: EModelEndpoint.custom,
model: 'gpt-4o',
temperature: 0.7,
maxOutputTokens: 8192,
max_tokens: 4096,
};
const result = cleanupPreset({ preset });
expect(result.maxOutputTokens).toBeUndefined();
expect(result.max_tokens).toBe(4096);
expect(result.temperature).toBe(0.7);
});
it('should strip OpenAI-specific fields when using anthropic params', () => {
const preset = {
presetId: 'test-id',
title: 'Claude Custom',
endpoint: 'AnthropicClaude',
endpointType: EModelEndpoint.custom,
model: 'anthropic/claude-3-opus',
max_tokens: 4096,
top_p: 0.9,
presence_penalty: 0.5,
frequency_penalty: 0.3,
temperature: 0.7,
};
const result = cleanupPreset({
preset,
defaultParamsEndpoint: EModelEndpoint.anthropic,
});
expect(result.max_tokens).toBeUndefined();
expect(result.top_p).toBeUndefined();
expect(result.presence_penalty).toBeUndefined();
expect(result.frequency_penalty).toBeUndefined();
expect(result.temperature).toBe(0.7);
});
it('should not carry bedrock region to custom endpoint', () => {
const preset = {
presetId: 'test-id',
title: 'Custom',
endpoint: 'MyEndpoint',
endpointType: EModelEndpoint.custom,
model: 'gpt-4o',
temperature: 0.7,
region: 'us-east-1',
};
const result = cleanupPreset({ preset });
expect(result.region).toBeUndefined();
expect(result.temperature).toBe(0.7);
});
it('should preserve Google-specific fields when defaultParamsEndpoint is google', () => {
const preset = {
presetId: 'test-id',
title: 'Gemini Custom',
endpoint: 'MyGoogleEndpoint',
endpointType: EModelEndpoint.custom,
model: 'gemini-pro',
temperature: 0.7,
maxOutputTokens: 8192,
topP: 0.9,
topK: 40,
};
const result = cleanupPreset({
preset,
defaultParamsEndpoint: EModelEndpoint.google,
});
expect(result.maxOutputTokens).toBe(8192);
expect(result.topP).toBe(0.9);
expect(result.topK).toBe(40);
});
});

@@ -1,12 +1,9 @@
import { EModelEndpoint } from 'librechat-data-provider';
import { EModelEndpoint, parseConvo } from 'librechat-data-provider';
import cleanupPreset from '../cleanupPreset';
import type { TPreset } from 'librechat-data-provider';
// Mock parseConvo since we're focusing on testing the chatGptLabel migration logic
jest.mock('librechat-data-provider', () => ({
...jest.requireActual('librechat-data-provider'),
parseConvo: jest.fn((input) => {
// Return a simplified mock that passes through most properties
const { conversation } = input;
return {
...conversation,
@@ -221,4 +218,41 @@ describe('cleanupPreset', () => {
expect(result.presetId).toBeNull();
});
});
describe('defaultParamsEndpoint threading', () => {
it('should pass defaultParamsEndpoint to parseConvo', () => {
const preset = {
...basePreset,
endpoint: 'MyCustomEndpoint',
endpointType: EModelEndpoint.custom,
};
cleanupPreset({
preset,
defaultParamsEndpoint: EModelEndpoint.anthropic,
});
expect(parseConvo).toHaveBeenCalledWith(
expect.objectContaining({
defaultParamsEndpoint: EModelEndpoint.anthropic,
}),
);
});
it('should pass undefined defaultParamsEndpoint when not provided', () => {
const preset = {
...basePreset,
endpoint: 'MyCustomEndpoint',
endpointType: EModelEndpoint.custom,
};
cleanupPreset({ preset });
expect(parseConvo).toHaveBeenCalledWith(
expect.objectContaining({
defaultParamsEndpoint: undefined,
}),
);
});
});
});

@@ -14,11 +14,13 @@ const buildDefaultConvo = ({
conversation,
endpoint = null,
lastConversationSetup,
defaultParamsEndpoint,
}: {
models: string[];
conversation: TConversation;
endpoint?: EModelEndpoint | null;
lastConversationSetup: TConversation | null;
defaultParamsEndpoint?: string | null;
}): TConversation => {
const { lastSelectedModel, lastSelectedTools } = getLocalStorageItems();
const endpointType = lastConversationSetup?.endpointType ?? conversation.endpointType;
@@ -49,6 +51,7 @@ const buildDefaultConvo = ({
possibleValues: {
models: possibleModels,
},
defaultParamsEndpoint,
});
const defaultConvo = {

@@ -4,9 +4,10 @@ import type { TPreset } from 'librechat-data-provider';
type UIPreset = Partial<TPreset> & { presetOverride?: Partial<TPreset> };
type TCleanupPreset = {
preset?: UIPreset;
defaultParamsEndpoint?: string | null;
};
const cleanupPreset = ({ preset: _preset }: TCleanupPreset): TPreset => {
const cleanupPreset = ({ preset: _preset, defaultParamsEndpoint }: TCleanupPreset): TPreset => {
const { endpoint, endpointType } = _preset ?? ({} as UIPreset);
if (endpoint == null || endpoint === '') {
console.error(`Unknown endpoint ${endpoint}`, _preset);
@@ -35,8 +36,13 @@ const cleanupPreset = ({ preset: _preset, defaultParamsEndpoint }: TCleanupPreset): TPreset => {
delete preset.chatGptLabel;
}
/* @ts-ignore: endpoint can be a custom defined name */
const parsedPreset = parseConvo({ endpoint, endpointType, conversation: preset });
const parsedPreset = parseConvo({
/* @ts-ignore: endpoint can be a custom defined name */
endpoint,
endpointType,
conversation: preset,
defaultParamsEndpoint,
});
return {
presetId: _preset?.presetId ?? null,

@@ -0,0 +1,284 @@
import { Providers } from '@librechat/agents';
import { EModelEndpoint } from 'librechat-data-provider';
import type { Agent } from 'librechat-data-provider';
import type { ServerRequest, InitializeResultBase } from '~/types';
import type { InitializeAgentDbMethods } from '../initialize';
// Mock logger
jest.mock('winston', () => ({
createLogger: jest.fn(() => ({
debug: jest.fn(),
warn: jest.fn(),
error: jest.fn(),
})),
format: {
combine: jest.fn(),
colorize: jest.fn(),
simple: jest.fn(),
},
transports: {
Console: jest.fn(),
},
}));
const mockExtractLibreChatParams = jest.fn();
const mockGetModelMaxTokens = jest.fn();
const mockOptionalChainWithEmptyCheck = jest.fn();
const mockGetThreadData = jest.fn();
jest.mock('~/utils', () => ({
extractLibreChatParams: (...args: unknown[]) => mockExtractLibreChatParams(...args),
getModelMaxTokens: (...args: unknown[]) => mockGetModelMaxTokens(...args),
optionalChainWithEmptyCheck: (...args: unknown[]) => mockOptionalChainWithEmptyCheck(...args),
getThreadData: (...args: unknown[]) => mockGetThreadData(...args),
}));
const mockGetProviderConfig = jest.fn();
jest.mock('~/endpoints', () => ({
getProviderConfig: (...args: unknown[]) => mockGetProviderConfig(...args),
}));
jest.mock('~/files', () => ({
filterFilesByEndpointConfig: jest.fn(() => []),
}));
jest.mock('~/prompts', () => ({
generateArtifactsPrompt: jest.fn(() => null),
}));
jest.mock('../resources', () => ({
primeResources: jest.fn().mockResolvedValue({
attachments: [],
tool_resources: undefined,
}),
}));
import { initializeAgent } from '../initialize';
/**
* Creates minimal mock objects for initializeAgent tests.
*/
function createMocks(overrides?: {
maxContextTokens?: number;
modelDefault?: number;
maxOutputTokens?: number;
}) {
const { maxContextTokens, modelDefault = 200000, maxOutputTokens = 4096 } = overrides ?? {};
const agent = {
id: 'agent-1',
model: 'test-model',
provider: Providers.OPENAI,
tools: [],
model_parameters: { model: 'test-model' },
} as unknown as Agent;
const req = {
user: { id: 'user-1' },
config: {},
} as unknown as ServerRequest;
const res = {} as unknown as import('express').Response;
const mockGetOptions = jest.fn().mockResolvedValue({
llmConfig: {
model: 'test-model',
maxTokens: maxOutputTokens,
},
endpointTokenConfig: undefined,
} satisfies InitializeResultBase);
mockGetProviderConfig.mockReturnValue({
getOptions: mockGetOptions,
overrideProvider: Providers.OPENAI,
});
// extractLibreChatParams returns maxContextTokens when provided in model_parameters
mockExtractLibreChatParams.mockReturnValue({
resendFiles: false,
maxContextTokens,
modelOptions: { model: 'test-model' },
});
// getModelMaxTokens returns the model's default context window
mockGetModelMaxTokens.mockReturnValue(modelDefault);
// Implement real optionalChainWithEmptyCheck behavior
mockOptionalChainWithEmptyCheck.mockImplementation(
(...values: (string | number | undefined)[]) => {
for (const v of values) {
if (v !== undefined && v !== null && v !== '') {
return v;
}
}
return values[values.length - 1];
},
);
const loadTools = jest.fn().mockResolvedValue({
tools: [],
toolContextMap: {},
userMCPAuthMap: undefined,
toolRegistry: undefined,
toolDefinitions: [],
hasDeferredTools: false,
});
const db: InitializeAgentDbMethods = {
getFiles: jest.fn().mockResolvedValue([]),
getConvoFiles: jest.fn().mockResolvedValue([]),
updateFilesUsage: jest.fn().mockResolvedValue([]),
getUserKey: jest.fn().mockResolvedValue('user-1'),
getUserKeyValues: jest.fn().mockResolvedValue([]),
getToolFilesByIds: jest.fn().mockResolvedValue([]),
};
return { agent, req, res, loadTools, db };
}
describe('initializeAgent — maxContextTokens', () => {
beforeEach(() => {
jest.clearAllMocks();
});
it('uses user-configured maxContextTokens when provided via model_parameters', async () => {
const userValue = 50000;
const { agent, req, res, loadTools, db } = createMocks({
maxContextTokens: userValue,
modelDefault: 200000,
maxOutputTokens: 4096,
});
const result = await initializeAgent(
{
req,
res,
agent,
loadTools,
endpointOption: {
endpoint: EModelEndpoint.agents,
model_parameters: { maxContextTokens: userValue },
},
allowedProviders: new Set([Providers.OPENAI]),
isInitialAgent: true,
},
db,
);
expect(result.maxContextTokens).toBe(userValue);
});
it('falls back to formula when maxContextTokens is NOT provided', async () => {
const modelDefault = 200000;
const maxOutputTokens = 4096;
const { agent, req, res, loadTools, db } = createMocks({
maxContextTokens: undefined,
modelDefault,
maxOutputTokens,
});
const result = await initializeAgent(
{
req,
res,
agent,
loadTools,
endpointOption: { endpoint: EModelEndpoint.agents },
allowedProviders: new Set([Providers.OPENAI]),
isInitialAgent: true,
},
db,
);
const expected = Math.round((modelDefault - maxOutputTokens) * 0.9);
expect(result.maxContextTokens).toBe(expected);
});
it('falls back to formula when maxContextTokens is 0', async () => {
const maxOutputTokens = 4096;
const { agent, req, res, loadTools, db } = createMocks({
maxContextTokens: 0,
modelDefault: 200000,
maxOutputTokens,
});
const result = await initializeAgent(
{
req,
res,
agent,
loadTools,
endpointOption: {
endpoint: EModelEndpoint.agents,
model_parameters: { maxContextTokens: 0 },
},
allowedProviders: new Set([Providers.OPENAI]),
isInitialAgent: true,
},
db,
);
// 0 is not used as-is; the formula kicks in.
// optionalChainWithEmptyCheck(0, 200000, 18000) returns 0 (not null/undefined),
// then Number(0) || 18000 = 18000 (the fallback default).
expect(result.maxContextTokens).not.toBe(0);
const expected = Math.round((18000 - maxOutputTokens) * 0.9);
expect(result.maxContextTokens).toBe(expected);
});
it('falls back to formula when maxContextTokens is negative', async () => {
const maxOutputTokens = 4096;
const { agent, req, res, loadTools, db } = createMocks({
maxContextTokens: -1,
modelDefault: 200000,
maxOutputTokens,
});
const result = await initializeAgent(
{
req,
res,
agent,
loadTools,
endpointOption: {
endpoint: EModelEndpoint.agents,
model_parameters: { maxContextTokens: -1 },
},
allowedProviders: new Set([Providers.OPENAI]),
isInitialAgent: true,
},
db,
);
// -1 is not used as-is; the formula kicks in
expect(result.maxContextTokens).not.toBe(-1);
});
it('preserves small user-configured value (e.g. 1000 from modelSpec)', async () => {
const userValue = 1000;
const { agent, req, res, loadTools, db } = createMocks({
maxContextTokens: userValue,
modelDefault: 128000,
maxOutputTokens: 4096,
});
const result = await initializeAgent(
{
req,
res,
agent,
loadTools,
endpointOption: {
endpoint: EModelEndpoint.agents,
model_parameters: { maxContextTokens: userValue },
},
allowedProviders: new Set([Providers.OPENAI]),
isInitialAgent: true,
},
db,
);
// Should NOT be overridden to Math.round((128000 - 4096) * 0.9) = 111,514
expect(result.maxContextTokens).toBe(userValue);
});
});

@@ -413,7 +413,10 @@ export async function initializeAgent(
toolContextMap: toolContextMap ?? {},
useLegacyContent: !!options.useLegacyContent,
tools: (tools ?? []) as GenericTool[] & string[],
maxContextTokens: Math.round((agentMaxContextNum - maxOutputTokensNum) * 0.9),
maxContextTokens:
maxContextTokens != null && maxContextTokens > 0
? maxContextTokens
: Math.round((agentMaxContextNum - maxOutputTokensNum) * 0.9),
};
return initializedAgent;
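
For concreteness, with the defaults used in the tests above (model context window 200000, max output tokens 4096), the calculated fallback is Math.round((200000 - 4096) * 0.9) = 176314, while any positive user-supplied maxContextTokens passes through unchanged.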

@@ -1,4 +1,4 @@
import { replaceSpecialVars, parseCompactConvo, parseTextParts } from '../src/parsers';
import { replaceSpecialVars, parseConvo, parseCompactConvo, parseTextParts } from '../src/parsers';
import { specialVariables } from '../src/config';
import { EModelEndpoint } from '../src/schemas';
import { ContentTypes } from '../src/types/runs';
@@ -262,6 +262,257 @@ describe('parseCompactConvo', () => {
});
});
describe('parseConvo - defaultParamsEndpoint', () => {
test('should strip maxOutputTokens for custom endpoint without defaultParamsEndpoint', () => {
const conversation: Partial<TConversation> = {
model: 'anthropic/claude-opus-4.5',
temperature: 0.7,
maxOutputTokens: 8192,
maxContextTokens: 50000,
};
const result = parseConvo({
endpoint: 'MyCustomEndpoint' as EModelEndpoint,
endpointType: EModelEndpoint.custom,
conversation,
});
expect(result).not.toBeNull();
expect(result?.temperature).toBe(0.7);
expect(result?.maxContextTokens).toBe(50000);
expect(result?.maxOutputTokens).toBeUndefined();
});
test('should preserve maxOutputTokens when defaultParamsEndpoint is anthropic', () => {
const conversation: Partial<TConversation> = {
model: 'anthropic/claude-opus-4.5',
temperature: 0.7,
maxOutputTokens: 8192,
topP: 0.9,
topK: 40,
maxContextTokens: 50000,
};
const result = parseConvo({
endpoint: 'MyCustomEndpoint' as EModelEndpoint,
endpointType: EModelEndpoint.custom,
conversation,
defaultParamsEndpoint: EModelEndpoint.anthropic,
});
expect(result).not.toBeNull();
expect(result?.model).toBe('anthropic/claude-opus-4.5');
expect(result?.temperature).toBe(0.7);
expect(result?.maxOutputTokens).toBe(8192);
expect(result?.topP).toBe(0.9);
expect(result?.topK).toBe(40);
expect(result?.maxContextTokens).toBe(50000);
});
test('should strip OpenAI-specific fields when defaultParamsEndpoint is anthropic', () => {
const conversation: Partial<TConversation> = {
model: 'anthropic/claude-opus-4.5',
temperature: 0.7,
max_tokens: 4096,
top_p: 0.9,
presence_penalty: 0.5,
frequency_penalty: 0.3,
};
const result = parseConvo({
endpoint: 'MyCustomEndpoint' as EModelEndpoint,
endpointType: EModelEndpoint.custom,
conversation,
defaultParamsEndpoint: EModelEndpoint.anthropic,
});
expect(result).not.toBeNull();
expect(result?.temperature).toBe(0.7);
expect(result?.max_tokens).toBeUndefined();
expect(result?.top_p).toBeUndefined();
expect(result?.presence_penalty).toBeUndefined();
expect(result?.frequency_penalty).toBeUndefined();
});
test('should preserve max_tokens when defaultParamsEndpoint is not set (OpenAI default)', () => {
const conversation: Partial<TConversation> = {
model: 'gpt-4o',
temperature: 0.7,
max_tokens: 4096,
top_p: 0.9,
};
const result = parseConvo({
endpoint: 'MyCustomEndpoint' as EModelEndpoint,
endpointType: EModelEndpoint.custom,
conversation,
});
expect(result).not.toBeNull();
expect(result?.max_tokens).toBe(4096);
expect(result?.top_p).toBe(0.9);
});
test('should preserve Google-specific fields when defaultParamsEndpoint is google', () => {
const conversation: Partial<TConversation> = {
model: 'gemini-pro',
temperature: 0.7,
maxOutputTokens: 8192,
topP: 0.9,
topK: 40,
};
const result = parseConvo({
endpoint: 'MyCustomEndpoint' as EModelEndpoint,
endpointType: EModelEndpoint.custom,
conversation,
defaultParamsEndpoint: EModelEndpoint.google,
});
expect(result).not.toBeNull();
expect(result?.maxOutputTokens).toBe(8192);
expect(result?.topP).toBe(0.9);
expect(result?.topK).toBe(40);
});
test('should not strip fields from non-custom endpoints that already have a schema', () => {
const conversation: Partial<TConversation> = {
model: 'gpt-4o',
temperature: 0.7,
max_tokens: 4096,
top_p: 0.9,
};
const result = parseConvo({
endpoint: EModelEndpoint.openAI,
conversation,
defaultParamsEndpoint: EModelEndpoint.anthropic,
});
expect(result).not.toBeNull();
expect(result?.max_tokens).toBe(4096);
expect(result?.top_p).toBe(0.9);
});
test('should not carry bedrock region to custom endpoint without defaultParamsEndpoint', () => {
const conversation: Partial<TConversation> = {
model: 'gpt-4o',
temperature: 0.7,
region: 'us-east-1',
};
const result = parseConvo({
endpoint: 'MyCustomEndpoint' as EModelEndpoint,
endpointType: EModelEndpoint.custom,
conversation,
});
expect(result).not.toBeNull();
expect(result?.temperature).toBe(0.7);
expect(result?.region).toBeUndefined();
});
test('should fall back to endpointType schema when defaultParamsEndpoint is invalid', () => {
const conversation: Partial<TConversation> = {
model: 'gpt-4o',
temperature: 0.7,
max_tokens: 4096,
maxOutputTokens: 8192,
};
const result = parseConvo({
endpoint: 'MyCustomEndpoint' as EModelEndpoint,
endpointType: EModelEndpoint.custom,
conversation,
defaultParamsEndpoint: 'nonexistent_endpoint',
});
expect(result).not.toBeNull();
expect(result?.max_tokens).toBe(4096);
expect(result?.maxOutputTokens).toBeUndefined();
});
});
describe('parseCompactConvo - defaultParamsEndpoint', () => {
test('should strip maxOutputTokens for custom endpoint without defaultParamsEndpoint', () => {
const conversation: Partial<TConversation> = {
model: 'anthropic/claude-opus-4.5',
temperature: 0.7,
maxOutputTokens: 8192,
};
const result = parseCompactConvo({
endpoint: 'MyCustomEndpoint' as EModelEndpoint,
endpointType: EModelEndpoint.custom,
conversation,
});
expect(result).not.toBeNull();
expect(result?.temperature).toBe(0.7);
expect(result?.maxOutputTokens).toBeUndefined();
});
test('should preserve maxOutputTokens when defaultParamsEndpoint is anthropic', () => {
const conversation: Partial<TConversation> = {
model: 'anthropic/claude-opus-4.5',
temperature: 0.7,
maxOutputTokens: 8192,
topP: 0.9,
maxContextTokens: 50000,
};
const result = parseCompactConvo({
endpoint: 'MyCustomEndpoint' as EModelEndpoint,
endpointType: EModelEndpoint.custom,
conversation,
defaultParamsEndpoint: EModelEndpoint.anthropic,
});
expect(result).not.toBeNull();
expect(result?.maxOutputTokens).toBe(8192);
expect(result?.topP).toBe(0.9);
expect(result?.maxContextTokens).toBe(50000);
});
test('should strip iconURL even when defaultParamsEndpoint is set', () => {
const conversation: Partial<TConversation> = {
model: 'anthropic/claude-opus-4.5',
iconURL: 'https://malicious.com/track.png',
maxOutputTokens: 8192,
};
const result = parseCompactConvo({
endpoint: 'MyCustomEndpoint' as EModelEndpoint,
endpointType: EModelEndpoint.custom,
conversation,
defaultParamsEndpoint: EModelEndpoint.anthropic,
});
expect(result).not.toBeNull();
expect(result?.['iconURL']).toBeUndefined();
expect(result?.maxOutputTokens).toBe(8192);
});
test('should fall back to endpointType when defaultParamsEndpoint is null', () => {
const conversation: Partial<TConversation> = {
model: 'gpt-4o',
max_tokens: 4096,
maxOutputTokens: 8192,
};
const result = parseCompactConvo({
endpoint: 'MyCustomEndpoint' as EModelEndpoint,
endpointType: EModelEndpoint.custom,
conversation,
defaultParamsEndpoint: null,
});
expect(result).not.toBeNull();
expect(result?.max_tokens).toBe(4096);
expect(result?.maxOutputTokens).toBeUndefined();
});
});
describe('parseTextParts', () => {
test('should concatenate text parts', () => {
const parts: TMessageContentParts[] = [

@@ -1908,3 +1908,14 @@ export function getEndpointField<
}
return config[property];
}
/** Resolves the `defaultParamsEndpoint` for a given endpoint from its custom params config */
export function getDefaultParamsEndpoint(
endpointsConfig: TEndpointsConfig | undefined | null,
endpoint: string | null | undefined,
): string | undefined {
if (!endpointsConfig || !endpoint) {
return undefined;
}
return endpointsConfig[endpoint]?.customParams?.defaultParamsEndpoint;
}
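
Given a config shaped like the test fixtures above, the helper is a null-safe lookup. A usage sketch (the config literal here is illustrative, not from the changeset):

  const endpointsConfig = {
    AnthropicClaude: {
      type: EModelEndpoint.custom,
      customParams: { defaultParamsEndpoint: EModelEndpoint.anthropic },
    },
  };
  getDefaultParamsEndpoint(endpointsConfig, 'AnthropicClaude'); // => 'anthropic'
  getDefaultParamsEndpoint(endpointsConfig, 'SomeOtherEndpoint'); // => undefined
  getDefaultParamsEndpoint(undefined, 'AnthropicClaude'); // => undefined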

@@ -144,26 +144,25 @@ export const parseConvo = ({
endpointType,
conversation,
possibleValues,
defaultParamsEndpoint,
}: {
endpoint: EndpointSchemaKey;
endpointType?: EndpointSchemaKey | null;
conversation: Partial<s.TConversation | s.TPreset> | null;
possibleValues?: TPossibleValues;
// TODO: POC for default schema
// defaultSchema?: Partial<EndpointSchema>,
defaultParamsEndpoint?: string | null;
}) => {
let schema = endpointSchemas[endpoint] as EndpointSchema | undefined;
if (!schema && !endpointType) {
throw new Error(`Unknown endpoint: ${endpoint}`);
} else if (!schema && endpointType) {
schema = endpointSchemas[endpointType];
} else if (!schema) {
const overrideSchema = defaultParamsEndpoint
? endpointSchemas[defaultParamsEndpoint as EndpointSchemaKey]
: undefined;
schema = overrideSchema ?? (endpointType ? endpointSchemas[endpointType] : undefined);
}
// if (defaultSchema && schemaCreators[endpoint]) {
// schema = schemaCreators[endpoint](defaultSchema);
// }
const convo = schema?.parse(conversation) as s.TConversation | undefined;
const { models } = possibleValues ?? {};
@@ -310,13 +309,13 @@ export const parseCompactConvo = ({
endpointType,
conversation,
possibleValues,
defaultParamsEndpoint,
}: {
endpoint?: EndpointSchemaKey;
endpointType?: EndpointSchemaKey | null;
conversation: Partial<s.TConversation | s.TPreset>;
possibleValues?: TPossibleValues;
// TODO: POC for default schema
// defaultSchema?: Partial<EndpointSchema>,
defaultParamsEndpoint?: string | null;
}): Omit<s.TConversation, 'iconURL'> | null => {
if (!endpoint) {
throw new Error(`undefined endpoint: ${endpoint}`);
@@ -326,8 +325,11 @@
if (!schema && !endpointType) {
throw new Error(`Unknown endpoint: ${endpoint}`);
} else if (!schema && endpointType) {
schema = compactEndpointSchemas[endpointType];
} else if (!schema) {
const overrideSchema = defaultParamsEndpoint
? compactEndpointSchemas[defaultParamsEndpoint as EndpointSchemaKey]
: undefined;
schema = overrideSchema ?? (endpointType ? compactEndpointSchemas[endpointType] : undefined);
}
if (!schema) {