Mirror of https://github.com/danny-avila/LibreChat.git (synced 2026-02-16 07:28:09 +01:00)
🎭 feat: Override Custom Endpoint Schema with Specified Params Endpoint (#11788)
* 🔧 refactor: Simplify payload parsing and enhance getSaveOptions logic
  - Removed the unused bedrockInputSchema from payloadParser, streamlining the function.
  - Updated payloadParser to use optional chaining for model parameters.
  - Enhanced getSaveOptions so that runOptions defaults to an empty object if parsing fails, improving robustness.
  - Adjusted the assignment of maxContextTokens to use the instance variable for consistency.

* 🔧 fix: Update maxContextTokens assignment logic in initializeAgent function
  - Enhanced the maxContextTokens assignment to allow user-defined values, defaulting to a calculated value only when the provided value is missing or invalid. This change improves flexibility in agent initialization.

* 🧪 test: Add unit tests for initializeAgent function
  - Introduced comprehensive unit tests for the initializeAgent function, focusing on maxContextTokens behavior.
  - Tests cover user-defined values, fallback calculations, and edge cases such as zero and negative values, improving coverage and reliability of the agent initialization logic.

* refactor: Default-params endpoint configuration handling
  - Integrated `getEndpointsConfig` to fetch endpoint configurations, allowing dynamic handling of `defaultParamsEndpoint`.
  - Updated `buildEndpointOption` to pass `defaultParamsEndpoint` to `parseCompactConvo`, ensuring correct parameter handling based on endpoint type.
  - Added comprehensive unit tests for `buildDefaultConvo` and `cleanupPreset` to validate behavior with `defaultParamsEndpoint` across various scenarios and edge cases.
  - Refactored related hooks and utility functions to support the new configuration structure, improving overall flexibility and maintainability.

* refactor: Centralize defaultParamsEndpoint retrieval
  - Introduced a `getDefaultParamsEndpoint` function to streamline retrieval of `defaultParamsEndpoint` across hooks and middleware.
  - Updated multiple files to use the new function, improving code consistency and maintainability.
  - Removed redundant logic for fetching `defaultParamsEndpoint`, simplifying the codebase.
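The initializeAgent fix above boils down to preferring a valid user-defined maxContextTokens and computing a value only as a fallback. A minimal TypeScript sketch of that rule follows; the helper name and the call site are illustrative placeholders, not identifiers taken from this commit.

// Hedged sketch of the fallback described in the commit message: keep a
// user-defined maxContextTokens only when it is a positive, finite number;
// otherwise derive one. Names here are illustrative, not the actual code.
function resolveMaxContextTokens(
  userValue: number | null | undefined,
  calculateDefault: () => number,
): number {
  if (typeof userValue === 'number' && Number.isFinite(userValue) && userValue > 0) {
    return userValue;
  }
  // Zero, negative, NaN, null, and undefined all fall back to the calculated value.
  return calculateDefault();
}

// Illustrative call site (hypothetical names):
// agent.maxContextTokens = resolveMaxContextTokens(
//   providedOptions?.maxContextTokens,
//   () => computeContextTokensForModel(agent.model),
// );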
parent 6cc6ee3207
commit 467df0f07a
19 changed files with 1234 additions and 45 deletions
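The client-side diff below threads a defaultParamsEndpoint value into buildDefaultConvo, cleanupPreset, parseConvo, and parseCompactConvo, resolving it via getDefaultParamsEndpoint(endpointsConfig, endpoint). The helper's implementation is not part of this diff; the following is a minimal sketch inferred from those call sites, and the config field it reads is an assumption rather than the library's confirmed shape.

// Minimal sketch, not the librechat-data-provider implementation.
// Inferred from call sites such as getDefaultParamsEndpoint(endpointsConfig, endpoint):
// it returns the endpoint whose parameter schema should be applied
// (e.g. 'anthropic' or 'google'), or undefined when no override is configured.
type EndpointConfigEntry = { defaultParamsEndpoint?: string | null }; // field name assumed
type EndpointsConfigLike = Record<string, EndpointConfigEntry | undefined> | undefined;

function getDefaultParamsEndpoint(
  endpointsConfig: EndpointsConfigLike,
  endpoint?: string | null,
): string | undefined {
  if (endpointsConfig == null || endpoint == null || endpoint === '') {
    return undefined;
  }
  return endpointsConfig[endpoint]?.defaultParamsEndpoint ?? undefined;
}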
@@ -1,7 +1,12 @@
 import { useCallback } from 'react';
 import { useRecoilValue } from 'recoil';
 import { useGetModelsQuery } from 'librechat-data-provider/react-query';
-import { getEndpointField, LocalStorageKeys, isAssistantsEndpoint } from 'librechat-data-provider';
+import {
+  getEndpointField,
+  LocalStorageKeys,
+  isAssistantsEndpoint,
+  getDefaultParamsEndpoint,
+} from 'librechat-data-provider';
 import type { TEndpointsConfig, EModelEndpoint, TConversation } from 'librechat-data-provider';
 import type { AssistantListItem, NewConversationParams } from '~/common';
 import useAssistantListMap from '~/hooks/Assistants/useAssistantListMap';

@@ -84,11 +89,13 @@ export default function useAddedResponse() {
       }

       const models = modelsConfig?.[defaultEndpoint ?? ''] ?? [];
+      const defaultParamsEndpoint = getDefaultParamsEndpoint(endpointsConfig, defaultEndpoint);
       newConversation = buildDefaultConvo({
         conversation: newConversation,
         lastConversationSetup: preset as TConversation,
         endpoint: defaultEndpoint ?? ('' as EModelEndpoint),
         models,
+        defaultParamsEndpoint,
       });

       if (preset?.title != null && preset.title !== '') {
@@ -13,6 +13,7 @@ import {
   parseCompactConvo,
   replaceSpecialVars,
   isAssistantsEndpoint,
+  getDefaultParamsEndpoint,
 } from 'librechat-data-provider';
 import type {
   TMessage,

@@ -173,12 +174,14 @@ export default function useChatFunctions({
     const startupConfig = queryClient.getQueryData<TStartupConfig>([QueryKeys.startupConfig]);
     const endpointType = getEndpointField(endpointsConfig, endpoint, 'type');
     const iconURL = conversation?.iconURL;
+    const defaultParamsEndpoint = getDefaultParamsEndpoint(endpointsConfig, endpoint);

     /** This becomes part of the `endpointOption` */
     const convo = parseCompactConvo({
       endpoint: endpoint as EndpointSchemaKey,
       endpointType: endpointType as EndpointSchemaKey,
       conversation: conversation ?? {},
+      defaultParamsEndpoint,
     });

     const { modelDisplayLabel } = endpointsConfig?.[endpoint ?? ''] ?? {};
@@ -1,5 +1,5 @@
-import { excludedKeys } from 'librechat-data-provider';
 import { useGetModelsQuery } from 'librechat-data-provider/react-query';
+import { excludedKeys, getDefaultParamsEndpoint } from 'librechat-data-provider';
 import type {
   TEndpointsConfig,
   TModelsConfig,

@@ -47,11 +47,14 @@ const useDefaultConvo = () => {
       }
     }

+    const defaultParamsEndpoint = getDefaultParamsEndpoint(endpointsConfig, endpoint);
+
     const defaultConvo = buildDefaultConvo({
       conversation: conversation as TConversation,
       endpoint,
       lastConversationSetup: preset as TConversation,
       models,
+      defaultParamsEndpoint,
     });

     if (!cleanOutput) {
@@ -1,7 +1,12 @@
 import { useRecoilValue } from 'recoil';
 import { useCallback, useRef, useEffect } from 'react';
 import { useGetModelsQuery } from 'librechat-data-provider/react-query';
-import { getEndpointField, LocalStorageKeys, isAssistantsEndpoint } from 'librechat-data-provider';
+import {
+  getEndpointField,
+  LocalStorageKeys,
+  isAssistantsEndpoint,
+  getDefaultParamsEndpoint,
+} from 'librechat-data-provider';
 import type {
   TEndpointsConfig,
   EModelEndpoint,

@@ -117,11 +122,13 @@ const useGenerateConvo = ({
     }

     const models = modelsConfig?.[defaultEndpoint ?? ''] ?? [];
+    const defaultParamsEndpoint = getDefaultParamsEndpoint(endpointsConfig, defaultEndpoint);
     conversation = buildDefaultConvo({
       conversation,
       lastConversationSetup: preset as TConversation,
       endpoint: defaultEndpoint ?? ('' as EModelEndpoint),
       models,
+      defaultParamsEndpoint,
     });

     if (preset?.title != null && preset.title !== '') {
@@ -2,7 +2,13 @@ import { useCallback } from 'react';
 import { useSetRecoilState } from 'recoil';
 import { useNavigate } from 'react-router-dom';
 import { useQueryClient } from '@tanstack/react-query';
-import { QueryKeys, Constants, dataService, getEndpointField } from 'librechat-data-provider';
+import {
+  QueryKeys,
+  Constants,
+  dataService,
+  getEndpointField,
+  getDefaultParamsEndpoint,
+} from 'librechat-data-provider';
 import type {
   TEndpointsConfig,
   TStartupConfig,

@@ -106,11 +112,13 @@ const useNavigateToConvo = (index = 0) => {

       const models = modelsConfig?.[defaultEndpoint ?? ''] ?? [];

+      const defaultParamsEndpoint = getDefaultParamsEndpoint(endpointsConfig, defaultEndpoint);
       convo = buildDefaultConvo({
         models,
         conversation,
         endpoint: defaultEndpoint,
         lastConversationSetup: conversation,
+        defaultParamsEndpoint,
       });
     }
     clearAllConversations(true);
@@ -14,6 +14,7 @@ import {
   LocalStorageKeys,
   isEphemeralAgentId,
   isAssistantsEndpoint,
+  getDefaultParamsEndpoint,
 } from 'librechat-data-provider';
 import type {
   TPreset,

@@ -191,11 +192,13 @@ const useNewConvo = (index = 0) => {
       }

       const models = modelsConfig?.[defaultEndpoint] ?? [];
+      const defaultParamsEndpoint = getDefaultParamsEndpoint(endpointsConfig, defaultEndpoint);
       conversation = buildDefaultConvo({
         conversation,
         lastConversationSetup: activePreset as TConversation,
         endpoint: defaultEndpoint,
         models,
+        defaultParamsEndpoint,
       });
     }
client/src/utils/__tests__/buildDefaultConvo.test.ts (new file, 202 lines)
@@ -0,0 +1,202 @@
+import { EModelEndpoint } from 'librechat-data-provider';
+import type { TConversation } from 'librechat-data-provider';
+import buildDefaultConvo from '../buildDefaultConvo';
+
+jest.mock('../localStorage', () => ({
+  getLocalStorageItems: jest.fn(() => ({
+    lastSelectedModel: {},
+    lastSelectedTools: [],
+    lastConversationSetup: {},
+  })),
+}));
+
+const baseConversation: TConversation = {
+  conversationId: 'test-convo-id',
+  title: 'Test Conversation',
+  createdAt: '2024-01-01T00:00:00Z',
+  updatedAt: '2024-01-01T00:00:00Z',
+  endpoint: null,
+};
+
+describe('buildDefaultConvo - defaultParamsEndpoint', () => {
+  describe('custom endpoint with defaultParamsEndpoint: anthropic', () => {
+    const models = ['anthropic/claude-opus-4.5', 'anthropic/claude-sonnet-4'];
+
+    it('should preserve maxOutputTokens from model spec preset', () => {
+      const preset: TConversation = {
+        ...baseConversation,
+        endpoint: 'AnthropicClaude' as EModelEndpoint,
+        endpointType: EModelEndpoint.custom,
+        model: 'anthropic/claude-opus-4.5',
+        temperature: 0.7,
+        maxOutputTokens: 8192,
+        topP: 0.9,
+        maxContextTokens: 50000,
+      };
+
+      const result = buildDefaultConvo({
+        models,
+        conversation: baseConversation,
+        endpoint: 'AnthropicClaude' as EModelEndpoint,
+        lastConversationSetup: preset,
+        defaultParamsEndpoint: EModelEndpoint.anthropic,
+      });
+
+      expect(result.maxOutputTokens).toBe(8192);
+      expect(result.topP).toBe(0.9);
+      expect(result.temperature).toBe(0.7);
+      expect(result.maxContextTokens).toBe(50000);
+      expect(result.model).toBe('anthropic/claude-opus-4.5');
+    });
+
+    it('should strip maxOutputTokens without defaultParamsEndpoint', () => {
+      const preset: TConversation = {
+        ...baseConversation,
+        endpoint: 'AnthropicClaude' as EModelEndpoint,
+        endpointType: EModelEndpoint.custom,
+        model: 'anthropic/claude-opus-4.5',
+        temperature: 0.7,
+        maxOutputTokens: 8192,
+      };
+
+      const result = buildDefaultConvo({
+        models,
+        conversation: baseConversation,
+        endpoint: 'AnthropicClaude' as EModelEndpoint,
+        lastConversationSetup: preset,
+      });
+
+      expect(result.maxOutputTokens).toBeUndefined();
+      expect(result.temperature).toBe(0.7);
+    });
+
+    it('should strip OpenAI-specific fields when using anthropic params', () => {
+      const preset: TConversation = {
+        ...baseConversation,
+        endpoint: 'AnthropicClaude' as EModelEndpoint,
+        endpointType: EModelEndpoint.custom,
+        model: 'anthropic/claude-opus-4.5',
+        max_tokens: 4096,
+        top_p: 0.9,
+        presence_penalty: 0.5,
+        frequency_penalty: 0.3,
+      };
+
+      const result = buildDefaultConvo({
+        models,
+        conversation: baseConversation,
+        endpoint: 'AnthropicClaude' as EModelEndpoint,
+        lastConversationSetup: preset,
+        defaultParamsEndpoint: EModelEndpoint.anthropic,
+      });
+
+      expect(result.max_tokens).toBeUndefined();
+      expect(result.top_p).toBeUndefined();
+      expect(result.presence_penalty).toBeUndefined();
+      expect(result.frequency_penalty).toBeUndefined();
+    });
+  });
+
+  describe('custom endpoint without defaultParamsEndpoint (OpenAI default)', () => {
+    const models = ['gpt-4o', 'gpt-4.1'];
+
+    it('should preserve OpenAI fields and strip anthropic fields', () => {
+      const preset: TConversation = {
+        ...baseConversation,
+        endpoint: 'MyOpenRouterEndpoint' as EModelEndpoint,
+        endpointType: EModelEndpoint.custom,
+        model: 'gpt-4o',
+        temperature: 0.7,
+        max_tokens: 4096,
+        top_p: 0.9,
+        maxOutputTokens: 8192,
+      };
+
+      const result = buildDefaultConvo({
+        models,
+        conversation: baseConversation,
+        endpoint: 'MyOpenRouterEndpoint' as EModelEndpoint,
+        lastConversationSetup: preset,
+      });
+
+      expect(result.max_tokens).toBe(4096);
+      expect(result.top_p).toBe(0.9);
+      expect(result.temperature).toBe(0.7);
+      expect(result.maxOutputTokens).toBeUndefined();
+    });
+  });
+
+  describe('custom endpoint with defaultParamsEndpoint: google', () => {
+    const models = ['gemini-pro', 'gemini-1.5-pro'];
+
+    it('should preserve Google-specific fields', () => {
+      const preset: TConversation = {
+        ...baseConversation,
+        endpoint: 'MyGoogleEndpoint' as EModelEndpoint,
+        endpointType: EModelEndpoint.custom,
+        model: 'gemini-pro',
+        temperature: 0.7,
+        maxOutputTokens: 8192,
+        topP: 0.9,
+        topK: 40,
+      };
+
+      const result = buildDefaultConvo({
+        models,
+        conversation: baseConversation,
+        endpoint: 'MyGoogleEndpoint' as EModelEndpoint,
+        lastConversationSetup: preset,
+        defaultParamsEndpoint: EModelEndpoint.google,
+      });
+
+      expect(result.maxOutputTokens).toBe(8192);
+      expect(result.topP).toBe(0.9);
+      expect(result.topK).toBe(40);
+    });
+  });
+
+  describe('cross-endpoint field isolation', () => {
+    it('should not carry bedrock region to a custom endpoint', () => {
+      const preset: TConversation = {
+        ...baseConversation,
+        endpoint: 'MyChatEndpoint' as EModelEndpoint,
+        endpointType: EModelEndpoint.custom,
+        model: 'gpt-4o',
+        temperature: 0.7,
+        region: 'us-east-1',
+      };
+
+      const result = buildDefaultConvo({
+        models: ['gpt-4o'],
+        conversation: baseConversation,
+        endpoint: 'MyChatEndpoint' as EModelEndpoint,
+        lastConversationSetup: preset,
+      });
+
+      expect(result.region).toBeUndefined();
+      expect(result.temperature).toBe(0.7);
+    });
+
+    it('should not carry bedrock region even with anthropic defaultParamsEndpoint', () => {
+      const preset: TConversation = {
+        ...baseConversation,
+        endpoint: 'MyChatEndpoint' as EModelEndpoint,
+        endpointType: EModelEndpoint.custom,
+        model: 'claude-3-opus',
+        region: 'us-east-1',
+        maxOutputTokens: 8192,
+      };
+
+      const result = buildDefaultConvo({
+        models: ['claude-3-opus'],
+        conversation: baseConversation,
+        endpoint: 'MyChatEndpoint' as EModelEndpoint,
+        lastConversationSetup: preset,
+        defaultParamsEndpoint: EModelEndpoint.anthropic,
+      });
+
+      expect(result.region).toBeUndefined();
+      expect(result.maxOutputTokens).toBe(8192);
+    });
+  });
+});
client/src/utils/__tests__/cleanupPreset.integration.test.ts (new file, 119 lines)
@@ -0,0 +1,119 @@
+import { EModelEndpoint } from 'librechat-data-provider';
+import cleanupPreset from '../cleanupPreset';
+
+/**
+ * Integration tests for cleanupPreset — NO mocks.
+ * Uses the real parseConvo to verify actual schema behavior
+ * with defaultParamsEndpoint for custom endpoints.
+ */
+describe('cleanupPreset - real parsing with defaultParamsEndpoint', () => {
+  it('should preserve maxOutputTokens when defaultParamsEndpoint is anthropic', () => {
+    const preset = {
+      presetId: 'test-id',
+      title: 'Claude Opus',
+      endpoint: 'AnthropicClaude',
+      endpointType: EModelEndpoint.custom,
+      model: 'anthropic/claude-opus-4.5',
+      temperature: 0.7,
+      maxOutputTokens: 8192,
+      topP: 0.9,
+      maxContextTokens: 50000,
+    };
+
+    const result = cleanupPreset({
+      preset,
+      defaultParamsEndpoint: EModelEndpoint.anthropic,
+    });
+
+    expect(result.maxOutputTokens).toBe(8192);
+    expect(result.topP).toBe(0.9);
+    expect(result.temperature).toBe(0.7);
+    expect(result.maxContextTokens).toBe(50000);
+    expect(result.model).toBe('anthropic/claude-opus-4.5');
+  });
+
+  it('should strip maxOutputTokens without defaultParamsEndpoint (OpenAI schema)', () => {
+    const preset = {
+      presetId: 'test-id',
+      title: 'GPT Custom',
+      endpoint: 'MyOpenRouter',
+      endpointType: EModelEndpoint.custom,
+      model: 'gpt-4o',
+      temperature: 0.7,
+      maxOutputTokens: 8192,
+      max_tokens: 4096,
+    };
+
+    const result = cleanupPreset({ preset });
+
+    expect(result.maxOutputTokens).toBeUndefined();
+    expect(result.max_tokens).toBe(4096);
+    expect(result.temperature).toBe(0.7);
+  });
+
+  it('should strip OpenAI-specific fields when using anthropic params', () => {
+    const preset = {
+      presetId: 'test-id',
+      title: 'Claude Custom',
+      endpoint: 'AnthropicClaude',
+      endpointType: EModelEndpoint.custom,
+      model: 'anthropic/claude-3-opus',
+      max_tokens: 4096,
+      top_p: 0.9,
+      presence_penalty: 0.5,
+      frequency_penalty: 0.3,
+      temperature: 0.7,
+    };
+
+    const result = cleanupPreset({
+      preset,
+      defaultParamsEndpoint: EModelEndpoint.anthropic,
+    });
+
+    expect(result.max_tokens).toBeUndefined();
+    expect(result.top_p).toBeUndefined();
+    expect(result.presence_penalty).toBeUndefined();
+    expect(result.frequency_penalty).toBeUndefined();
+    expect(result.temperature).toBe(0.7);
+  });
+
+  it('should not carry bedrock region to custom endpoint', () => {
+    const preset = {
+      presetId: 'test-id',
+      title: 'Custom',
+      endpoint: 'MyEndpoint',
+      endpointType: EModelEndpoint.custom,
+      model: 'gpt-4o',
+      temperature: 0.7,
+      region: 'us-east-1',
+    };
+
+    const result = cleanupPreset({ preset });
+
+    expect(result.region).toBeUndefined();
+    expect(result.temperature).toBe(0.7);
+  });
+
+  it('should preserve Google-specific fields when defaultParamsEndpoint is google', () => {
+    const preset = {
+      presetId: 'test-id',
+      title: 'Gemini Custom',
+      endpoint: 'MyGoogleEndpoint',
+      endpointType: EModelEndpoint.custom,
+      model: 'gemini-pro',
+      temperature: 0.7,
+      maxOutputTokens: 8192,
+      topP: 0.9,
+      topK: 40,
+    };
+
+    const result = cleanupPreset({
+      preset,
+      defaultParamsEndpoint: EModelEndpoint.google,
+    });
+
+    expect(result.maxOutputTokens).toBe(8192);
+    expect(result.topP).toBe(0.9);
+    expect(result.topK).toBe(40);
+  });
+});
@@ -1,12 +1,9 @@
-import { EModelEndpoint } from 'librechat-data-provider';
+import { EModelEndpoint, parseConvo } from 'librechat-data-provider';
 import cleanupPreset from '../cleanupPreset';
 import type { TPreset } from 'librechat-data-provider';
-
-// Mock parseConvo since we're focusing on testing the chatGptLabel migration logic
 jest.mock('librechat-data-provider', () => ({
   ...jest.requireActual('librechat-data-provider'),
   parseConvo: jest.fn((input) => {
-    // Return a simplified mock that passes through most properties
     const { conversation } = input;
     return {
       ...conversation,

@@ -221,4 +218,41 @@ describe('cleanupPreset', () => {
       expect(result.presetId).toBeNull();
     });
   });
+
+  describe('defaultParamsEndpoint threading', () => {
+    it('should pass defaultParamsEndpoint to parseConvo', () => {
+      const preset = {
+        ...basePreset,
+        endpoint: 'MyCustomEndpoint',
+        endpointType: EModelEndpoint.custom,
+      };
+
+      cleanupPreset({
+        preset,
+        defaultParamsEndpoint: EModelEndpoint.anthropic,
+      });
+
+      expect(parseConvo).toHaveBeenCalledWith(
+        expect.objectContaining({
+          defaultParamsEndpoint: EModelEndpoint.anthropic,
+        }),
+      );
+    });
+
+    it('should pass undefined defaultParamsEndpoint when not provided', () => {
+      const preset = {
+        ...basePreset,
+        endpoint: 'MyCustomEndpoint',
+        endpointType: EModelEndpoint.custom,
+      };
+
+      cleanupPreset({ preset });
+
+      expect(parseConvo).toHaveBeenCalledWith(
+        expect.objectContaining({
+          defaultParamsEndpoint: undefined,
+        }),
+      );
+    });
+  });
 });
@@ -14,11 +14,13 @@ const buildDefaultConvo = ({
   conversation,
   endpoint = null,
   lastConversationSetup,
+  defaultParamsEndpoint,
 }: {
   models: string[];
   conversation: TConversation;
   endpoint?: EModelEndpoint | null;
   lastConversationSetup: TConversation | null;
+  defaultParamsEndpoint?: string | null;
 }): TConversation => {
   const { lastSelectedModel, lastSelectedTools } = getLocalStorageItems();
   const endpointType = lastConversationSetup?.endpointType ?? conversation.endpointType;

@@ -49,6 +51,7 @@ const buildDefaultConvo = ({
     possibleValues: {
       models: possibleModels,
     },
+    defaultParamsEndpoint,
   });

   const defaultConvo = {
@@ -4,9 +4,10 @@ import type { TPreset } from 'librechat-data-provider';
 type UIPreset = Partial<TPreset> & { presetOverride?: Partial<TPreset> };
 type TCleanupPreset = {
   preset?: UIPreset;
+  defaultParamsEndpoint?: string | null;
 };

-const cleanupPreset = ({ preset: _preset }: TCleanupPreset): TPreset => {
+const cleanupPreset = ({ preset: _preset, defaultParamsEndpoint }: TCleanupPreset): TPreset => {
   const { endpoint, endpointType } = _preset ?? ({} as UIPreset);
   if (endpoint == null || endpoint === '') {
     console.error(`Unknown endpoint ${endpoint}`, _preset);

@@ -35,8 +36,13 @@ const cleanupPreset = ({ preset: _preset }: TCleanupPreset): TPreset => {
     delete preset.chatGptLabel;
   }

-  /* @ts-ignore: endpoint can be a custom defined name */
-  const parsedPreset = parseConvo({ endpoint, endpointType, conversation: preset });
+  const parsedPreset = parseConvo({
+    /* @ts-ignore: endpoint can be a custom defined name */
+    endpoint,
+    endpointType,
+    conversation: preset,
+    defaultParamsEndpoint,
+  });

   return {
     presetId: _preset?.presetId ?? null,