refactor: parameter extraction and organization in agent services; minimize redundancy of shared fields across objects; clearly distinguish parameters processed uniquely by LibreChat from LLM provider configs

Danny Avila 2025-06-21 14:31:54 -04:00
parent 6bc0bbeebb
commit 2797aff423
No known key found for this signature in database
GPG key ID: BF31EEB2C5CA0956
7 changed files with 257 additions and 21 deletions
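
To illustrate the split this commit draws, here is a minimal, hypothetical sketch (the option names match the new extractLibreChatParams utility added below; the values and variable names are invented for illustration): fields like promptPrefix and resendFiles are consumed by LibreChat itself, while everything else passes through to the LLM provider.

import { extractLibreChatParams } from './llm';

// Hypothetical combined options object (values invented for illustration):
const combined = {
  promptPrefix: 'You are a helpful assistant', // processed by LibreChat
  resendFiles: false, // processed by LibreChat
  model: 'gpt-4', // forwarded to the LLM provider
  temperature: 0.7, // forwarded to the LLM provider
};

const { promptPrefix, resendFiles, modelOptions } = extractLibreChatParams(combined);
// promptPrefix === 'You are a helpful assistant', resendFiles === false;
// modelOptions === { model: 'gpt-4', temperature: 0.7 }, so only provider
// parameters remain and the object can be passed straight to the client.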

@@ -4,5 +4,6 @@ export * from './common';
export * from './events';
export * from './files';
export * from './generators';
export * from './llm';
export * from './openid';
export { default as Tokenizer } from './tokenizer';

@@ -0,0 +1,189 @@
import { extractLibreChatParams } from './llm';

describe('extractLibreChatParams', () => {
  it('should return defaults when options is undefined', () => {
    const result = extractLibreChatParams(undefined);

    expect(result.resendFiles).toBe(true);
    expect(result.promptPrefix).toBeUndefined();
    expect(result.maxContextTokens).toBeUndefined();
    expect(result.modelLabel).toBeUndefined();
    expect(result.modelOptions).toEqual({});
  });

  it('should return defaults when options is null', () => {
    const result = extractLibreChatParams(null);

    expect(result.resendFiles).toBe(true);
    expect(result.promptPrefix).toBeUndefined();
    expect(result.maxContextTokens).toBeUndefined();
    expect(result.modelLabel).toBeUndefined();
    expect(result.modelOptions).toEqual({});
  });

  it('should extract all LibreChat params and leave model options', () => {
    const options = {
      resendFiles: false,
      promptPrefix: 'You are a helpful assistant',
      maxContextTokens: 4096,
      modelLabel: 'GPT-4',
      model: 'gpt-4',
      temperature: 0.7,
      max_tokens: 1000,
    };

    const result = extractLibreChatParams(options);

    expect(result.resendFiles).toBe(false);
    expect(result.promptPrefix).toBe('You are a helpful assistant');
    expect(result.maxContextTokens).toBe(4096);
    expect(result.modelLabel).toBe('GPT-4');
    expect(result.modelOptions).toEqual({
      model: 'gpt-4',
      temperature: 0.7,
      max_tokens: 1000,
    });
  });

  it('should handle null values for LibreChat params', () => {
    const options = {
      resendFiles: true,
      promptPrefix: null,
      maxContextTokens: 2048,
      modelLabel: null,
      model: 'claude-3',
    };

    const result = extractLibreChatParams(options);

    expect(result.resendFiles).toBe(true);
    expect(result.promptPrefix).toBeNull();
    expect(result.maxContextTokens).toBe(2048);
    expect(result.modelLabel).toBeNull();
    expect(result.modelOptions).toEqual({
      model: 'claude-3',
    });
  });

  it('should use default for resendFiles when not provided', () => {
    const options = {
      promptPrefix: 'Test prefix',
      model: 'gpt-3.5-turbo',
      temperature: 0.5,
    };

    const result = extractLibreChatParams(options);

    expect(result.resendFiles).toBe(true); // Should use default
    expect(result.promptPrefix).toBe('Test prefix');
    expect(result.maxContextTokens).toBeUndefined();
    expect(result.modelLabel).toBeUndefined();
    expect(result.modelOptions).toEqual({
      model: 'gpt-3.5-turbo',
      temperature: 0.5,
    });
  });

  it('should handle empty options object', () => {
    const result = extractLibreChatParams({});

    expect(result.resendFiles).toBe(true); // Should use default
    expect(result.promptPrefix).toBeUndefined();
    expect(result.maxContextTokens).toBeUndefined();
    expect(result.modelLabel).toBeUndefined();
    expect(result.modelOptions).toEqual({});
  });

  it('should only extract known LibreChat params', () => {
    const options = {
      resendFiles: false,
      promptPrefix: 'Custom prompt',
      maxContextTokens: 8192,
      modelLabel: 'Custom Model',
      // Model options
      model: 'gpt-4',
      temperature: 0.9,
      top_p: 0.95,
      frequency_penalty: 0.5,
      presence_penalty: 0.5,
      // Unknown params should stay in modelOptions
      unknownParam: 'should remain',
      customSetting: 123,
    };

    const result = extractLibreChatParams(options);

    // LibreChat params extracted
    expect(result.resendFiles).toBe(false);
    expect(result.promptPrefix).toBe('Custom prompt');
    expect(result.maxContextTokens).toBe(8192);
    expect(result.modelLabel).toBe('Custom Model');

    // Model options should include everything else
    expect(result.modelOptions).toEqual({
      model: 'gpt-4',
      temperature: 0.9,
      top_p: 0.95,
      frequency_penalty: 0.5,
      presence_penalty: 0.5,
      unknownParam: 'should remain',
      customSetting: 123,
    });
  });

  it('should not mutate the original options object', () => {
    const options = {
      resendFiles: false,
      promptPrefix: 'Test',
      model: 'gpt-4',
      temperature: 0.7,
    };
    const originalOptions = { ...options };

    extractLibreChatParams(options);

    // Original object should remain unchanged
    expect(options).toEqual(originalOptions);
  });

  it('should handle undefined values for optional LibreChat params', () => {
    const options = {
      resendFiles: false,
      promptPrefix: undefined,
      maxContextTokens: undefined,
      modelLabel: undefined,
      model: 'claude-2',
    };

    const result = extractLibreChatParams(options);

    expect(result.resendFiles).toBe(false);
    expect(result.promptPrefix).toBeUndefined();
    expect(result.maxContextTokens).toBeUndefined();
    expect(result.modelLabel).toBeUndefined();
    expect(result.modelOptions).toEqual({
      model: 'claude-2',
    });
  });

  it('should handle mixed null and undefined values', () => {
    const options = {
      promptPrefix: null,
      maxContextTokens: undefined,
      modelLabel: null,
      model: 'gpt-3.5-turbo',
      stop: ['\n', '\n\n'],
    };

    const result = extractLibreChatParams(options);

    expect(result.resendFiles).toBe(true); // default
    expect(result.promptPrefix).toBeNull();
    expect(result.maxContextTokens).toBeUndefined();
    expect(result.modelLabel).toBeNull();
    expect(result.modelOptions).toEqual({
      model: 'gpt-3.5-turbo',
      stop: ['\n', '\n\n'],
    });
  });
});

@@ -0,0 +1,47 @@
import { librechat } from 'librechat-data-provider';
import type { DynamicSettingProps } from 'librechat-data-provider';

type LibreChatKeys = keyof typeof librechat;

type LibreChatParams = {
  modelOptions: Omit<NonNullable<DynamicSettingProps['conversation']>, LibreChatKeys>;
  resendFiles: boolean;
  promptPrefix?: string | null;
  maxContextTokens?: number;
  modelLabel?: string | null;
};

/**
 * Separates LibreChat-specific parameters from model options
 * @param options - The combined options object
 * @returns The extracted LibreChat params and the remaining model options
 */
export function extractLibreChatParams(
  options?: DynamicSettingProps['conversation'],
): LibreChatParams {
  if (!options) {
    return {
      modelOptions: {} as Omit<NonNullable<DynamicSettingProps['conversation']>, LibreChatKeys>,
      resendFiles: librechat.resendFiles.default as boolean,
    };
  }

  const modelOptions = { ...options };

  // Each comma expression deletes the LibreChat-only key from the
  // `modelOptions` copy while reading its value from the original `options`,
  // so these fields never leak into the provider configuration.
  const resendFiles =
    (delete modelOptions.resendFiles, options.resendFiles) ??
    (librechat.resendFiles.default as boolean);
  const promptPrefix = (delete modelOptions.promptPrefix, options.promptPrefix);
  const maxContextTokens = (delete modelOptions.maxContextTokens, options.maxContextTokens);
  const modelLabel = (delete modelOptions.modelLabel, options.modelLabel);

  return {
    modelOptions: modelOptions as Omit<
      NonNullable<DynamicSettingProps['conversation']>,
      LibreChatKeys
    >,
    maxContextTokens,
    promptPrefix,
    resendFiles,
    modelLabel,
  };
}
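
A note on the design choice: the comma-operator pattern above keeps each read-and-delete on a single line. For comparison, the same split can be done with rest destructuring and no delete at all. The sketch below is a hypothetical alternative, not part of this commit, and assumes only these four keys need stripping (with looser typing than the real function):

import { librechat } from 'librechat-data-provider';

// Hypothetical alternative (not part of this commit): rest destructuring
// splits off the LibreChat-only keys in one statement.
function extractLibreChatParamsAlt(options: Record<string, unknown>) {
  const { resendFiles, promptPrefix, maxContextTokens, modelLabel, ...modelOptions } = options;
  return {
    resendFiles:
      (resendFiles as boolean | undefined) ?? (librechat.resendFiles.default as boolean),
    promptPrefix,
    maxContextTokens,
    modelLabel,
    modelOptions,
  };
}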