Mirror of https://github.com/danny-avila/LibreChat.git (synced 2025-12-17 17:00:15 +01:00)
🤖 feat: Streamline Endpoints to Agent Framework (#8013)
* refactor(buildEndpointOption): improve error logging in middleware, consolidate `isAgents` builder logic, stop adding `modelsConfig` to `endpointOption`
* refactor: parameter extraction and organization in agent services; minimize redundancy of shared fields across objects and draw a clear distinction between parameters processed uniquely by LibreChat and LLM provider configs
* refactor(createPayload): streamline all endpoints to agent route
* fix: add `modelLabel` to response sender options for agent initialization
* chore: correct log message context in EditController abort controller cleanup
* chore: remove unused abortRequest hook
* chore: remove unused addToCache module and its dependencies
* refactor: remove AskController and related routes, update endpoint URLs (now all streamlined to agents route)
* chore: remove unused bedrock route and its related imports
* refactor: simplify response sender logic for Google endpoint
* chore: add `modelDisplayLabel` handling for agents endpoint
* feat: add file search capability to ephemeral agents, update code interpreter selection based on file upload, consolidate main upload menu for all endpoints
* feat: implement useToolToggle hook for managing tool toggle state, refactor CodeInterpreter and WebSearch components to utilize new hook (see the hook sketch after this list)
* feat: add ToolsDropdown component to BadgeRow for enhanced tool options
* feat: introduce BadgeRowContext and BadgeRowProvider for managing conversation state, refactor related components to utilize context (see the context sketch after this list)
* feat: implement useMCPSelect hook for managing MCP selection state, refactor MCPSelect component to utilize new hook
* feat: enhance BadgeRowContext with MCPSelect and tool toggle functionality, refactor related components to utilize updated context and hooks
* refactor: streamline useToolToggle hook by integrating setEphemeralAgent directly into toggle logic and removing redundant setValue function
* refactor: consolidate codeApiKeyForm and searchApiKeyForm from CodeInterpreter and WebSearch to utilize new context properties
* refactor: update CheckboxButton to support controlled state and enhance ToolsDropdown with permission-based toggles for web search and code interpreter
* refactor: conditionally render CheckboxButton in CodeInterpreter and WebSearch components for improved UI responsiveness
* chore: add jotai dependency to package.json and package-lock.json
* chore: update brace-expansion package to version 2.0.2 in package-lock.json due to CVE-2025-5889
* Revert "chore: add jotai dependency to package.json and package-lock.json"
This reverts commit 69b6997396.
* refactor: add pinning functionality to CodeInterpreter and WebSearch components, and enhance ToolsDropdown with pin toggle for web search and code interpreter
* chore: move MCPIcon to correct location, remove duplicate
* fix: update MCP import to use type-only import from librechat-data-provider
* feat: implement MCPSubMenu component and integrate pinning functionality into ToolsDropdown
* fix: cycling to submenu by using parent menu context
* feat: add FileSearch component and integrate it into BadgeRow and ToolsDropdown
* chore: import order
* chore: remove agent specific logic that would block functionality for streamlined endpoints
* chore: linting for `createContextHandlers`
* chore: ensure ToolsDropdown doesn't show up for agents
* chore: ensure tool resource is selected when dragged to UI
* chore: update file search behavior to simulate legacy functionality
* feat: ToolDialogs with multiple trigger references, add settings to tool dropdown
* refactor: simplify web search and code interpreter settings checks
* chore: simplify local storage key for pinned state in useToolToggle
* refactor: reinstate agent check in AttachFileChat component, as individual providers will have different file configurations
* ci: increase timeout for MongoDB connection in Agent tests
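
The diff below only covers the new `llm` utilities, so here is a minimal sketch of what a `useToolToggle`-style hook could look like, assuming a `setEphemeralAgent` setter and a per-tool localStorage key for the pinned state. All names and shapes are illustrative assumptions based on the bullets above, not the actual implementation:

```ts
// Hypothetical sketch of a useToolToggle-style hook; the tool keys, the
// setEphemeralAgent signature, and the storage key shape are all assumptions.
import { useCallback, useState } from 'react';

interface UseToolToggleOptions {
  toolKey: 'web_search' | 'execute_code' | 'file_search'; // assumed keys
  setEphemeralAgent: React.Dispatch<React.SetStateAction<Record<string, boolean>>>;
}

export function useToolToggle({ toolKey, setEphemeralAgent }: UseToolToggleOptions) {
  // Pinned state survives reloads via a simple per-tool localStorage key
  const storageKey = `pinned:${toolKey}`; // assumed key shape
  const [isPinned, setIsPinned] = useState<boolean>(
    () => localStorage.getItem(storageKey) === 'true',
  );
  const [isEnabled, setIsEnabled] = useState<boolean>(isPinned);

  // Toggling updates local UI state and the ephemeral agent config in one
  // step, so no separate setValue call is needed (cf. the bullets above)
  const toggle = useCallback(() => {
    const next = !isEnabled;
    setIsEnabled(next);
    setEphemeralAgent((agent) => ({ ...agent, [toolKey]: next }));
  }, [isEnabled, toolKey, setEphemeralAgent]);

  const togglePin = useCallback(() => {
    const next = !isPinned;
    localStorage.setItem(storageKey, String(next));
    setIsPinned(next);
  }, [isPinned, storageKey]);

  return { isEnabled, toggle, isPinned, togglePin };
}
```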
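
Likewise, a minimal sketch of the `BadgeRowContext`/`BadgeRowProvider` pair the commits describe, under the assumption that it centralizes the ephemeral-agent state that components like MCPSelect, CodeInterpreter, and WebSearch previously received via props; the value shape is invented for illustration:

```tsx
// Hypothetical sketch of BadgeRowContext/BadgeRowProvider; the context value
// shape is an assumption based on the commit descriptions, not the real code.
import React, { createContext, useContext, useMemo, useState } from 'react';

interface BadgeRowContextValue {
  conversationId: string;
  ephemeralAgent: Record<string, boolean>;
  setEphemeralAgent: React.Dispatch<React.SetStateAction<Record<string, boolean>>>;
}

const BadgeRowContext = createContext<BadgeRowContextValue | undefined>(undefined);

export function BadgeRowProvider({
  conversationId,
  children,
}: {
  conversationId: string;
  children: React.ReactNode;
}) {
  const [ephemeralAgent, setEphemeralAgent] = useState<Record<string, boolean>>({});
  const value = useMemo(
    () => ({ conversationId, ephemeralAgent, setEphemeralAgent }),
    [conversationId, ephemeralAgent],
  );
  return <BadgeRowContext.Provider value={value}>{children}</BadgeRowContext.Provider>;
}

// Consumers read shared conversation state through this accessor
export function useBadgeRowContext() {
  const ctx = useContext(BadgeRowContext);
  if (!ctx) {
    throw new Error('useBadgeRowContext must be used within BadgeRowProvider');
  }
  return ctx;
}
```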
Parent: d835f48307
Commit: 01e9b196bc
67 changed files with 1468 additions and 1433 deletions
```diff
@@ -4,5 +4,6 @@ export * from './common';
 export * from './events';
 export * from './files';
 export * from './generators';
+export * from './llm';
 export * from './openid';
 export { default as Tokenizer } from './tokenizer';
```
packages/api/src/utils/llm.test.ts (new file, +189 lines)

```ts
import { extractLibreChatParams } from './llm';

describe('extractLibreChatParams', () => {
  it('should return defaults when options is undefined', () => {
    const result = extractLibreChatParams(undefined);

    expect(result.resendFiles).toBe(true);
    expect(result.promptPrefix).toBeUndefined();
    expect(result.maxContextTokens).toBeUndefined();
    expect(result.modelLabel).toBeUndefined();
    expect(result.modelOptions).toEqual({});
  });

  it('should return defaults when options is null', () => {
    const result = extractLibreChatParams(null);

    expect(result.resendFiles).toBe(true);
    expect(result.promptPrefix).toBeUndefined();
    expect(result.maxContextTokens).toBeUndefined();
    expect(result.modelLabel).toBeUndefined();
    expect(result.modelOptions).toEqual({});
  });

  it('should extract all LibreChat params and leave model options', () => {
    const options = {
      resendFiles: false,
      promptPrefix: 'You are a helpful assistant',
      maxContextTokens: 4096,
      modelLabel: 'GPT-4',
      model: 'gpt-4',
      temperature: 0.7,
      max_tokens: 1000,
    };

    const result = extractLibreChatParams(options);

    expect(result.resendFiles).toBe(false);
    expect(result.promptPrefix).toBe('You are a helpful assistant');
    expect(result.maxContextTokens).toBe(4096);
    expect(result.modelLabel).toBe('GPT-4');
    expect(result.modelOptions).toEqual({
      model: 'gpt-4',
      temperature: 0.7,
      max_tokens: 1000,
    });
  });

  it('should handle null values for LibreChat params', () => {
    const options = {
      resendFiles: true,
      promptPrefix: null,
      maxContextTokens: 2048,
      modelLabel: null,
      model: 'claude-3',
    };

    const result = extractLibreChatParams(options);

    expect(result.resendFiles).toBe(true);
    expect(result.promptPrefix).toBeNull();
    expect(result.maxContextTokens).toBe(2048);
    expect(result.modelLabel).toBeNull();
    expect(result.modelOptions).toEqual({
      model: 'claude-3',
    });
  });

  it('should use default for resendFiles when not provided', () => {
    const options = {
      promptPrefix: 'Test prefix',
      model: 'gpt-3.5-turbo',
      temperature: 0.5,
    };

    const result = extractLibreChatParams(options);

    expect(result.resendFiles).toBe(true); // Should use default
    expect(result.promptPrefix).toBe('Test prefix');
    expect(result.maxContextTokens).toBeUndefined();
    expect(result.modelLabel).toBeUndefined();
    expect(result.modelOptions).toEqual({
      model: 'gpt-3.5-turbo',
      temperature: 0.5,
    });
  });

  it('should handle empty options object', () => {
    const result = extractLibreChatParams({});

    expect(result.resendFiles).toBe(true); // Should use default
    expect(result.promptPrefix).toBeUndefined();
    expect(result.maxContextTokens).toBeUndefined();
    expect(result.modelLabel).toBeUndefined();
    expect(result.modelOptions).toEqual({});
  });

  it('should only extract known LibreChat params', () => {
    const options = {
      resendFiles: false,
      promptPrefix: 'Custom prompt',
      maxContextTokens: 8192,
      modelLabel: 'Custom Model',
      // Model options
      model: 'gpt-4',
      temperature: 0.9,
      top_p: 0.95,
      frequency_penalty: 0.5,
      presence_penalty: 0.5,
      // Unknown params should stay in modelOptions
      unknownParam: 'should remain',
      customSetting: 123,
    };

    const result = extractLibreChatParams(options);

    // LibreChat params extracted
    expect(result.resendFiles).toBe(false);
    expect(result.promptPrefix).toBe('Custom prompt');
    expect(result.maxContextTokens).toBe(8192);
    expect(result.modelLabel).toBe('Custom Model');

    // Model options should include everything else
    expect(result.modelOptions).toEqual({
      model: 'gpt-4',
      temperature: 0.9,
      top_p: 0.95,
      frequency_penalty: 0.5,
      presence_penalty: 0.5,
      unknownParam: 'should remain',
      customSetting: 123,
    });
  });

  it('should not mutate the original options object', () => {
    const options = {
      resendFiles: false,
      promptPrefix: 'Test',
      model: 'gpt-4',
      temperature: 0.7,
    };
    const originalOptions = { ...options };

    extractLibreChatParams(options);

    // Original object should remain unchanged
    expect(options).toEqual(originalOptions);
  });

  it('should handle undefined values for optional LibreChat params', () => {
    const options = {
      resendFiles: false,
      promptPrefix: undefined,
      maxContextTokens: undefined,
      modelLabel: undefined,
      model: 'claude-2',
    };

    const result = extractLibreChatParams(options);

    expect(result.resendFiles).toBe(false);
    expect(result.promptPrefix).toBeUndefined();
    expect(result.maxContextTokens).toBeUndefined();
    expect(result.modelLabel).toBeUndefined();
    expect(result.modelOptions).toEqual({
      model: 'claude-2',
    });
  });

  it('should handle mixed null and undefined values', () => {
    const options = {
      promptPrefix: null,
      maxContextTokens: undefined,
      modelLabel: null,
      model: 'gpt-3.5-turbo',
      stop: ['\n', '\n\n'],
    };

    const result = extractLibreChatParams(options);

    expect(result.resendFiles).toBe(true); // default
    expect(result.promptPrefix).toBeNull();
    expect(result.maxContextTokens).toBeUndefined();
    expect(result.modelLabel).toBeNull();
    expect(result.modelOptions).toEqual({
      model: 'gpt-3.5-turbo',
      stop: ['\n', '\n\n'],
    });
  });
});
```
packages/api/src/utils/llm.ts (new file, +47 lines)

```ts
import { librechat } from 'librechat-data-provider';
import type { DynamicSettingProps } from 'librechat-data-provider';

type LibreChatKeys = keyof typeof librechat;

type LibreChatParams = {
  modelOptions: Omit<NonNullable<DynamicSettingProps['conversation']>, LibreChatKeys>;
  resendFiles: boolean;
  promptPrefix?: string | null;
  maxContextTokens?: number;
  modelLabel?: string | null;
};

/**
 * Separates LibreChat-specific parameters from model options
 * @param options - The combined options object
 */
export function extractLibreChatParams(
  options?: DynamicSettingProps['conversation'],
): LibreChatParams {
  if (!options) {
    return {
      modelOptions: {} as Omit<NonNullable<DynamicSettingProps['conversation']>, LibreChatKeys>,
      resendFiles: librechat.resendFiles.default as boolean,
    };
  }

  const modelOptions = { ...options };

  // Each expression deletes the LibreChat-specific key from the copy and,
  // via the comma operator, yields the original value for assignment.
  const resendFiles =
    (delete modelOptions.resendFiles, options.resendFiles) ??
    (librechat.resendFiles.default as boolean);
  const promptPrefix = (delete modelOptions.promptPrefix, options.promptPrefix);
  const maxContextTokens = (delete modelOptions.maxContextTokens, options.maxContextTokens);
  const modelLabel = (delete modelOptions.modelLabel, options.modelLabel);

  return {
    modelOptions: modelOptions as Omit<
      NonNullable<DynamicSettingProps['conversation']>,
      LibreChatKeys
    >,
    maxContextTokens,
    promptPrefix,
    resendFiles,
    modelLabel,
  };
}
```
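
To make the contract concrete, a short usage sketch of the new helper; the literal option values are illustrative, and the behavior shown follows directly from the implementation and tests above:

```ts
import { extractLibreChatParams } from './llm'; // in-package path, as in the tests

// Everything LibreChat consumes itself is pulled out; the rest passes
// through untouched as provider model options.
const { resendFiles, promptPrefix, maxContextTokens, modelLabel, modelOptions } =
  extractLibreChatParams({
    resendFiles: false,
    promptPrefix: 'You are concise.',
    modelLabel: 'My GPT-4',
    model: 'gpt-4',
    temperature: 0.2,
  });

// modelOptions       => { model: 'gpt-4', temperature: 0.2 }
// resendFiles        => false
// promptPrefix       => 'You are concise.'
// modelLabel         => 'My GPT-4'
// maxContextTokens   => undefined (not supplied)
```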