🤖 feat: Streamline Endpoints to Agent Framework (#8013)

* refactor(buildEndpointOption): Improve error logging in middleware, consolidate `isAgents` builder logic, remove adding `modelsConfig` to `endpointOption`

* refactor: parameter extraction and organization in agent services, minimize redundancy of shared fields across objects, make clear distinction of parameters processed uniquely by LibreChat vs LLM Provider Configs

* refactor(createPayload): streamline all endpoints to agent route

* fix: add `modelLabel` to response sender options for agent initialization

* chore: correct log message context in EditController abort controller cleanup

* chore: remove unused abortRequest hook

* chore: remove unused addToCache module and its dependencies

* refactor: remove AskController and related routes, update endpoint URLs (now all streamlined to agents route)

* chore: remove unused bedrock route and its related imports

* refactor: simplify response sender logic for Google endpoint

* chore: add `modelDisplayLabel` handling for agents endpoint

* feat: add file search capability to ephemeral agents, update code interpreter selection based on file upload, consolidate main upload menu for all endpoints

* feat: implement useToolToggle hook for managing tool toggle state, refactor CodeInterpreter and WebSearch components to utilize new hook

* feat: add ToolsDropdown component to BadgeRow for enhanced tool options

* feat: introduce BadgeRowContext and BadgeRowProvider for managing conversation state, refactor related components to utilize context

* feat: implement useMCPSelect hook for managing MCP selection state, refactor MCPSelect component to utilize new hook

* feat: enhance BadgeRowContext with MCPSelect and tool toggle functionality, refactor related components to utilize updated context and hooks

* refactor: streamline useToolToggle hook by integrating setEphemeralAgent directly into toggle logic and removing redundant setValue function

* refactor: consolidate codeApiKeyForm and searchApiKeyForm from CodeInterpreter and WebSearch to utilize new context properties

* refactor: update CheckboxButton to support controlled state and enhance ToolsDropdown with permission-based toggles for web search and code interpreter

* refactor: conditionally render CheckboxButton in CodeInterpreter and WebSearch components for improved UI responsiveness

* chore: add jotai dependency to package.json and package-lock.json

* chore: update brace-expansion package to version 2.0.2 in package-lock.json due to CVE-2025-5889

* Revert "chore: add jotai dependency to package.json and package-lock.json"

This reverts commit 69b6997396.

* refactor: add pinning functionality to CodeInterpreter and WebSearch components, and enhance ToolsDropdown with pin toggle for web search and code interpreter

* chore: move MCPIcon to correct location, remove duplicate

* fix: update MCP import to use type-only import from librechat-data-provider

* feat: implement MCPSubMenu component and integrate pinning functionality into ToolsDropdown

* fix: cycling to submenu by using parent menu context

* feat: add FileSearch component and integrate it into BadgeRow and ToolsDropdown

* chore: import order

* chore: remove agent specific logic that would block functionality for streamlined endpoints

* chore: linting for `createContextHandlers`

* chore: ensure ToolsDropdown doesn't show up for agents

* chore: ensure tool resource is selected when dragged to UI

* chore: update file search behavior to simulate legacy functionality

* feat: ToolDialogs with multiple trigger references, add settings to tool dropdown

* refactor: simplify web search and code interpreter settings checks

* chore: simplify local storage key for pinned state in useToolToggle

* refactor: reinstate agent check in AttachFileChat component, as individual providers will have different file configurations

* ci: increase timeout for MongoDB connection in Agent tests
This commit is contained in:
Danny Avila 2025-06-23 09:59:05 -04:00 committed by GitHub
parent d835f48307
commit 01e9b196bc
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
67 changed files with 1468 additions and 1433 deletions

View file

@ -4,5 +4,6 @@ export * from './common';
export * from './events';
export * from './files';
export * from './generators';
export * from './llm';
export * from './openid';
export { default as Tokenizer } from './tokenizer';

View file

@ -0,0 +1,189 @@
import { extractLibreChatParams } from './llm';

describe('extractLibreChatParams', () => {
  it('should return defaults when options is undefined', () => {
    const result = extractLibreChatParams(undefined);

    expect(result.resendFiles).toBe(true);
    expect(result.promptPrefix).toBeUndefined();
    expect(result.maxContextTokens).toBeUndefined();
    expect(result.modelLabel).toBeUndefined();
    expect(result.modelOptions).toEqual({});
  });

  it('should return defaults when options is null', () => {
    // Fix: previously called with no argument, which duplicated the
    // `undefined` test above; pass `null` so the null branch is covered.
    const result = extractLibreChatParams(null);

    expect(result.resendFiles).toBe(true);
    expect(result.promptPrefix).toBeUndefined();
    expect(result.maxContextTokens).toBeUndefined();
    expect(result.modelLabel).toBeUndefined();
    expect(result.modelOptions).toEqual({});
  });

  it('should extract all LibreChat params and leave model options', () => {
    const options = {
      resendFiles: false,
      promptPrefix: 'You are a helpful assistant',
      maxContextTokens: 4096,
      modelLabel: 'GPT-4',
      model: 'gpt-4',
      temperature: 0.7,
      max_tokens: 1000,
    };

    const result = extractLibreChatParams(options);

    expect(result.resendFiles).toBe(false);
    expect(result.promptPrefix).toBe('You are a helpful assistant');
    expect(result.maxContextTokens).toBe(4096);
    expect(result.modelLabel).toBe('GPT-4');
    expect(result.modelOptions).toEqual({
      model: 'gpt-4',
      temperature: 0.7,
      max_tokens: 1000,
    });
  });

  it('should handle null values for LibreChat params', () => {
    const options = {
      resendFiles: true,
      promptPrefix: null,
      maxContextTokens: 2048,
      modelLabel: null,
      model: 'claude-3',
    };

    const result = extractLibreChatParams(options);

    // null values should be passed through unchanged (not replaced by defaults)
    expect(result.resendFiles).toBe(true);
    expect(result.promptPrefix).toBeNull();
    expect(result.maxContextTokens).toBe(2048);
    expect(result.modelLabel).toBeNull();
    expect(result.modelOptions).toEqual({
      model: 'claude-3',
    });
  });

  it('should use default for resendFiles when not provided', () => {
    const options = {
      promptPrefix: 'Test prefix',
      model: 'gpt-3.5-turbo',
      temperature: 0.5,
    };

    const result = extractLibreChatParams(options);

    expect(result.resendFiles).toBe(true); // Should use default
    expect(result.promptPrefix).toBe('Test prefix');
    expect(result.maxContextTokens).toBeUndefined();
    expect(result.modelLabel).toBeUndefined();
    expect(result.modelOptions).toEqual({
      model: 'gpt-3.5-turbo',
      temperature: 0.5,
    });
  });

  it('should handle empty options object', () => {
    const result = extractLibreChatParams({});

    expect(result.resendFiles).toBe(true); // Should use default
    expect(result.promptPrefix).toBeUndefined();
    expect(result.maxContextTokens).toBeUndefined();
    expect(result.modelLabel).toBeUndefined();
    expect(result.modelOptions).toEqual({});
  });

  it('should only extract known LibreChat params', () => {
    const options = {
      resendFiles: false,
      promptPrefix: 'Custom prompt',
      maxContextTokens: 8192,
      modelLabel: 'Custom Model',
      // Model options
      model: 'gpt-4',
      temperature: 0.9,
      top_p: 0.95,
      frequency_penalty: 0.5,
      presence_penalty: 0.5,
      // Unknown params should stay in modelOptions
      unknownParam: 'should remain',
      customSetting: 123,
    };

    const result = extractLibreChatParams(options);

    // LibreChat params extracted
    expect(result.resendFiles).toBe(false);
    expect(result.promptPrefix).toBe('Custom prompt');
    expect(result.maxContextTokens).toBe(8192);
    expect(result.modelLabel).toBe('Custom Model');

    // Model options should include everything else
    expect(result.modelOptions).toEqual({
      model: 'gpt-4',
      temperature: 0.9,
      top_p: 0.95,
      frequency_penalty: 0.5,
      presence_penalty: 0.5,
      unknownParam: 'should remain',
      customSetting: 123,
    });
  });

  it('should not mutate the original options object', () => {
    const options = {
      resendFiles: false,
      promptPrefix: 'Test',
      model: 'gpt-4',
      temperature: 0.7,
    };
    const originalOptions = { ...options };

    extractLibreChatParams(options);

    // Original object should remain unchanged
    expect(options).toEqual(originalOptions);
  });

  it('should handle undefined values for optional LibreChat params', () => {
    const options = {
      resendFiles: false,
      promptPrefix: undefined,
      maxContextTokens: undefined,
      modelLabel: undefined,
      model: 'claude-2',
    };

    const result = extractLibreChatParams(options);

    expect(result.resendFiles).toBe(false);
    expect(result.promptPrefix).toBeUndefined();
    expect(result.maxContextTokens).toBeUndefined();
    expect(result.modelLabel).toBeUndefined();
    expect(result.modelOptions).toEqual({
      model: 'claude-2',
    });
  });

  it('should handle mixed null and undefined values', () => {
    const options = {
      promptPrefix: null,
      maxContextTokens: undefined,
      modelLabel: null,
      model: 'gpt-3.5-turbo',
      stop: ['\\n', '\\n\\n'],
    };

    const result = extractLibreChatParams(options);

    expect(result.resendFiles).toBe(true); // default
    expect(result.promptPrefix).toBeNull();
    expect(result.maxContextTokens).toBeUndefined();
    expect(result.modelLabel).toBeNull();
    expect(result.modelOptions).toEqual({
      model: 'gpt-3.5-turbo',
      stop: ['\\n', '\\n\\n'],
    });
  });
});

View file

@ -0,0 +1,47 @@
import { librechat } from 'librechat-data-provider';
import type { DynamicSettingProps } from 'librechat-data-provider';
type LibreChatKeys = keyof typeof librechat;
type LibreChatParams = {
modelOptions: Omit<NonNullable<DynamicSettingProps['conversation']>, LibreChatKeys>;
resendFiles: boolean;
promptPrefix?: string | null;
maxContextTokens?: number;
modelLabel?: string | null;
};
/**
* Separates LibreChat-specific parameters from model options
* @param options - The combined options object
*/
export function extractLibreChatParams(
options?: DynamicSettingProps['conversation'],
): LibreChatParams {
if (!options) {
return {
modelOptions: {} as Omit<NonNullable<DynamicSettingProps['conversation']>, LibreChatKeys>,
resendFiles: librechat.resendFiles.default as boolean,
};
}
const modelOptions = { ...options };
const resendFiles =
(delete modelOptions.resendFiles, options.resendFiles) ??
(librechat.resendFiles.default as boolean);
const promptPrefix = (delete modelOptions.promptPrefix, options.promptPrefix);
const maxContextTokens = (delete modelOptions.maxContextTokens, options.maxContextTokens);
const modelLabel = (delete modelOptions.modelLabel, options.modelLabel);
return {
modelOptions: modelOptions as Omit<
NonNullable<DynamicSettingProps['conversation']>,
LibreChatKeys
>,
maxContextTokens,
promptPrefix,
resendFiles,
modelLabel,
};
}

View file

@ -70,8 +70,6 @@ export const revokeUserKey = (name: string) => `${keysEndpoint}/${name}`;
export const revokeAllUserKeys = () => `${keysEndpoint}?all=true`;
export const abortRequest = (endpoint: string) => `/api/ask/${endpoint}/abort`;
export const conversationsRoot = '/api/convos';
export const conversations = (params: q.ConversationListParams) => {

View file

@ -940,18 +940,10 @@ export const initialModelsConfig: TModelsConfig = {
[EModelEndpoint.bedrock]: defaultModels[EModelEndpoint.bedrock],
};
export const EndpointURLs: { [key in EModelEndpoint]: string } = {
[EModelEndpoint.openAI]: `/api/ask/${EModelEndpoint.openAI}`,
[EModelEndpoint.google]: `/api/ask/${EModelEndpoint.google}`,
[EModelEndpoint.custom]: `/api/ask/${EModelEndpoint.custom}`,
[EModelEndpoint.anthropic]: `/api/ask/${EModelEndpoint.anthropic}`,
[EModelEndpoint.gptPlugins]: `/api/ask/${EModelEndpoint.gptPlugins}`,
[EModelEndpoint.azureOpenAI]: `/api/ask/${EModelEndpoint.azureOpenAI}`,
[EModelEndpoint.chatGPTBrowser]: `/api/ask/${EModelEndpoint.chatGPTBrowser}`,
[EModelEndpoint.azureAssistants]: '/api/assistants/v1/chat',
export const EndpointURLs: Record<string, string> = {
[EModelEndpoint.assistants]: '/api/assistants/v2/chat',
[EModelEndpoint.azureAssistants]: '/api/assistants/v1/chat',
[EModelEndpoint.agents]: `/api/${EModelEndpoint.agents}/chat`,
[EModelEndpoint.bedrock]: `/api/${EModelEndpoint.bedrock}/chat`,
};
export const modularEndpoints = new Set<EModelEndpoint | string>([
@ -1451,10 +1443,18 @@ export enum LocalStorageKeys {
LAST_CODE_TOGGLE_ = 'LAST_CODE_TOGGLE_',
/** Last checked toggle for Web Search per conversation ID */
LAST_WEB_SEARCH_TOGGLE_ = 'LAST_WEB_SEARCH_TOGGLE_',
/** Last checked toggle for File Search per conversation ID */
LAST_FILE_SEARCH_TOGGLE_ = 'LAST_FILE_SEARCH_TOGGLE_',
/** Key for the last selected agent provider */
LAST_AGENT_PROVIDER = 'lastAgentProvider',
/** Key for the last selected agent model */
LAST_AGENT_MODEL = 'lastAgentModel',
/** Pin state for MCP tools per conversation ID */
PIN_MCP_ = 'PIN_MCP_',
/** Pin state for Web Search per conversation ID */
PIN_WEB_SEARCH_ = 'PIN_WEB_SEARCH_',
/** Pin state for Code Interpreter per conversation ID */
PIN_CODE_INTERPRETER_ = 'PIN_CODE_INTERPRETER_',
}
export enum ForkOptions {

View file

@ -13,27 +13,23 @@ export default function createPayload(submission: t.TSubmission) {
ephemeralAgent,
} = submission;
const { conversationId } = s.tConvoUpdateSchema.parse(conversation);
const { endpoint: _e, endpointType } = endpointOption as {
const { endpoint: _e } = endpointOption as {
endpoint: s.EModelEndpoint;
endpointType?: s.EModelEndpoint;
};
const endpoint = _e as s.EModelEndpoint;
let server = EndpointURLs[endpointType ?? endpoint];
const isEphemeral = s.isEphemeralAgent(endpoint, ephemeralAgent);
let server = `${EndpointURLs[s.EModelEndpoint.agents]}/${endpoint}`;
if (isEdited && s.isAssistantsEndpoint(endpoint)) {
server += '/modify';
} else if (isEdited) {
server = server.replace('/ask/', '/edit/');
} else if (isEphemeral) {
server = `${EndpointURLs[s.EModelEndpoint.agents]}/${endpoint}`;
}
const payload: t.TPayload = {
...userMessage,
...endpointOption,
endpoint,
ephemeralAgent: isEphemeral ? ephemeralAgent : undefined,
ephemeralAgent: s.isAssistantsEndpoint(endpoint) ? undefined : ephemeralAgent,
isContinued: !!(isEdited && isContinued),
conversationId,
isTemporary,

View file

@ -11,14 +11,6 @@ import request from './request';
import * as s from './schemas';
import * as r from './roles';
export function abortRequestWithMessage(
endpoint: string,
abortKey: string,
message: string,
): Promise<void> {
return request.post(endpoints.abortRequest(endpoint), { arg: { abortKey, message } });
}
export function revokeUserKey(name: string): Promise<unknown> {
return request.delete(endpoints.revokeUserKey(name));
}

View file

@ -83,7 +83,7 @@ const createDefinition = (
return { ...base, ...overrides } as SettingDefinition;
};
const librechat: Record<string, SettingDefinition> = {
export const librechat = {
modelLabel: {
key: 'modelLabel',
label: 'com_endpoint_custom_name',
@ -94,7 +94,7 @@ const librechat: Record<string, SettingDefinition> = {
placeholder: 'com_endpoint_openai_custom_name_placeholder',
placeholderCode: true,
optionType: 'conversation',
},
} as const,
maxContextTokens: {
key: 'maxContextTokens',
label: 'com_endpoint_context_tokens',
@ -107,7 +107,7 @@ const librechat: Record<string, SettingDefinition> = {
descriptionCode: true,
optionType: 'model',
columnSpan: 2,
},
} as const,
resendFiles: {
key: 'resendFiles',
label: 'com_endpoint_plug_resend_files',
@ -120,7 +120,7 @@ const librechat: Record<string, SettingDefinition> = {
optionType: 'conversation',
showDefault: false,
columnSpan: 2,
},
} as const,
promptPrefix: {
key: 'promptPrefix',
label: 'com_endpoint_prompt_prefix',
@ -131,7 +131,7 @@ const librechat: Record<string, SettingDefinition> = {
placeholder: 'com_endpoint_openai_prompt_prefix_placeholder',
placeholderCode: true,
optionType: 'model',
},
} as const,
};
const openAIParams: Record<string, SettingDefinition> = {

View file

@ -275,15 +275,11 @@ export const getResponseSender = (endpointOption: t.TEndpointOption): string =>
if (endpoint === EModelEndpoint.google) {
if (modelLabel) {
return modelLabel;
} else if (model && (model.includes('gemini') || model.includes('learnlm'))) {
return 'Gemini';
} else if (model?.toLowerCase().includes('gemma') === true) {
return 'Gemma';
} else if (model && model.includes('code')) {
return 'Codey';
}
return 'PaLM2';
return 'Gemini';
}
if (endpoint === EModelEndpoint.custom || endpointType === EModelEndpoint.custom) {

View file

@ -12,23 +12,6 @@ import { QueryKeys } from '../keys';
import * as s from '../schemas';
import * as t from '../types';
export const useAbortRequestWithMessage = (): UseMutationResult<
void,
Error,
{ endpoint: string; abortKey: string; message: string }
> => {
const queryClient = useQueryClient();
return useMutation(
({ endpoint, abortKey, message }) =>
dataService.abortRequestWithMessage(endpoint, abortKey, message),
{
onSuccess: () => {
queryClient.invalidateQueries([QueryKeys.balance]);
},
},
);
};
export const useGetSharedMessages = (
shareId: string,
config?: UseQueryOptions<t.TSharedMessagesResponse>,

View file

@ -3,7 +3,6 @@ import { Tools } from './types/assistants';
import type { TMessageContentParts, FunctionTool, FunctionToolCall } from './types/assistants';
import { TFeedback, feedbackSchema } from './feedback';
import type { SearchResultData } from './types/web';
import type { TEphemeralAgent } from './types';
import type { TFile } from './types/files';
export const isUUID = z.string().uuid();
@ -91,22 +90,6 @@ export const isAgentsEndpoint = (_endpoint?: EModelEndpoint.agents | null | stri
return endpoint === EModelEndpoint.agents;
};
export const isEphemeralAgent = (
endpoint?: EModelEndpoint.agents | null | string,
ephemeralAgent?: TEphemeralAgent | null,
) => {
if (!ephemeralAgent) {
return false;
}
if (isAgentsEndpoint(endpoint)) {
return false;
}
const hasMCPSelected = (ephemeralAgent?.mcp?.length ?? 0) > 0;
const hasCodeSelected = (ephemeralAgent?.execute_code ?? false) === true;
const hasSearchSelected = (ephemeralAgent?.web_search ?? false) === true;
return hasMCPSelected || hasCodeSelected || hasSearchSelected;
};
export const isParamEndpoint = (
endpoint: EModelEndpoint | string,
endpointType?: EModelEndpoint | string,

View file

@ -98,6 +98,7 @@ export type TEndpointOption = Pick<
export type TEphemeralAgent = {
mcp?: string[];
web_search?: boolean;
file_search?: boolean;
execute_code?: boolean;
};