🔦 feat: MCP Support for Non-Agent Endpoints (#6775)

* wip: mcp select

* refactor: Update useAvailableToolsQuery to support generic data types

* feat: Enhance MCPSelect to dynamically load server options and improve MultiSelect component styling

* WIP: ephemeral agents

* wip: Add null check for MCPSelect and improve MultiSelect focus handling

* feat: Pass conversationId prop to MCPSelect in BadgeRow to optimize badge rendering

* feat: add useApplyNewAgentTemplate hook to manage ephemeral agent state upon conversation creation

* WIP: eph. agent payload

* refactor(OpenAIClient): streamline message processing by replacing content handling with parseTextParts function

* feat: enhance applyAgentTemplate function to accept source conversation ID for improved template application

* feat(parsers): add skipReasoning parameter to parseTextParts for conditional reasoning handling

* WIP: first pass, ephemeral agent backend processing

* chore: import order

* feat: update loadEphemeralAgent and loadAgent functions to accept model_parameters for enhanced agent configuration

* feat: add showMCPServers prop to BadgeRow for conditional rendering of MCPSelect, fix React hooks rule violation

* feat: enhance MCPSelect with localized placeholder and custom icon, add renderSelectedValues callback

* feat: simplify message processing in AnthropicClient by replacing content handling with parseTextParts function

* feat: implement useLocalStorage hook for managing MCP values and update MCPSelect to utilize it

* chore: remove chatGPTBrowserSchema from endpoint schemas and update types for improved schema management

* chore: remove compactChatGPTSchema from endpoint schemas and update types for better schema management

* refactor: rename schemas for clarity and improve schema management

* feat: extend model detection to include 'codestral' alongside 'mistral'

* feat: add endpointType parameter to buildOptions and initializeClient functions

* fix: update condition for handling completion in BaseClient to include agents client

* refactor: simplify payload parsing logic in AgentClient and remove unused providerParsers

* refactor: change useSetRecoilState to useRecoilState for better state management in MCPSelect component

* refactor: streamline chat route handlers by consolidating middleware and improving endpoint structure

* style: update MCPSelect and MultiSelect components for improved layout in mobile view

* v0.7.790

* feat: add getMessageMapMethod to process message text and content in GoogleClient

* chore: include LAST_MCP_ key prefix in clearLocalStorage function for proper teardown on logout
Danny Avila 2025-04-07 19:16:56 -04:00 committed by GitHub
parent 018143b5cc
commit 910c73359b
31 changed files with 741 additions and 285 deletions

View file

@ -4,6 +4,7 @@ const {
Constants,
ErrorTypes,
EModelEndpoint,
parseTextParts,
anthropicSettings,
getResponseSender,
validateVisionModel,
@ -696,15 +697,8 @@ class AnthropicClient extends BaseClient {
if (msg.text != null && msg.text && msg.text.startsWith(':::thinking')) {
msg.text = msg.text.replace(/:::thinking.*?:::/gs, '').trim();
} else if (msg.content != null) {
/** @type {import('@librechat/agents').MessageContentComplex} */
const newContent = [];
for (let part of msg.content) {
if (part.think != null) {
continue;
}
newContent.push(part);
}
msg.content = newContent;
msg.text = parseTextParts(msg.content, true);
delete msg.content;
}
return msg;

View file

@ -676,7 +676,8 @@ class BaseClient {
responseMessage.text = addSpaceIfNeeded(generation) + completion;
} else if (
Array.isArray(completion) &&
isParamEndpoint(this.options.endpoint, this.options.endpointType)
(this.clientName === EModelEndpoint.agents ||
isParamEndpoint(this.options.endpoint, this.options.endpointType))
) {
responseMessage.text = '';
responseMessage.content = completion;

View file

@ -9,6 +9,7 @@ const {
validateVisionModel,
getResponseSender,
endpointSettings,
parseTextParts,
EModelEndpoint,
ContentTypes,
VisionModes,
@ -774,6 +775,22 @@ class GoogleClient extends BaseClient {
return this.usage;
}
getMessageMapMethod() {
/**
* @param {TMessage} msg
*/
return (msg) => {
if (msg.text != null && msg.text && msg.text.startsWith(':::thinking')) {
msg.text = msg.text.replace(/:::thinking.*?:::/gs, '').trim();
} else if (msg.content != null) {
msg.text = parseTextParts(msg.content, true);
delete msg.content;
}
return msg;
};
}
/**
* Calculates the correct token count for the current user message based on the token count map and API usage.
* Edge case: If the calculation results in a negative value, it returns the original estimate.

View file

@ -6,6 +6,7 @@ const {
Constants,
ImageDetail,
ContentTypes,
parseTextParts,
EModelEndpoint,
resolveHeaders,
KnownEndpoints,
@ -1121,15 +1122,8 @@ ${convo}
if (msg.text != null && msg.text && msg.text.startsWith(':::thinking')) {
msg.text = msg.text.replace(/:::thinking.*?:::/gs, '').trim();
} else if (msg.content != null) {
/** @type {import('@librechat/agents').MessageContentComplex} */
const newContent = [];
for (let part of msg.content) {
if (part.think != null) {
continue;
}
newContent.push(part);
}
msg.content = newContent;
msg.text = parseTextParts(msg.content, true);
delete msg.content;
}
return msg;

View file

@ -1,6 +1,8 @@
const mongoose = require('mongoose');
const { agentSchema } = require('@librechat/data-schemas');
const { SystemRoles } = require('librechat-data-provider');
const { GLOBAL_PROJECT_NAME } = require('librechat-data-provider').Constants;
const { GLOBAL_PROJECT_NAME, EPHEMERAL_AGENT_ID, mcp_delimiter } =
require('librechat-data-provider').Constants;
const { CONFIG_STORE, STARTUP_CONFIG } = require('librechat-data-provider').CacheKeys;
const {
getProjectByName,
@ -9,7 +11,6 @@ const {
removeAgentFromAllProjects,
} = require('./Project');
const getLogStores = require('~/cache/getLogStores');
const { agentSchema } = require('@librechat/data-schemas');
const Agent = mongoose.model('agent', agentSchema);
@ -39,9 +40,55 @@ const getAgent = async (searchParameter) => await Agent.findOne(searchParameter)
* @param {Object} params
* @param {ServerRequest} params.req
* @param {string} params.agent_id
* @param {string} params.endpoint
* @param {import('@librechat/agents').ClientOptions} [params.model_parameters]
* @returns {Agent|null} The agent document as a plain object, or null if not found.
*/
const loadEphemeralAgent = ({ req, agent_id, endpoint, model_parameters: _m }) => {
const { model, ...model_parameters } = _m;
/** @type {Record<string, FunctionTool>} */
const availableTools = req.app.locals.availableTools;
const mcpServers = new Set(req.body.ephemeralAgent?.mcp);
/** @type {string[]} */
const tools = [];
for (const toolName of Object.keys(availableTools)) {
if (!toolName.includes(mcp_delimiter)) {
continue;
}
const mcpServer = toolName.split(mcp_delimiter)?.[1];
if (mcpServer && mcpServers.has(mcpServer)) {
tools.push(toolName);
}
}
const instructions = req.body.promptPrefix;
return {
id: agent_id,
instructions,
provider: endpoint,
model_parameters,
model,
tools,
};
};
/**
* Load an agent based on the provided ID
*
* @param {Object} params
* @param {ServerRequest} params.req
* @param {string} params.agent_id
* @param {string} params.endpoint
* @param {import('@librechat/agents').ClientOptions} [params.model_parameters]
* @returns {Promise<Agent|null>} The agent document as a plain object, or null if not found.
*/
const loadAgent = async ({ req, agent_id }) => {
const loadAgent = async ({ req, agent_id, endpoint, model_parameters }) => {
if (!agent_id) {
return null;
}
if (agent_id === EPHEMERAL_AGENT_ID) {
return loadEphemeralAgent({ req, agent_id, endpoint, model_parameters });
}
const agent = await getAgent({
id: agent_id,
});
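
A minimal sketch (TypeScript, names illustrative) of the tool-selection step loadEphemeralAgent performs above: tool keys containing the MCP delimiter are kept only when their server is listed in the request's ephemeralAgent.mcp. The real registry lives in req.app.locals.availableTools and the delimiter is Constants.mcp_delimiter; the tool and server names below are hypothetical.

// Hypothetical tool keys and server names; only the filtering logic mirrors the code above.
const MCP_DELIMITER = '_mcp_';

function selectMcpTools(availableToolKeys: string[], requestedServers: string[]): string[] {
  const servers = new Set(requestedServers);
  return availableToolKeys.filter((toolKey) => {
    if (!toolKey.includes(MCP_DELIMITER)) {
      return false;
    }
    const server = toolKey.split(MCP_DELIMITER)[1];
    return server != null && servers.has(server);
  });
}

// A request with ephemeralAgent.mcp = ['filesystem'] keeps only tools registered under that server:
selectMcpTools(['read_file_mcp_filesystem', 'query_mcp_postgres', 'web_search'], ['filesystem']);
// -> ['read_file_mcp_filesystem']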

View file

@ -20,11 +20,9 @@ const {
const {
Constants,
VisionModes,
openAISchema,
ContentTypes,
EModelEndpoint,
KnownEndpoints,
anthropicSchema,
isAgentsEndpoint,
AgentCapabilities,
bedrockInputSchema,
@ -43,11 +41,18 @@ const { createRun } = require('./run');
/** @typedef {import('@librechat/agents').MessageContentComplex} MessageContentComplex */
/** @typedef {import('@langchain/core/runnables').RunnableConfig} RunnableConfig */
const providerParsers = {
[EModelEndpoint.openAI]: openAISchema.parse,
[EModelEndpoint.azureOpenAI]: openAISchema.parse,
[EModelEndpoint.anthropic]: anthropicSchema.parse,
[EModelEndpoint.bedrock]: bedrockInputSchema.parse,
/**
* @param {ServerRequest} req
* @param {Agent} agent
* @param {string} endpoint
*/
const payloadParser = ({ req, agent, endpoint }) => {
if (isAgentsEndpoint(endpoint)) {
return { model: undefined };
} else if (endpoint === EModelEndpoint.bedrock) {
return bedrockInputSchema.parse(agent.model_parameters);
}
return req.body.endpointOption.model_parameters;
};
const legacyContentEndpoints = new Set([KnownEndpoints.groq, KnownEndpoints.deepseek]);
@ -180,28 +185,19 @@ class AgentClient extends BaseClient {
}
getSaveOptions() {
const parseOptions = providerParsers[this.options.endpoint];
let runOptions =
this.options.endpoint === EModelEndpoint.agents
? {
model: undefined,
// TODO:
// would need to be override settings; otherwise, model needs to be undefined
// model: this.override.model,
// instructions: this.override.instructions,
// additional_instructions: this.override.additional_instructions,
}
: {};
if (parseOptions) {
try {
runOptions = parseOptions(this.options.agent.model_parameters);
} catch (error) {
logger.error(
'[api/server/controllers/agents/client.js #getSaveOptions] Error parsing options',
error,
);
}
// TODO:
// would need to be override settings; otherwise, model needs to be undefined
// model: this.override.model,
// instructions: this.override.instructions,
// additional_instructions: this.override.additional_instructions,
let runOptions = {};
try {
runOptions = payloadParser(this.options);
} catch (error) {
logger.error(
'[api/server/controllers/agents/client.js #getSaveOptions] Error parsing options',
error,
);
}
return removeNullishValues(

View file

@ -1,6 +1,11 @@
const { parseCompactConvo, EModelEndpoint, isAgentsEndpoint } = require('librechat-data-provider');
const { getModelsConfig } = require('~/server/controllers/ModelController');
const {
parseCompactConvo,
EModelEndpoint,
isAgentsEndpoint,
EndpointURLs,
} = require('librechat-data-provider');
const azureAssistants = require('~/server/services/Endpoints/azureAssistants');
const { getModelsConfig } = require('~/server/controllers/ModelController');
const assistants = require('~/server/services/Endpoints/assistants');
const gptPlugins = require('~/server/services/Endpoints/gptPlugins');
const { processFiles } = require('~/server/services/Files/process');
@ -77,8 +82,9 @@ async function buildEndpointOption(req, res, next) {
}
try {
const isAgents = isAgentsEndpoint(endpoint);
const endpointFn = buildFunction[endpointType ?? endpoint];
const isAgents =
isAgentsEndpoint(endpoint) || req.baseUrl.startsWith(EndpointURLs[EModelEndpoint.agents]);
const endpointFn = buildFunction[isAgents ? EModelEndpoint.agents : (endpointType ?? endpoint)];
const builder = isAgents ? (...args) => endpointFn(req, ...args) : endpointFn;
// TODO: use object params

View file

@ -20,24 +20,33 @@ router.post('/abort', handleAbort());
const checkAgentAccess = generateCheckAccess(PermissionTypes.AGENTS, [Permissions.USE]);
router.use(checkAgentAccess);
router.use(validateConvoAccess);
router.use(buildEndpointOption);
router.use(setHeaders);
const controller = async (req, res, next) => {
await AgentController(req, res, next, initializeClient, addTitle);
};
/**
* @route POST /
* @route POST / (regular endpoint)
* @desc Chat with an assistant
* @access Public
* @param {express.Request} req - The request object, containing the request data.
* @param {express.Response} res - The response object, used to send back a response.
* @returns {void}
*/
router.post(
'/',
// validateModel,
checkAgentAccess,
validateConvoAccess,
buildEndpointOption,
setHeaders,
async (req, res, next) => {
await AgentController(req, res, next, initializeClient, addTitle);
},
);
router.post('/', controller);
/**
* @route POST /:endpoint (ephemeral agents)
* @desc Chat with an assistant
* @access Public
* @param {express.Request} req - The request object, containing the request data.
* @param {express.Response} res - The response object, used to send back a response.
* @returns {void}
*/
router.post('/:endpoint', controller);
module.exports = router;

View file

@ -1,12 +1,15 @@
const { isAgentsEndpoint, Constants } = require('librechat-data-provider');
const { loadAgent } = require('~/models/Agent');
const { logger } = require('~/config');
const buildOptions = (req, endpoint, parsedBody) => {
const buildOptions = (req, endpoint, parsedBody, endpointType) => {
const { spec, iconURL, agent_id, instructions, maxContextTokens, ...model_parameters } =
parsedBody;
const agentPromise = loadAgent({
req,
agent_id,
agent_id: isAgentsEndpoint(endpoint) ? agent_id : Constants.EPHEMERAL_AGENT_ID,
endpoint,
model_parameters,
}).catch((error) => {
logger.error(`[/agents/:${agent_id}] Error retrieving agent during build options step`, error);
return undefined;
@ -17,6 +20,7 @@ const buildOptions = (req, endpoint, parsedBody) => {
iconURL,
endpoint,
agent_id,
endpointType,
instructions,
maxContextTokens,
model_parameters,

View file

@ -1,5 +1,6 @@
const { createContentAggregator, Providers } = require('@librechat/agents');
const {
Constants,
ErrorTypes,
EModelEndpoint,
getResponseSender,
@ -322,10 +323,14 @@ const initializeClient = async ({ req, res, endpointOption }) => {
agent: primaryConfig,
spec: endpointOption.spec,
iconURL: endpointOption.iconURL,
endpoint: EModelEndpoint.agents,
attachments: primaryConfig.attachments,
endpointType: endpointOption.endpointType,
maxContextTokens: primaryConfig.maxContextTokens,
resendFiles: primaryConfig.model_parameters?.resendFiles ?? true,
endpoint:
primaryConfig.id === Constants.EPHEMERAL_AGENT_ID
? primaryConfig.endpoint
: EModelEndpoint.agents,
});
return { client };

View file

@ -1,4 +1,5 @@
import React, {
memo,
useState,
useRef,
useEffect,
@ -12,11 +13,14 @@ import type { LucideIcon } from 'lucide-react';
import type { BadgeItem } from '~/common';
import { useChatBadges } from '~/hooks';
import { Badge } from '~/components/ui';
import MCPSelect from './MCPSelect';
import store from '~/store';
interface BadgeRowProps {
showMCPServers?: boolean;
onChange: (badges: Pick<BadgeItem, 'id'>[]) => void;
onToggle?: (badgeId: string, currentActive: boolean) => void;
conversationId?: string | null;
isInChat: boolean;
}
@ -33,7 +37,8 @@ interface BadgeWrapperProps {
const BadgeWrapper = React.memo(
forwardRef<HTMLDivElement, BadgeWrapperProps>(
({ badge, isEditing, isInChat, onToggle, onDelete, onMouseDown, badgeRefs }, ref) => {
const isActive = badge.atom ? useRecoilValue(badge.atom) : false;
const atomBadge = useRecoilValue(badge.atom);
const isActive = badge.atom ? atomBadge : false;
return (
<div
@ -126,7 +131,7 @@ const dragReducer = (state: DragState, action: DragAction): DragState => {
}
};
export function BadgeRow({ onChange, onToggle, isInChat }: BadgeRowProps) {
function BadgeRow({ showMCPServers, conversationId, onChange, onToggle, isInChat }: BadgeRowProps) {
const [orderedBadges, setOrderedBadges] = useState<BadgeItem[]>([]);
const [dragState, dispatch] = useReducer(dragReducer, {
draggedBadge: null,
@ -340,6 +345,7 @@ export function BadgeRow({ onChange, onToggle, isInChat }: BadgeRowProps) {
/>
</div>
)}
{showMCPServers === true && <MCPSelect conversationId={conversationId} />}
{ghostBadge && (
<div
className="ghost-badge h-full"
@ -367,3 +373,5 @@ export function BadgeRow({ onChange, onToggle, isInChat }: BadgeRowProps) {
</div>
);
}
export default memo(BadgeRow);

View file

@ -1,7 +1,7 @@
import { memo, useRef, useMemo, useEffect, useState, useCallback } from 'react';
import { useWatch } from 'react-hook-form';
import { useRecoilState, useRecoilValue } from 'recoil';
import { Constants, isAssistantsEndpoint } from 'librechat-data-provider';
import { Constants, isAssistantsEndpoint, isAgentsEndpoint } from 'librechat-data-provider';
import {
useChatContext,
useChatFormContext,
@ -28,8 +28,8 @@ import CollapseChat from './CollapseChat';
import StreamAudio from './StreamAudio';
import StopButton from './StopButton';
import SendButton from './SendButton';
import { BadgeRow } from './BadgeRow';
import EditBadges from './EditBadges';
import BadgeRow from './BadgeRow';
import Mention from './Mention';
import store from '~/store';
@ -289,7 +289,9 @@ const ChatForm = memo(({ index = 0 }: { index?: number }) => {
<AttachFileChat disableInputs={disableInputs} />
</div>
<BadgeRow
onChange={(newBadges) => setBadges(newBadges)}
showMCPServers={!isAgentsEndpoint(endpoint) && !isAssistantsEndpoint(endpoint)}
conversationId={conversation?.conversationId ?? Constants.NEW_CONVO}
onChange={setBadges}
isInChat={
Array.isArray(conversation?.messages) && conversation.messages.length >= 1
}

View file

@ -0,0 +1,82 @@
import React, { memo, useCallback } from 'react';
import { useRecoilState } from 'recoil';
import { Constants, EModelEndpoint, LocalStorageKeys } from 'librechat-data-provider';
import { useAvailableToolsQuery } from '~/data-provider';
import useLocalStorage from '~/hooks/useLocalStorageAlt';
import MultiSelect from '~/components/ui/MultiSelect';
import { ephemeralAgentByConvoId } from '~/store';
import MCPIcon from '~/components/ui/MCPIcon';
import { useLocalize } from '~/hooks';
function MCPSelect({ conversationId }: { conversationId?: string | null }) {
const localize = useLocalize();
const key = conversationId ?? Constants.NEW_CONVO;
const [ephemeralAgent, setEphemeralAgent] = useRecoilState(ephemeralAgentByConvoId(key));
const setSelectedValues = useCallback(
(values: string[] | null | undefined) => {
if (!values) {
return;
}
if (!Array.isArray(values)) {
return;
}
setEphemeralAgent((prev) => ({
...prev,
mcp: values,
}));
},
[setEphemeralAgent],
);
const [mcpValues, setMCPValues] = useLocalStorage<string[]>(
`${LocalStorageKeys.LAST_MCP_}${key}`,
ephemeralAgent?.mcp ?? [],
setSelectedValues,
);
const { data: mcpServers } = useAvailableToolsQuery(EModelEndpoint.agents, {
select: (data) => {
const serverNames = new Set<string>();
data.forEach((tool) => {
if (tool.pluginKey.includes(Constants.mcp_delimiter)) {
const parts = tool.pluginKey.split(Constants.mcp_delimiter);
serverNames.add(parts[parts.length - 1]);
}
});
return [...serverNames];
},
});
const renderSelectedValues = useCallback(
(values: string[], placeholder?: string) => {
if (values.length === 0) {
return placeholder || localize('com_ui_select') + '...';
}
if (values.length === 1) {
return values[0];
}
return localize('com_ui_x_selected', { 0: values.length });
},
[localize],
);
if (!mcpServers || mcpServers.length === 0) {
return null;
}
return (
<MultiSelect
items={mcpServers ?? []}
selectedValues={mcpValues ?? []}
setSelectedValues={setMCPValues}
defaultSelectedValues={mcpValues ?? []}
renderSelectedValues={renderSelectedValues}
placeholder={localize('com_ui_mcp_servers')}
popoverClassName="min-w-[200px]"
className="badge-icon h-full min-w-[150px]"
selectIcon={<MCPIcon className="icon-md text-text-primary" />}
selectItemsClassName="border border-blue-600/50 bg-blue-500/10 hover:bg-blue-700/10"
selectClassName="group relative inline-flex items-center justify-center md:justify-start gap-1.5 rounded-full border border-border-medium text-sm font-medium transition-shadow md:w-full size-9 p-2 md:p-3 bg-surface-chat shadow-sm hover:bg-surface-hover hover:shadow-md active:shadow-inner"
/>
);
}
export default memo(MCPSelect);

View file

@ -0,0 +1,31 @@
export default function MCPIcon({ className }: { className?: string }) {
return (
<svg
width="195"
height="195"
viewBox="0 2 195 195"
fill="none"
xmlns="http://www.w3.org/2000/svg"
className={className}
>
<path
d="M25 97.8528L92.8823 29.9706C102.255 20.598 117.451 20.598 126.823 29.9706V29.9706C136.196 39.3431 136.196 54.5391 126.823 63.9117L75.5581 115.177"
stroke="currentColor"
strokeWidth="12"
strokeLinecap="round"
/>
<path
d="M76.2653 114.47L126.823 63.9117C136.196 54.5391 151.392 54.5391 160.765 63.9117L161.118 64.2652C170.491 73.6378 170.491 88.8338 161.118 98.2063L99.7248 159.6C96.6006 162.724 96.6006 167.789 99.7248 170.913L112.331 183.52"
stroke="currentColor"
strokeWidth="12"
strokeLinecap="round"
/>
<path
d="M109.853 46.9411L59.6482 97.1457C50.2757 106.518 50.2757 121.714 59.6482 131.087V131.087C69.0208 140.459 84.2168 140.459 93.5894 131.087L143.794 80.8822"
stroke="currentColor"
strokeWidth="12"
strokeLinecap="round"
/>
</svg>
);
}

View file

@ -0,0 +1,128 @@
import React, { useRef } from 'react';
import {
Select,
SelectArrow,
SelectItem,
SelectItemCheck,
SelectLabel,
SelectPopover,
SelectProvider,
} from '@ariakit/react';
import { cn } from '~/utils';
interface MultiSelectProps<T extends string> {
items: T[];
label?: string;
placeholder?: string;
defaultSelectedValues?: T[];
onSelectedValuesChange?: (values: T[]) => void;
renderSelectedValues?: (values: T[], placeholder?: string) => React.ReactNode;
className?: string;
itemClassName?: string;
labelClassName?: string;
selectClassName?: string;
selectIcon?: React.ReactNode;
popoverClassName?: string;
selectItemsClassName?: string;
selectedValues: T[];
setSelectedValues: (values: T[]) => void;
}
function defaultRender<T extends string>(values: T[], placeholder?: string) {
if (values.length === 0) {
return placeholder || 'Select...';
}
if (values.length === 1) {
return values[0];
}
return `${values.length} items selected`;
}
export default function MultiSelect<T extends string>({
items,
label,
placeholder = 'Select...',
defaultSelectedValues = [],
onSelectedValuesChange,
renderSelectedValues = defaultRender,
className,
selectIcon,
itemClassName,
labelClassName,
selectClassName,
popoverClassName,
selectItemsClassName,
selectedValues = [],
setSelectedValues,
}: MultiSelectProps<T>) {
const selectRef = useRef<HTMLButtonElement>(null);
// const [selectedValues, setSelectedValues] = React.useState<T[]>(defaultSelectedValues);
const handleValueChange = (values: T[]) => {
setSelectedValues(values);
if (onSelectedValuesChange) {
onSelectedValuesChange(values);
}
};
return (
<div className={cn('h-full', className)}>
<SelectProvider value={selectedValues} setValue={handleValueChange}>
{label && (
<SelectLabel className={cn('mb-1 block text-sm text-text-primary', labelClassName)}>
{label}
</SelectLabel>
)}
<Select
ref={selectRef}
className={cn(
'flex items-center justify-between gap-2 rounded-xl px-3 py-2 text-sm',
'bg-surface-tertiary text-text-primary shadow-sm hover:cursor-pointer hover:bg-surface-hover',
'outline-none focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-white focus-visible:ring-opacity-75',
selectClassName,
selectedValues.length > 0 && selectItemsClassName != null && selectItemsClassName,
)}
>
{selectIcon && selectIcon}
<span className="hidden truncate md:block">
{renderSelectedValues(selectedValues, placeholder)}
</span>
<SelectArrow className="ml-1 hidden stroke-1 text-base opacity-75 md:block" />
</Select>
<SelectPopover
gutter={4}
sameWidth
modal
unmountOnHide
finalFocus={selectRef}
className={cn(
'animate-popover z-50 flex max-h-[300px]',
'flex-col overflow-auto overscroll-contain rounded-xl',
'bg-surface-secondary px-1.5 py-1 text-text-primary shadow-lg',
'border border-border-light',
'outline-none',
popoverClassName,
)}
>
{items.map((value) => (
<SelectItem
key={value}
value={value}
className={cn(
'flex items-center gap-2 rounded-lg px-2 py-1.5 hover:cursor-pointer',
'scroll-m-1 outline-none transition-colors',
'hover:bg-black/[0.075] dark:hover:bg-white/10',
'data-[active-item]:bg-black/[0.075] dark:data-[active-item]:bg-white/10',
'w-full min-w-0 text-sm',
itemClassName,
)}
>
<SelectItemCheck className="text-primary" />
<span className="truncate">{value}</span>
</SelectItem>
))}
</SelectPopover>
</SelectProvider>
</div>
);
}

View file

@ -27,6 +27,7 @@ export * from './Pagination';
export * from './Progress';
export * from './InputOTP';
export { default as Badge } from './Badge';
export { default as MCPIcon } from './MCPIcon';
export { default as Combobox } from './Combobox';
export { default as Dropdown } from './Dropdown';
export { default as SplitText } from './SplitText';

View file

@ -191,9 +191,10 @@ export const useConversationTagsQuery = (
/**
* Hook for getting all available tools for Assistants
*/
export const useAvailableToolsQuery = (
export const useAvailableToolsQuery = <TData = t.TPlugin[]>(
endpoint: t.AssistantsEndpoint | EModelEndpoint.agents,
): QueryObserverResult<TPlugin[]> => {
config?: UseQueryOptions<t.TPlugin[], unknown, TData>,
): QueryObserverResult<TData> => {
const queryClient = useQueryClient();
const endpointsConfig = queryClient.getQueryData<TEndpointsConfig>([QueryKeys.endpoints]);
const keyExpiry = queryClient.getQueryData<TCheckUserKeyResponse>([QueryKeys.name, endpoint]);
@ -202,7 +203,7 @@ export const useAvailableToolsQuery = (
const enabled = !!endpointsConfig?.[endpoint] && keyProvided;
const version: string | number | undefined =
endpointsConfig?.[endpoint]?.version ?? defaultAssistantsVersion[endpoint];
return useQuery<TPlugin[]>(
return useQuery<t.TPlugin[], unknown, TData>(
[QueryKeys.tools],
() => dataService.getAvailableTools(endpoint, version),
{
@ -210,6 +211,7 @@ export const useAvailableToolsQuery = (
refetchOnReconnect: false,
refetchOnMount: false,
enabled,
...config,
},
);
};

View file

@ -20,10 +20,10 @@ import type { SetterOrUpdater } from 'recoil';
import type { TAskFunction, ExtendedFile } from '~/common';
import useSetFilesToDelete from '~/hooks/Files/useSetFilesToDelete';
import useGetSender from '~/hooks/Conversations/useGetSender';
import store, { useGetEphemeralAgent } from '~/store';
import { getArtifactsMode } from '~/utils/artifacts';
import { getEndpointField, logger } from '~/utils';
import useUserKey from '~/hooks/Input/useUserKey';
import store from '~/store';
const logChatRequest = (request: Record<string, unknown>) => {
logger.log('=====================================\nAsk function called with:');
@ -64,6 +64,7 @@ export default function useChatFunctions({
setSubmission: SetterOrUpdater<TSubmission | null>;
setLatestMessage?: SetterOrUpdater<TMessage | null>;
}) {
const getEphemeralAgent = useGetEphemeralAgent();
const codeArtifacts = useRecoilValue(store.codeArtifacts);
const includeShadcnui = useRecoilValue(store.includeShadcnui);
const customPromptMode = useRecoilValue(store.customPromptMode);
@ -118,6 +119,7 @@ export default function useChatFunctions({
return;
}
const ephemeralAgent = getEphemeralAgent(conversationId ?? Constants.NEW_CONVO);
const isEditOrContinue = isEdited || isContinued;
let currentMessages: TMessage[] | null = overrideMessages ?? getMessages() ?? [];
@ -297,6 +299,7 @@ export default function useChatFunctions({
isRegenerate,
initialResponse,
isTemporary,
ephemeralAgent,
};
if (isRegenerate) {

View file

@ -31,11 +31,11 @@ import {
} from '~/utils';
import useAttachmentHandler from '~/hooks/SSE/useAttachmentHandler';
import useContentHandler from '~/hooks/SSE/useContentHandler';
import store, { useApplyNewAgentTemplate } from '~/store';
import useStepHandler from '~/hooks/SSE/useStepHandler';
import { useAuthContext } from '~/hooks/AuthContext';
import { MESSAGE_UPDATE_INTERVAL } from '~/common';
import { useLiveAnnouncer } from '~/Providers';
import store from '~/store';
type TSyncData = {
sync: boolean;
@ -140,8 +140,9 @@ export default function useEventHandlers({
resetLatestMessage,
}: EventHandlerParams) {
const queryClient = useQueryClient();
const setAbortScroll = useSetRecoilState(store.abortScroll);
const { announcePolite } = useLiveAnnouncer();
const applyAgentTemplate = useApplyNewAgentTemplate();
const setAbortScroll = useSetRecoilState(store.abortScroll);
const lastAnnouncementTimeRef = useRef(Date.now());
const { conversationId: paramId } = useParams();
@ -364,6 +365,9 @@ export default function useEventHandlers({
});
let update = {} as TConversation;
if (conversationId) {
applyAgentTemplate(conversationId, submission.conversation.conversationId);
}
if (setConversation && !isAddedRequest) {
setConversation((prevState) => {
let title = prevState?.title;

View file

@ -0,0 +1,61 @@
/* `useLocalStorage`
*
* Features:
* - JSON Serializing
* - The value is kept in sync everywhere the key is used (via the `storage` event)
*/
import { useEffect, useState } from 'react';
export default function useLocalStorage<T>(
key: string,
defaultValue: T,
globalSetState?: (value: T) => void,
): [T, (value: T) => void] {
const [value, setValue] = useState(defaultValue);
useEffect(() => {
const item = localStorage.getItem(key);
if (!item) {
localStorage.setItem(key, JSON.stringify(defaultValue));
}
const initialValue = item ? JSON.parse(item) : defaultValue;
setValue(initialValue);
if (globalSetState) {
globalSetState(initialValue);
}
function handler(e: StorageEvent) {
if (e.key !== key) {
return;
}
const lsi = localStorage.getItem(key);
setValue(JSON.parse(lsi ?? ''));
}
window.addEventListener('storage', handler);
return () => {
window.removeEventListener('storage', handler);
};
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [key, globalSetState]);
const setValueWrap = (value: T) => {
try {
setValue(value);
localStorage.setItem(key, JSON.stringify(value));
if (typeof window !== 'undefined') {
window.dispatchEvent(new StorageEvent('storage', { key }));
}
globalSetState?.(value);
} catch (e) {
console.error(e);
}
};
return [value, setValueWrap];
}
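
A hypothetical usage of the hook above, mirroring how MCPSelect consumes it: the key combines the LAST_MCP_ prefix with the conversation ID, and the optional third argument mirrors updates into other state. Component and callback names here are illustrative.

import React from 'react';
import useLocalStorage from '~/hooks/useLocalStorageAlt';

function ExampleMcpPersistence({ conversationId }: { conversationId: string }) {
  const [servers, setServers] = useLocalStorage<string[]>(
    `LAST_MCP_${conversationId}`, // persisted per conversation
    [], // default when nothing is stored yet
    (value) => console.log('mirrored elsewhere:', value), // optional globalSetState callback
  );
  // Selecting a server persists it to localStorage and notifies listeners via the storage event.
  return (
    <button onClick={() => setServers([...servers, 'filesystem'])}>
      {servers.length} selected
    </button>
  );
}

export default ExampleMcpPersistence;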

View file

@ -689,6 +689,7 @@
"com_ui_include_shadcnui_agent": "Include shadcn/ui instructions",
"com_ui_input": "Input",
"com_ui_instructions": "Instructions",
"com_ui_x_selected": "{{0}} selected",
"com_ui_late_night": "Happy late night",
"com_ui_latest_footer": "Every AI for Everyone.",
"com_ui_latest_production_version": "Latest production version",
@ -701,6 +702,7 @@
"com_ui_logo": "{{0}} Logo",
"com_ui_manage": "Manage",
"com_ui_max_tags": "Maximum number allowed is {{0}}, using latest values.",
"com_ui_mcp_servers": "MCP Servers",
"com_ui_mention": "Mention an endpoint, assistant, or preset to quickly switch to it",
"com_ui_min_tags": "Cannot remove more values, a minimum of {{0}} are required.",
"com_ui_misc": "Misc.",
@ -855,12 +857,6 @@
"com_ui_write": "Writing",
"com_ui_yes": "Yes",
"com_ui_zoom": "Zoom",
"com_ui_save_badge_changes": "Save badge changes?",
"com_ui_late_night": "Happy late night",
"com_ui_weekend_morning": "Happy weekend",
"com_ui_good_morning": "Good morning",
"com_ui_good_afternoon": "Good afternoon",
"com_ui_good_evening": "Good evening",
"com_endpoint_deprecated": "Deprecated",
"com_endpoint_deprecated_info": "This endpoint is deprecated and may be removed in future versions, please use the agent endpoint instead",
"com_endpoint_deprecated_info_a11y": "The plugin endpoint is deprecated and may be removed in future versions, please use the agent endpoint instead",

View file

@ -0,0 +1,88 @@
import { Constants } from 'librechat-data-provider';
import { atomFamily, useRecoilCallback } from 'recoil';
import type { TEphemeralAgent } from 'librechat-data-provider';
import { logger } from '~/utils';
export const ephemeralAgentByConvoId = atomFamily<TEphemeralAgent | null, string>({
key: 'ephemeralAgentByConvoId',
default: null,
effects: [
({ onSet, node }) => {
onSet(async (newValue) => {
const conversationId = node.key.split('__')[1]?.replaceAll('"', '');
logger.log('agents', 'Setting ephemeral agent:', { conversationId, newValue });
});
},
] as const,
});
/**
* Creates a callback function to apply the ephemeral agent state
* from the "new" conversation template to a specified conversation ID.
*/
export function useApplyNewAgentTemplate() {
const applyTemplate = useRecoilCallback(
({ snapshot, set }) =>
async (targetId: string, _sourceId: string | null = Constants.NEW_CONVO) => {
const sourceId = _sourceId || Constants.NEW_CONVO;
logger.log('agents', `Attempting to apply template from "${sourceId}" to "${targetId}"`);
if (targetId === sourceId) {
logger.warn('agents', `Attempted to apply template to itself ("${sourceId}"). Skipping.`);
return;
}
try {
// 1. Get the current agent state from the "new" conversation template using snapshot
// getPromise reads the value without subscribing
const agentTemplate = await snapshot.getPromise(ephemeralAgentByConvoId(sourceId));
// 2. Check if a template state actually exists
if (agentTemplate) {
logger.log('agents', `Applying agent template to "${targetId}":`, agentTemplate);
// 3. Set the state for the target conversation ID using the template value
set(ephemeralAgentByConvoId(targetId), agentTemplate);
} else {
// 4. Handle the case where the "new" template has no agent state (is null)
logger.warn(
'agents',
`Agent template from "${sourceId}" is null or unset. Setting agent for "${targetId}" to null.`,
);
// Explicitly set to null (or a default empty state if preferred)
set(ephemeralAgentByConvoId(targetId), null);
// Example: Or set to a default empty state:
// set(ephemeralAgentByConvoId(targetId), { mcp: [] });
}
} catch (error) {
logger.error(
'agents',
`Error applying agent template from "${sourceId}" to "${targetId}":`,
error,
);
set(ephemeralAgentByConvoId(targetId), null);
}
},
[],
);
return applyTemplate;
}
/**
* Creates a callback function to get the current ephemeral agent state
* for a specified conversation ID without subscribing the component.
* Returns a Loadable object synchronously.
*/
export function useGetEphemeralAgent() {
const getEphemeralAgent = useRecoilCallback(
({ snapshot }) =>
(conversationId: string): TEphemeralAgent | null => {
logger.log('agents', `[useGetEphemeralAgent] Getting loadable for ID: ${conversationId}`);
const agentLoadable = snapshot.getLoadable(ephemeralAgentByConvoId(conversationId));
return agentLoadable.contents as TEphemeralAgent | null;
},
[],
);
return getEphemeralAgent;
}

View file

@ -12,6 +12,7 @@ import lang from './language';
import settings from './settings';
import misc from './misc';
import isTemporary from './temporary';
export * from './agents';
export default {
...artifacts,

View file

@ -31,6 +31,7 @@ export function clearLocalStorage(skipFirst?: boolean) {
return;
}
if (
key.startsWith(LocalStorageKeys.LAST_MCP_) ||
key.startsWith(LocalStorageKeys.ASST_ID_PREFIX) ||
key.startsWith(LocalStorageKeys.AGENT_ID_PREFIX) ||
key.startsWith(LocalStorageKeys.LAST_CONVO_SETUP) ||

package-lock.json (generated)
View file

@ -43615,7 +43615,7 @@
},
"packages/data-provider": {
"name": "librechat-data-provider",
"version": "0.7.789",
"version": "0.7.790",
"license": "ISC",
"dependencies": {
"axios": "^1.8.2",

View file

@ -1,6 +1,6 @@
{
"name": "librechat-data-provider",
"version": "0.7.789",
"version": "0.7.790",
"description": "data services for librechat apps",
"main": "dist/index.js",
"module": "dist/index.es.js",

View file

@ -1246,6 +1246,8 @@ export enum Constants {
GLOBAL_PROJECT_NAME = 'instance',
/** Delimiter for MCP tools */
mcp_delimiter = '_mcp_',
/** Placeholder Agent ID for Ephemeral Agents */
EPHEMERAL_AGENT_ID = 'ephemeral',
}
export enum LocalStorageKeys {
@ -1281,6 +1283,8 @@ export enum LocalStorageKeys {
ENABLE_USER_MSG_MARKDOWN = 'enableUserMsgMarkdown',
/** Key for displaying analysis tool code input */
SHOW_ANALYSIS_CODE = 'showAnalysisCode',
/** Last selected MCP values per conversation ID */
LAST_MCP_ = 'LAST_MCP_',
}
export enum ForkOptions {

View file

@ -3,8 +3,15 @@ import { EndpointURLs } from './config';
import * as s from './schemas';
export default function createPayload(submission: t.TSubmission) {
const { conversation, userMessage, endpointOption, isEdited, isContinued, isTemporary } =
submission;
const {
conversation,
userMessage,
endpointOption,
isEdited,
isContinued,
isTemporary,
ephemeralAgent,
} = submission;
const { conversationId } = s.tConvoUpdateSchema.parse(conversation);
const { endpoint, endpointType } = endpointOption as {
endpoint: s.EModelEndpoint;
@ -12,16 +19,20 @@ export default function createPayload(submission: t.TSubmission) {
};
let server = EndpointURLs[endpointType ?? endpoint];
const isEphemeralAgent = (ephemeralAgent?.mcp?.length ?? 0) > 0 && !s.isAgentsEndpoint(endpoint);
if (isEdited && s.isAssistantsEndpoint(endpoint)) {
server += '/modify';
} else if (isEdited) {
server = server.replace('/ask/', '/edit/');
} else if (isEphemeralAgent) {
server = `${EndpointURLs[s.EModelEndpoint.agents]}/${endpoint}`;
}
const payload: t.TPayload = {
...userMessage,
...endpointOption,
ephemeralAgent: isEphemeralAgent ? ephemeralAgent : undefined,
isContinued: !!(isEdited && isContinued),
conversationId,
isTemporary,
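
A sketch of the new routing decision in createPayload, assuming EndpointURLs maps the agents endpoint to '/api/agents/chat' (the exact URLs come from librechat-data-provider): a non-agents submission with selected MCP servers is re-posted to the agents route keyed by the original endpoint, which the new '/:endpoint' route handles.

// Illustrative only; URL values are assumptions, the logic mirrors createPayload above.
const AGENTS_CHAT_URL = '/api/agents/chat';

function chooseServer(endpoint: string, endpointUrl: string, mcpServers: string[]): string {
  const isAgents = endpoint === 'agents';
  const isEphemeralAgent = mcpServers.length > 0 && !isAgents;
  return isEphemeralAgent ? `${AGENTS_CHAT_URL}/${endpoint}` : endpointUrl;
}

chooseServer('openAI', '/api/ask/openAI', ['filesystem']); // -> '/api/agents/chat/openAI'
chooseServer('openAI', '/api/ask/openAI', []);             // -> '/api/ask/openAI'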

View file

@ -13,8 +13,6 @@ import {
// agentsSchema,
compactAgentsSchema,
compactGoogleSchema,
compactChatGPTSchema,
chatGPTBrowserSchema,
compactPluginsSchema,
compactAssistantSchema,
} from './schemas';
@ -26,19 +24,19 @@ type EndpointSchema =
| typeof openAISchema
| typeof googleSchema
| typeof anthropicSchema
| typeof chatGPTBrowserSchema
| typeof gptPluginsSchema
| typeof assistantSchema
| typeof compactAgentsSchema
| typeof bedrockInputSchema;
const endpointSchemas: Record<EModelEndpoint, EndpointSchema> = {
type EndpointSchemaKey = Exclude<EModelEndpoint, EModelEndpoint.chatGPTBrowser>;
const endpointSchemas: Record<EndpointSchemaKey, EndpointSchema> = {
[EModelEndpoint.openAI]: openAISchema,
[EModelEndpoint.azureOpenAI]: openAISchema,
[EModelEndpoint.custom]: openAISchema,
[EModelEndpoint.google]: googleSchema,
[EModelEndpoint.anthropic]: anthropicSchema,
[EModelEndpoint.chatGPTBrowser]: chatGPTBrowserSchema,
[EModelEndpoint.gptPlugins]: gptPluginsSchema,
[EModelEndpoint.assistants]: assistantSchema,
[EModelEndpoint.azureAssistants]: assistantSchema,
@ -167,8 +165,8 @@ export const parseConvo = ({
conversation,
possibleValues,
}: {
endpoint: EModelEndpoint;
endpointType?: EModelEndpoint | null;
endpoint: EndpointSchemaKey;
endpointType?: EndpointSchemaKey | null;
conversation: Partial<s.TConversation | s.TPreset> | null;
possibleValues?: TPossibleValues;
// TODO: POC for default schema
@ -252,7 +250,7 @@ export const getResponseSender = (endpointOption: t.TEndpointOption): string =>
return modelLabel;
} else if (model && extractOmniVersion(model)) {
return extractOmniVersion(model);
} else if (model && model.includes('mistral')) {
} else if (model && (model.includes('mistral') || model.includes('codestral'))) {
return 'Mistral';
} else if (model && model.includes('gpt-')) {
const gptVersion = extractGPTVersion(model);
@ -288,7 +286,7 @@ export const getResponseSender = (endpointOption: t.TEndpointOption): string =>
return chatGptLabel;
} else if (model && extractOmniVersion(model)) {
return extractOmniVersion(model);
} else if (model && model.includes('mistral')) {
} else if (model && (model.includes('mistral') || model.includes('codestral'))) {
return 'Mistral';
} else if (model && model.includes('gpt-')) {
const gptVersion = extractGPTVersion(model);
@ -309,11 +307,10 @@ type CompactEndpointSchema =
| typeof compactAgentsSchema
| typeof compactGoogleSchema
| typeof anthropicSchema
| typeof compactChatGPTSchema
| typeof bedrockInputSchema
| typeof compactPluginsSchema;
const compactEndpointSchemas: Record<string, CompactEndpointSchema> = {
const compactEndpointSchemas: Record<EndpointSchemaKey, CompactEndpointSchema> = {
[EModelEndpoint.openAI]: openAISchema,
[EModelEndpoint.azureOpenAI]: openAISchema,
[EModelEndpoint.custom]: openAISchema,
@ -323,7 +320,6 @@ const compactEndpointSchemas: Record<string, CompactEndpointSchema> = {
[EModelEndpoint.google]: compactGoogleSchema,
[EModelEndpoint.bedrock]: bedrockInputSchema,
[EModelEndpoint.anthropic]: anthropicSchema,
[EModelEndpoint.chatGPTBrowser]: compactChatGPTSchema,
[EModelEndpoint.gptPlugins]: compactPluginsSchema,
};
@ -333,8 +329,8 @@ export const parseCompactConvo = ({
conversation,
possibleValues,
}: {
endpoint?: EModelEndpoint;
endpointType?: EModelEndpoint | null;
endpoint?: EndpointSchemaKey;
endpointType?: EndpointSchemaKey | null;
conversation: Partial<s.TConversation | s.TPreset>;
possibleValues?: TPossibleValues;
// TODO: POC for default schema
@ -371,7 +367,10 @@ export const parseCompactConvo = ({
return convo;
};
export function parseTextParts(contentParts: a.TMessageContentParts[]): string {
export function parseTextParts(
contentParts: a.TMessageContentParts[],
skipReasoning: boolean = false,
): string {
let result = '';
for (const part of contentParts) {
@ -390,7 +389,7 @@ export function parseTextParts(contentParts: a.TMessageContentParts[]): string {
result += ' ';
}
result += textValue;
} else if (part.type === ContentTypes.THINK) {
} else if (part.type === ContentTypes.THINK && !skipReasoning) {
const textValue = typeof part.think === 'string' ? part.think : '';
if (
result.length > 0 &&
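
A sketch of the intended behavior of the new skipReasoning flag (the part shapes below are simplified stand-ins for TMessageContentParts): when skipReasoning is true, 'think' parts are dropped and only user-visible text is joined, which is how the Anthropic, Google, and OpenAI clients now flatten msg.content into msg.text.

type SketchPart = { type: 'text'; text: string } | { type: 'think'; think: string };

function parseTextPartsSketch(parts: SketchPart[], skipReasoning = false): string {
  let result = '';
  for (const part of parts) {
    const value = part.type === 'text' ? part.text : skipReasoning ? '' : part.think;
    if (!value) {
      continue;
    }
    if (result.length > 0) {
      result += ' ';
    }
    result += value;
  }
  return result;
}

parseTextPartsSketch(
  [{ type: 'think', think: 'internal reasoning' }, { type: 'text', text: 'Hello!' }],
  true,
); // -> 'Hello!'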

View file

@ -752,22 +752,23 @@ export const tConversationTagSchema = z.object({
});
export type TConversationTag = z.infer<typeof tConversationTagSchema>;
export const googleSchema = tConversationSchema
.pick({
model: true,
modelLabel: true,
promptPrefix: true,
examples: true,
temperature: true,
maxOutputTokens: true,
artifacts: true,
topP: true,
topK: true,
iconURL: true,
greeting: true,
spec: true,
maxContextTokens: true,
})
export const googleBaseSchema = tConversationSchema.pick({
model: true,
modelLabel: true,
promptPrefix: true,
examples: true,
temperature: true,
maxOutputTokens: true,
artifacts: true,
topP: true,
topK: true,
iconURL: true,
greeting: true,
spec: true,
maxContextTokens: true,
});
export const googleSchema = googleBaseSchema
.transform((obj: Partial<TConversation>) => removeNullishValues(obj))
.catch(() => ({}));
@ -790,36 +791,25 @@ export const googleGenConfigSchema = z
.strip()
.optional();
export const chatGPTBrowserSchema = tConversationSchema
.pick({
model: true,
})
.transform((obj) => ({
...obj,
model: obj.model ?? 'text-davinci-002-render-sha',
}))
.catch(() => ({
model: 'text-davinci-002-render-sha',
}));
const gptPluginsBaseSchema = tConversationSchema.pick({
model: true,
modelLabel: true,
chatGptLabel: true,
promptPrefix: true,
temperature: true,
artifacts: true,
top_p: true,
presence_penalty: true,
frequency_penalty: true,
tools: true,
agentOptions: true,
iconURL: true,
greeting: true,
spec: true,
maxContextTokens: true,
});
export const gptPluginsSchema = tConversationSchema
.pick({
model: true,
modelLabel: true,
chatGptLabel: true,
promptPrefix: true,
temperature: true,
artifacts: true,
top_p: true,
presence_penalty: true,
frequency_penalty: true,
tools: true,
agentOptions: true,
iconURL: true,
greeting: true,
spec: true,
maxContextTokens: true,
})
export const gptPluginsSchema = gptPluginsBaseSchema
.transform((obj) => {
const result = {
...obj,
@ -889,18 +879,19 @@ export function removeNullishValues<T extends Record<string, unknown>>(
return newObj;
}
export const assistantSchema = tConversationSchema
.pick({
model: true,
assistant_id: true,
instructions: true,
artifacts: true,
promptPrefix: true,
iconURL: true,
greeting: true,
spec: true,
append_current_datetime: true,
})
const assistantBaseSchema = tConversationSchema.pick({
model: true,
assistant_id: true,
instructions: true,
artifacts: true,
promptPrefix: true,
iconURL: true,
greeting: true,
spec: true,
append_current_datetime: true,
});
export const assistantSchema = assistantBaseSchema
.transform((obj) => ({
...obj,
model: obj.model ?? openAISettings.model.default,
@ -923,37 +914,39 @@ export const assistantSchema = tConversationSchema
append_current_datetime: false,
}));
export const compactAssistantSchema = tConversationSchema
.pick({
model: true,
assistant_id: true,
instructions: true,
promptPrefix: true,
artifacts: true,
iconURL: true,
greeting: true,
spec: true,
})
const compactAssistantBaseSchema = tConversationSchema.pick({
model: true,
assistant_id: true,
instructions: true,
promptPrefix: true,
artifacts: true,
iconURL: true,
greeting: true,
spec: true,
});
export const compactAssistantSchema = compactAssistantBaseSchema
.transform((obj) => removeNullishValues(obj))
.catch(() => ({}));
export const agentsSchema = tConversationSchema
.pick({
model: true,
modelLabel: true,
temperature: true,
top_p: true,
presence_penalty: true,
frequency_penalty: true,
resendFiles: true,
imageDetail: true,
agent_id: true,
instructions: true,
promptPrefix: true,
iconURL: true,
greeting: true,
maxContextTokens: true,
})
export const agentsBaseSchema = tConversationSchema.pick({
model: true,
modelLabel: true,
temperature: true,
top_p: true,
presence_penalty: true,
frequency_penalty: true,
resendFiles: true,
imageDetail: true,
agent_id: true,
instructions: true,
promptPrefix: true,
iconURL: true,
greeting: true,
maxContextTokens: true,
});
export const agentsSchema = agentsBaseSchema
.transform((obj) => ({
...obj,
model: obj.model ?? agentsSettings.model.default,
@ -989,46 +982,32 @@ export const agentsSchema = tConversationSchema
maxContextTokens: undefined,
}));
export const openAISchema = tConversationSchema
.pick({
model: true,
modelLabel: true,
chatGptLabel: true,
promptPrefix: true,
temperature: true,
top_p: true,
presence_penalty: true,
frequency_penalty: true,
resendFiles: true,
artifacts: true,
imageDetail: true,
stop: true,
iconURL: true,
greeting: true,
spec: true,
maxContextTokens: true,
max_tokens: true,
reasoning_effort: true,
})
export const openAIBaseSchema = tConversationSchema.pick({
model: true,
modelLabel: true,
chatGptLabel: true,
promptPrefix: true,
temperature: true,
top_p: true,
presence_penalty: true,
frequency_penalty: true,
resendFiles: true,
artifacts: true,
imageDetail: true,
stop: true,
iconURL: true,
greeting: true,
spec: true,
maxContextTokens: true,
max_tokens: true,
reasoning_effort: true,
});
export const openAISchema = openAIBaseSchema
.transform((obj: Partial<TConversation>) => removeNullishValues(obj))
.catch(() => ({}));
export const compactGoogleSchema = tConversationSchema
.pick({
model: true,
modelLabel: true,
promptPrefix: true,
examples: true,
temperature: true,
maxOutputTokens: true,
artifacts: true,
topP: true,
topK: true,
iconURL: true,
greeting: true,
spec: true,
maxContextTokens: true,
})
export const compactGoogleSchema = googleBaseSchema
.transform((obj) => {
const newObj: Partial<TConversation> = { ...obj };
if (newObj.temperature === google.temperature.default) {
@ -1048,55 +1027,30 @@ export const compactGoogleSchema = tConversationSchema
})
.catch(() => ({}));
export const anthropicSchema = tConversationSchema
.pick({
model: true,
modelLabel: true,
promptPrefix: true,
temperature: true,
maxOutputTokens: true,
topP: true,
topK: true,
resendFiles: true,
promptCache: true,
thinking: true,
thinkingBudget: true,
artifacts: true,
iconURL: true,
greeting: true,
spec: true,
maxContextTokens: true,
})
export const anthropicBaseSchema = tConversationSchema.pick({
model: true,
modelLabel: true,
promptPrefix: true,
temperature: true,
maxOutputTokens: true,
topP: true,
topK: true,
resendFiles: true,
promptCache: true,
thinking: true,
thinkingBudget: true,
artifacts: true,
iconURL: true,
greeting: true,
spec: true,
maxContextTokens: true,
});
export const anthropicSchema = anthropicBaseSchema
.transform((obj) => removeNullishValues(obj))
.catch(() => ({}));
export const compactChatGPTSchema = tConversationSchema
.pick({
model: true,
})
.transform((obj) => {
const newObj: Partial<TConversation> = { ...obj };
return removeNullishValues(newObj);
})
.catch(() => ({}));
export const compactPluginsSchema = tConversationSchema
.pick({
model: true,
modelLabel: true,
chatGptLabel: true,
promptPrefix: true,
temperature: true,
top_p: true,
presence_penalty: true,
frequency_penalty: true,
tools: true,
agentOptions: true,
iconURL: true,
greeting: true,
spec: true,
maxContextTokens: true,
})
export const compactPluginsSchema = gptPluginsBaseSchema
.transform((obj) => {
const newObj: Partial<TConversation> = { ...obj };
if (newObj.modelLabel === null) {
@ -1149,15 +1103,16 @@ export const tBannerSchema = z.object({
});
export type TBanner = z.infer<typeof tBannerSchema>;
export const compactAgentsSchema = tConversationSchema
.pick({
spec: true,
// model: true,
iconURL: true,
greeting: true,
agent_id: true,
instructions: true,
additional_instructions: true,
})
export const compactAgentsBaseSchema = tConversationSchema.pick({
spec: true,
// model: true,
iconURL: true,
greeting: true,
agent_id: true,
instructions: true,
additional_instructions: true,
});
export const compactAgentsSchema = compactAgentsBaseSchema
.transform((obj) => removeNullishValues(obj))
.catch(() => ({}));
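
The schema refactor above follows one pattern throughout: the .pick() selection is extracted into an exported base schema so compact variants can reuse it, while the transformed schema keeps its original name. A minimal zod sketch of that pattern, with illustrative field names:

import { z } from 'zod';

const exampleConversationSchema = z.object({
  model: z.string().nullish(),
  temperature: z.number().nullish(),
  greeting: z.string().nullish(),
});

// Exported separately so other schemas can reuse the same field selection.
export const exampleBaseSchema = exampleConversationSchema.pick({
  model: true,
  temperature: true,
});

// The transformed schema keeps the original name and behavior.
export const exampleSchema = exampleBaseSchema
  .transform((obj) => Object.fromEntries(Object.entries(obj).filter(([, v]) => v != null)))
  .catch(() => ({}));

// exampleSchema.parse({ model: 'gpt-4o', temperature: null }) -> { model: 'gpt-4o' }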

View file

@ -41,12 +41,17 @@ export type TEndpointOption = {
overrideUserMessageId?: string;
};
export type TEphemeralAgent = {
mcp: string[];
};
export type TPayload = Partial<TMessage> &
Partial<TEndpointOption> & {
isContinued: boolean;
conversationId: string | null;
messages?: TMessages;
isTemporary: boolean;
ephemeralAgent?: TEphemeralAgent | null;
};
export type TSubmission = {
@ -63,6 +68,7 @@ export type TSubmission = {
conversation: Partial<TConversation>;
endpointOption: TEndpointOption;
clientTimestamp?: string;
ephemeralAgent?: TEphemeralAgent | null;
};
export type EventSubmission = Omit<TSubmission, 'initialResponse'> & { initialResponse: TMessage };