🔃 fix: Draft Clearing, Claude Titles, Remove Default Vision Max Tokens (#6501)

* refactor: remove legacy max_tokens setting for vision models in OpenAIClient (intended for gpt-4-preview)

* refactor: streamline capability checks in loadAgentTools function, still allow actions if tools are disabled

* fix: enhance error handling for token limits in AnthropicClient and update error message in translations

* feat: append timestamp to cloned agent names for better identification

* chore: update @librechat/agents dependency to version 2.3.94

* refactor: remove clearDraft helper from useSubmitMessage and centralize draft clearing logic to SSE handling, helps prevent user message loss if logout occurs

* refactor: increase debounce time for clearDraft function to improve auto-save performance
This commit is contained in:
Danny Avila 2025-03-23 18:47:40 -04:00 committed by GitHub
parent 20f353630e
commit 4b85fe9206
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
11 changed files with 634 additions and 590 deletions

View file

@@ -2,6 +2,7 @@ const Anthropic = require('@anthropic-ai/sdk');
const { HttpsProxyAgent } = require('https-proxy-agent');
const {
Constants,
ErrorTypes,
EModelEndpoint,
anthropicSettings,
getResponseSender,
@@ -147,12 +148,17 @@ class AnthropicClient extends BaseClient {
this.maxPromptTokens =
this.options.maxPromptTokens || this.maxContextTokens - this.maxResponseTokens;
if (this.maxPromptTokens + this.maxResponseTokens > this.maxContextTokens) {
throw new Error(
`maxPromptTokens + maxOutputTokens (${this.maxPromptTokens} + ${this.maxResponseTokens} = ${
this.maxPromptTokens + this.maxResponseTokens
}) must be less than or equal to maxContextTokens (${this.maxContextTokens})`,
);
const reservedTokens = this.maxPromptTokens + this.maxResponseTokens;
if (reservedTokens > this.maxContextTokens) {
const info = `Total Possible Tokens + Max Output Tokens must be less than or equal to Max Context Tokens: ${this.maxPromptTokens} (total possible output) + ${this.maxResponseTokens} (max output) = ${reservedTokens}/${this.maxContextTokens} (max context)`;
const errorMessage = `{ "type": "${ErrorTypes.INPUT_LENGTH}", "info": "${info}" }`;
logger.warn(info);
throw new Error(errorMessage);
} else if (this.maxResponseTokens === this.maxContextTokens) {
const info = `Max Output Tokens must be less than Max Context Tokens: ${this.maxResponseTokens} (max output) = ${this.maxContextTokens} (max context)`;
const errorMessage = `{ "type": "${ErrorTypes.INPUT_LENGTH}", "info": "${info}" }`;
logger.warn(info);
throw new Error(errorMessage);
}
this.sender =

View file

@@ -1185,10 +1185,6 @@ ${convo}
opts.httpAgent = new HttpsProxyAgent(this.options.proxy);
}
if (this.isVisionModel) {
modelOptions.max_tokens = 4000;
}
/** @type {TAzureConfig | undefined} */
const azureConfig = this.options?.req?.app?.locals?.[EModelEndpoint.azureOpenAI];

View file

@@ -49,7 +49,7 @@
"@langchain/google-genai": "^0.1.11",
"@langchain/google-vertexai": "^0.2.2",
"@langchain/textsplitters": "^0.1.0",
"@librechat/agents": "^2.3.93",
"@librechat/agents": "^2.3.94",
"@librechat/data-schemas": "*",
"@waylaidwanderer/fetch-event-source": "^3.0.1",
"axios": "^1.8.2",

View file

@@ -212,6 +212,11 @@ const duplicateAgentHandler = async (req, res) => {
tool_resources: _tool_resources = {},
...cloneData
} = agent;
cloneData.name = `${agent.name} (${new Date().toLocaleString('en-US', {
dateStyle: 'short',
timeStyle: 'short',
hour12: false,
})})`;
if (_tool_resources?.[EToolResources.ocr]) {
cloneData.tool_resources = {

View file

@@ -425,21 +425,16 @@ async function loadAgentTools({ req, res, agent, tool_resources, openAIApiKey })
}
const endpointsConfig = await getEndpointsConfig(req);
const capabilities = endpointsConfig?.[EModelEndpoint.agents]?.capabilities ?? [];
const areToolsEnabled = capabilities.includes(AgentCapabilities.tools);
if (!areToolsEnabled) {
logger.debug('Tools are not enabled for this agent.');
return {};
}
const isFileSearchEnabled = capabilities.includes(AgentCapabilities.file_search);
const isCodeEnabled = capabilities.includes(AgentCapabilities.execute_code);
const areActionsEnabled = capabilities.includes(AgentCapabilities.actions);
const enabledCapabilities = new Set(endpointsConfig?.[EModelEndpoint.agents]?.capabilities ?? []);
const checkCapability = (capability) => enabledCapabilities.has(capability);
const areToolsEnabled = checkCapability(AgentCapabilities.tools);
const _agentTools = agent.tools?.filter((tool) => {
if (tool === Tools.file_search && !isFileSearchEnabled) {
if (tool === Tools.file_search && !checkCapability(AgentCapabilities.file_search)) {
return false;
} else if (tool === Tools.execute_code && !isCodeEnabled) {
} else if (tool === Tools.execute_code && !checkCapability(AgentCapabilities.execute_code)) {
return false;
} else if (!areToolsEnabled && !tool.includes(actionDelimiter)) {
return false;
}
return true;
@@ -473,6 +468,10 @@ async function loadAgentTools({ req, res, agent, tool_resources, openAIApiKey })
continue;
}
if (!areToolsEnabled) {
continue;
}
if (tool.mcp === true) {
agentTools.push(tool);
continue;
@@ -505,7 +504,7 @@ async function loadAgentTools({ req, res, agent, tool_resources, openAIApiKey })
return map;
}, {});
if (!areActionsEnabled) {
if (!checkCapability(AgentCapabilities.actions)) {
return {
tools: agentTools,
toolContextMap,

View file

@@ -93,7 +93,7 @@ const ChatForm = ({ index = 0 }) => {
} = useAddedChatContext();
const showStopAdded = useRecoilValue(store.showStopButtonByIndex(addedIndex));
const { clearDraft } = useAutoSave({
useAutoSave({
conversationId: useMemo(() => conversation?.conversationId, [conversation]),
textAreaRef,
files,
@@ -101,7 +101,7 @@ const ChatForm = ({ index = 0 }) => {
});
const assistantMap = useAssistantsMapContext();
const { submitMessage, submitPrompt } = useSubmitMessage({ clearDraft });
const { submitMessage, submitPrompt } = useSubmitMessage();
const { endpoint: _endpoint, endpointType } = conversation ?? { endpoint: null };
const endpoint = endpointType ?? _endpoint;

View file

@@ -7,6 +7,10 @@ import { useChatFormContext } from '~/Providers';
import { useGetFiles } from '~/data-provider';
import store from '~/store';
const clearDraft = debounce((id?: string | null) => {
localStorage.removeItem(`${LocalStorageKeys.TEXT_DRAFT}${id ?? ''}`);
}, 2500);
export const useAutoSave = ({
conversationId,
textAreaRef,
@@ -103,7 +107,7 @@ export const useAutoSave = ({
}
// Save the draft of the current conversation before switching
if (textAreaRef.current.value === '') {
localStorage.removeItem(`${LocalStorageKeys.TEXT_DRAFT}${id}`);
clearDraft(id);
} else {
localStorage.setItem(
`${LocalStorageKeys.TEXT_DRAFT}${id}`,
@@ -208,13 +212,4 @@ export const useAutoSave = ({
);
}
}, [files, conversationId, saveDrafts, currentConversationId, fileIds]);
const clearDraft = useCallback(() => {
if (conversationId != null && conversationId) {
localStorage.removeItem(`${LocalStorageKeys.TEXT_DRAFT}${conversationId}`);
localStorage.removeItem(`${LocalStorageKeys.FILES_DRAFT}${conversationId}`);
}
}, [conversationId]);
return { clearDraft };
};

View file

@@ -14,7 +14,7 @@ const appendIndex = (index: number, value?: string) => {
return `${value}${Constants.COMMON_DIVIDER}${index}`;
};
export default function useSubmitMessage(helpers?: { clearDraft?: () => void }) {
export default function useSubmitMessage() {
const { user } = useAuthContext();
const methods = useChatFormContext();
const { ask, index, getMessages, setMessages, latestMessage } = useChatContext();
@@ -66,12 +66,10 @@ export default function useSubmitMessage(helpers?: { clearDraft?: () => void })
);
}
methods.reset();
helpers?.clearDraft && helpers.clearDraft();
},
[
ask,
methods,
helpers,
addedIndex,
addedConvo,
setMessages,

View file

@@ -4,9 +4,11 @@ import { SSE } from 'sse.js';
import { useSetRecoilState } from 'recoil';
import {
request,
Constants,
/* @ts-ignore */
createPayload,
isAgentsEndpoint,
LocalStorageKeys,
removeNullishValues,
isAssistantsEndpoint,
} from 'librechat-data-provider';
@@ -18,6 +20,16 @@ import { useAuthContext } from '~/hooks/AuthContext';
import useEventHandlers from './useEventHandlers';
import store from '~/store';
const clearDraft = (conversationId?: string | null) => {
if (conversationId) {
localStorage.removeItem(`${LocalStorageKeys.TEXT_DRAFT}${conversationId}`);
localStorage.removeItem(`${LocalStorageKeys.FILES_DRAFT}${conversationId}`);
} else {
localStorage.removeItem(`${LocalStorageKeys.TEXT_DRAFT}${Constants.NEW_CONVO}`);
localStorage.removeItem(`${LocalStorageKeys.FILES_DRAFT}${Constants.NEW_CONVO}`);
}
};
type ChatHelpers = Pick<
EventHandlerParams,
| 'setMessages'
@@ -112,6 +124,7 @@ export default function useSSE(
const data = JSON.parse(e.data);
if (data.final != null) {
clearDraft(submission.conversationId);
const { plugins } = data;
finalHandler(data, { ...submission, plugins } as EventSubmission);
(startupConfig?.balance?.enabled ?? false) && balanceQuery.refetch();

View file

@@ -264,7 +264,7 @@
"com_error_files_upload": "An error occurred while uploading the file.",
"com_error_files_upload_canceled": "The file upload request was canceled. Note: the file upload may still be processing and will need to be manually deleted.",
"com_error_files_validation": "An error occurred while validating the file.",
"com_error_input_length": "The latest message token count is too long, exceeding the token limit ({{0}} respectively). Please shorten your message, adjust the max context size from the conversation parameters, or fork the conversation to continue.",
"com_error_input_length": "The latest message token count is too long, exceeding the token limit, or your token limit parameters are misconfigured, adversely affecting the context window. More info: {{0}}. Please shorten your message, adjust the max context size from the conversation parameters, or fork the conversation to continue.",
"com_error_invalid_user_key": "Invalid key provided. Please provide a valid key and try again.",
"com_error_moderation": "It appears that the content submitted has been flagged by our moderation system for not aligning with our community guidelines. We're unable to proceed with this specific topic. If you have any other questions or topics you'd like to explore, please edit your message, or create a new conversation.",
"com_error_no_base_url": "No base URL found. Please provide one and try again.",

1130
package-lock.json generated

File diff suppressed because it is too large Load diff