🏃‍♂️ refactor: Improve Agent Run Context & Misc. Changes (#6448)

* chore: bump Model Context Protocol SDK dependencies

* fix: correct indentation in MCPConnection class

* refactor: enhance SSE transport with abort controller and add error handling for empty results (see the first sketch after this list)

* chore: remove outdated Model Context Protocol SDK dependency

* chore: update @modelcontextprotocol/sdk dependency to version 1.7.0

* chore: add debugging comments for PingRequest handling in MCPConnection class

* refactor: update callTool method to accept structured arguments and options

* refactor: simplify maxContextTokens calculation in initializeAgentOptions

* chore: update @babel/runtime dependency to version 7.26.10

* chore: update @librechat/agents dependency to version 2.2.9

* chore: update @librechat/agents dependency to version 2.3.6

* refactor: clean up imports and prevent S3 initialization if the strategy is not configured

* refactor: mark redis as non-experimental

* refactor: add missing `maxContextTokens` for OpenAI parameters

* refactor: improve log message for Redis initialization

* chore: update @librechat/agents dependency to version 2.3.8

* refactor: extend `streamBuffer` condition to include the BEDROCK provider, as it is easily throttled by AWS

* refactor: filter out 'think' parts from message content in Anthropic and OpenAI clients (see the second sketch after this list)
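
The SSE transport change wires an abort controller into the connection. Below is a generic sketch of that pattern, assuming a plain fetch-based SSE request; the actual `@modelcontextprotocol/sdk` transport API and the `MCPConnection` implementation are not reproduced here.

```javascript
// Generic abort-controller pattern for an SSE-style request.
// Illustrative sketch only; not the MCPConnection implementation.
const controller = new AbortController();

async function openEventStream(url) {
  const res = await fetch(url, {
    headers: { Accept: 'text/event-stream' },
    signal: controller.signal, // aborting the controller cancels the request
  });
  if (!res.ok || !res.body) {
    // Surface empty/failed results instead of hanging silently.
    throw new Error(`SSE connection failed with status ${res.status}`);
  }
  return res.body;
}

// Later, e.g. when the connection is closed or times out:
// controller.abort();
```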
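
The 'think' filter drops model reasoning parts before message content is reused. A minimal sketch, assuming content is an array of typed parts; the part shape shown here is illustrative, not LibreChat's exact schema.

```javascript
// Drop reasoning ("think") parts, keeping everything else.
const filterThinkParts = (content) =>
  Array.isArray(content) ? content.filter((part) => part?.type !== 'think') : content;

// Example: only the text part survives.
const parts = [
  { type: 'think', think: 'internal chain of thought…' },
  { type: 'text', text: 'Final answer.' },
];
console.log(filterThinkParts(parts)); // [ { type: 'text', text: 'Final answer.' } ]
```
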

Authored by Danny Avila on 2025-03-20 22:56:57 -04:00, committed by GitHub
parent e768a07738
commit bc88ac846d
13 changed files with 446 additions and 382 deletions

```diff
@@ -201,15 +201,17 @@ const initializeAgentOptions = async ({
   const tokensModel =
     agent.provider === EModelEndpoint.azureOpenAI ? agent.model : agent.model_parameters.model;
   const maxTokens = agent.model_parameters.maxOutputTokens ?? agent.model_parameters.maxTokens ?? 0;
+  const maxContextTokens =
+    agent.model_parameters.maxContextTokens ??
+    agent.max_context_tokens ??
+    getModelMaxTokens(tokensModel, providerEndpointMap[provider]) ??
+    4096;
   return {
     ...agent,
     tools,
     attachments,
     toolContextMap,
-    maxContextTokens:
-      agent.max_context_tokens ??
-      ((getModelMaxTokens(tokensModel, providerEndpointMap[provider]) ?? 4000) - maxTokens) * 0.9,
+    maxContextTokens: (maxContextTokens - maxTokens) * 0.9,
   };
 };
```
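
For a sense of scale, here is the new calculation applied to hypothetical numbers (a 128,000-token context window and 4,096 max output tokens; these values are illustrative, not defaults from the codebase):

```javascript
// Hypothetical values, only to illustrate the formula above.
const maxContextTokens = 128000; // resolved from model_parameters, agent, or model defaults
const maxTokens = 4096;          // maxOutputTokens ?? maxTokens ?? 0

// Reserve room for the response, then keep a 10% safety margin.
const effectiveContext = (maxContextTokens - maxTokens) * 0.9;
console.log(effectiveContext); // 111513.6, i.e. roughly 111.5k tokens of context budget
```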