🔧 fix: Update Token Calculations/Mapping, MCP Env Initialization (#6406)

* fix: Enhance MCP initialization to process environment variables

* fix: only build tokenCountMap with messages that are being used in the payload

* fix: Adjust maxContextTokens calculation to account for maxOutputTokens

* refactor: Make processMCPEnv optional in MCPManager initialization

* chore: Bump version of librechat-data-provider to 0.7.73
This commit is contained in:
Danny Avila 2025-03-18 23:16:45 -04:00 committed by GitHub
parent d6a17784dc
commit efb616d600
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
8 changed files with 46 additions and 20 deletions

View file

@ -178,6 +178,7 @@ const initializeAgentOptions = async ({
agent.provider = options.provider;
}
/** @type {import('@librechat/agents').ClientOptions} */
agent.model_parameters = Object.assign(model_parameters, options.llmConfig);
if (options.configOptions) {
agent.model_parameters.configuration = options.configOptions;
@ -196,6 +197,7 @@ const initializeAgentOptions = async ({
const tokensModel =
agent.provider === EModelEndpoint.azureOpenAI ? agent.model : agent.model_parameters.model;
const maxTokens = agent.model_parameters.maxOutputTokens ?? agent.model_parameters.maxTokens ?? 0;
return {
...agent,
@ -204,7 +206,7 @@ const initializeAgentOptions = async ({
toolContextMap,
maxContextTokens:
agent.max_context_tokens ??
(getModelMaxTokens(tokensModel, providerEndpointMap[provider]) ?? 4000) * 0.9,
((getModelMaxTokens(tokensModel, providerEndpointMap[provider]) ?? 4000) - maxTokens) * 0.9,
};
};