Mirror of https://github.com/danny-avila/LibreChat.git (synced 2025-12-20 10:20:15 +01:00)
🔧 fix: Update Token Calculations/Mapping, MCP env Initialization (#6406)
* fix: Enhance MCP initialization to process environment variables
* fix: only build tokenCountMap with messages that are being used in the payload
* fix: Adjust maxContextTokens calculation to account for maxOutputTokens
* refactor: Make processMCPEnv optional in MCPManager initialization
* chore: Bump version of librechat-data-provider to 0.7.73
parent: d6a17784dc
commit: efb616d600
8 changed files with 46 additions and 20 deletions
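
The MCP-related items in the message (processing environment variables during MCP initialization, and making processMCPEnv optional when the MCPManager is initialized) are not part of the hunks shown below. As a rough illustration of what "processing environment variables" for an MCP server config can mean, here is a minimal standalone sketch; the helper name resolveEnvPlaceholders, the ${VAR} placeholder syntax, and the config shape are assumptions for illustration only, not LibreChat's actual processMCPEnv implementation.

```js
// Minimal sketch (assumed shape, not LibreChat's actual processMCPEnv):
// replace ${VAR} placeholders in an MCP server's `env` block with values
// from process.env before the server is started.
function resolveEnvPlaceholders(serverConfig) {
  const env = { ...(serverConfig.env ?? {}) };
  for (const [key, value] of Object.entries(env)) {
    if (typeof value === 'string') {
      env[key] = value.replace(/\$\{(\w+)\}/g, (_, name) => process.env[name] ?? '');
    }
  }
  return { ...serverConfig, env };
}

// Usage (hypothetical config):
// resolveEnvPlaceholders({ command: 'npx', env: { API_KEY: '${MY_API_KEY}' } });
```
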
The visible hunks are all from initializeAgentOptions:

@@ -178,6 +178,7 @@ const initializeAgentOptions = async ({
     agent.provider = options.provider;
   }
 
+  /** @type {import('@librechat/agents').ClientOptions} */
   agent.model_parameters = Object.assign(model_parameters, options.llmConfig);
   if (options.configOptions) {
     agent.model_parameters.configuration = options.configOptions;
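
A note on the merge in the hunk above: Object.assign(model_parameters, options.llmConfig) copies llmConfig's keys onto model_parameters in place, so keys present in both objects end up with the llmConfig value. A small self-contained illustration (the values are made up, not from LibreChat):

```js
// Object.assign mutates and returns its first argument;
// keys from later sources override earlier ones.
const model_parameters = { model: 'gpt-4o', temperature: 1 };
const llmConfig = { temperature: 0.2, maxOutputTokens: 1024 };

const merged = Object.assign(model_parameters, llmConfig);
console.log(merged === model_parameters); // true  (same object, mutated in place)
console.log(merged.temperature);          // 0.2   (llmConfig wins)
console.log(merged.maxOutputTokens);      // 1024  (new key copied over)
```
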
@@ -196,6 +197,7 @@ const initializeAgentOptions = async ({
 
   const tokensModel =
     agent.provider === EModelEndpoint.azureOpenAI ? agent.model : agent.model_parameters.model;
+  const maxTokens = agent.model_parameters.maxOutputTokens ?? agent.model_parameters.maxTokens ?? 0;
 
   return {
     ...agent,
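
The added maxTokens line relies on nullish coalescing: `??` only falls through when the left-hand side is null or undefined, so an explicitly configured 0 is kept rather than skipped. A quick standalone check (the parameter objects are illustrative):

```js
// maxOutputTokens takes precedence, then maxTokens, then 0 —
// only null/undefined fall through, not 0 or other falsy numbers.
const reservedOutput = (params) => params.maxOutputTokens ?? params.maxTokens ?? 0;

console.log(reservedOutput({ maxOutputTokens: 2048 }));              // 2048
console.log(reservedOutput({ maxTokens: 1024 }));                    // 1024
console.log(reservedOutput({}));                                     // 0
console.log(reservedOutput({ maxOutputTokens: 0, maxTokens: 512 })); // 0 (?? keeps the explicit 0)
```
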
@@ -204,7 +206,7 @@ const initializeAgentOptions = async ({
     toolContextMap,
     maxContextTokens:
       agent.max_context_tokens ??
-      (getModelMaxTokens(tokensModel, providerEndpointMap[provider]) ?? 4000) * 0.9,
+      ((getModelMaxTokens(tokensModel, providerEndpointMap[provider]) ?? 4000) - maxTokens) * 0.9,
   };
 };
 
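
To see what the one-line change does in practice, here is a worked example with illustrative numbers (an 8192-token model window and 1024 reserved output tokens; neither value comes from this diff):

```js
// windowSize stands in for `getModelMaxTokens(tokensModel, ...) ?? 4000`,
// maxTokens for the output budget derived in the previous hunk.
const windowSize = 8192;
const maxTokens = 1024;

const before = windowSize * 0.9;               // 7372.8 — ignored the output reservation
const after = (windowSize - maxTokens) * 0.9;  // 6451.2 — leaves headroom for maxOutputTokens

console.log({ before, after });
```

Note that the computed value only applies when agent.max_context_tokens is not set; an explicit per-agent value still wins via `??`.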