🏃‍♂️ refactor: More Agent Context Improvements during Run (#6477)

* fix: Add optional chaining utility and update agent parameter types

* v2.3.9

* chore: Update @librechat/agents version to 2.3.93
This commit is contained in:
Danny Avila 2025-03-22 12:38:44 -04:00 committed by GitHub
parent 3a62a2633d
commit 2ecb167761
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
4 changed files with 31 additions and 12 deletions

View file

@@ -49,7 +49,7 @@
"@langchain/google-genai": "^0.1.11",
"@langchain/google-vertexai": "^0.2.2",
"@langchain/textsplitters": "^0.1.0",
"@librechat/agents": "^2.3.8",
"@librechat/agents": "^2.3.93",
"@librechat/data-schemas": "*",
"@waylaidwanderer/fetch-event-source": "^3.0.1",
"axios": "^1.8.2",

View file

@@ -99,6 +99,19 @@ const primeResources = async (req, _attachments, _tool_resources) => {
}
};
/**
 * Picks the first argument that carries a meaningful value — i.e. is not
 * `undefined`, not `null`, and not the empty string `''`. If every argument
 * is "empty" by that definition, the last argument is returned as the
 * fallback (so a trailing default like `0` or `4096` always wins).
 *
 * Note: `0` and other falsy-but-valid numbers ARE considered meaningful,
 * which is why this exists instead of a plain `a || b || c` chain.
 *
 * @param {...string | number} values - Candidate values in priority order.
 * @returns {string | number | undefined} First non-empty value, or the final argument.
 */
function optionalChainWithEmptyCheck(...values) {
  const hasContent = (value) => value !== undefined && value !== null && value !== '';
  const match = values.find(hasContent);
  // `??` is safe here: a matched value can never be null/undefined by
  // construction of `hasContent`, and `0`/`false`/`''` pass through untouched.
  return match ?? values[values.length - 1];
}
/**
* @param {object} params
* @param {ServerRequest} params.req
@ -200,12 +213,17 @@ const initializeAgentOptions = async ({
const tokensModel =
agent.provider === EModelEndpoint.azureOpenAI ? agent.model : agent.model_parameters.model;
const maxTokens = agent.model_parameters.maxOutputTokens ?? agent.model_parameters.maxTokens ?? 0;
const maxContextTokens =
agent.model_parameters.maxContextTokens ??
agent.max_context_tokens ??
getModelMaxTokens(tokensModel, providerEndpointMap[provider]) ??
4096;
const maxTokens = optionalChainWithEmptyCheck(
agent.model_parameters.maxOutputTokens,
agent.model_parameters.maxTokens,
0,
);
const maxContextTokens = optionalChainWithEmptyCheck(
agent.model_parameters.maxContextTokens,
agent.max_context_tokens,
getModelMaxTokens(tokensModel, providerEndpointMap[provider]),
4096,
);
return {
...agent,
tools,

8
package-lock.json generated
View file

@@ -65,7 +65,7 @@
"@langchain/google-genai": "^0.1.11",
"@langchain/google-vertexai": "^0.2.2",
"@langchain/textsplitters": "^0.1.0",
"@librechat/agents": "^2.3.8",
"@librechat/agents": "^2.3.93",
"@librechat/data-schemas": "*",
"@waylaidwanderer/fetch-event-source": "^3.0.1",
"axios": "^1.8.2",
@@ -671,9 +671,9 @@
}
},
"api/node_modules/@librechat/agents": {
"version": "2.3.8",
"resolved": "https://registry.npmjs.org/@librechat/agents/-/agents-2.3.8.tgz",
"integrity": "sha512-S8v1EmEBvE/cJ85inApFCkYay97EJ2s0goUfXGK86wPssC0EZRUJ/iYhcD61cnAQ+vRTk0a+Hn7P42Yw68V93A==",
"version": "2.3.93",
"resolved": "https://registry.npmjs.org/@librechat/agents/-/agents-2.3.93.tgz",
"integrity": "sha512-TlGpxfO+fBs+6xecNlX2mQcHh7BdAGRsBBri1DNpzAaPclMVWQGxIdUGJBqrqcnrlVLKkb4xctExxmmXe9mt0A==",
"dependencies": {
"@aws-crypto/sha256-js": "^5.2.0",
"@aws-sdk/credential-provider-node": "^3.613.0",

View file

@@ -150,11 +150,12 @@ export type File = {
/* Agent types */
export type AgentParameterValue = number | null;
export type AgentParameterValue = number | string | null;
export type AgentModelParameters = {
model?: string;
temperature: AgentParameterValue;
maxContextTokens: AgentParameterValue;
max_context_tokens: AgentParameterValue;
max_output_tokens: AgentParameterValue;
top_p: AgentParameterValue;