🪙 refactor: Remove Title maxTokens & Support New LMStudio/Ollama Reasoning Format (#9085)

* 📦 chore: bump `@librechat/agents` to v2.4.76

* refactor: remove default maxTokens from title `clientOptions`
Author: Danny Avila, 2025-08-15 15:29:16 -04:00 (committed by GitHub)
parent 4ec7bcb60f
commit 81186312ef
4 changed files with 12 additions and 15 deletions

@@ -1080,7 +1080,6 @@ class AgentClient extends BaseClient {
     /** @type {import('@librechat/agents').ClientOptions} */
     let clientOptions = {
-      maxTokens: 75,
       model: agent.model || agent.model_parameters.model,
     };
@@ -1147,15 +1146,13 @@ class AgentClient extends BaseClient {
       clientOptions.configuration = options.configOptions;
     }
-    const shouldRemoveMaxTokens = /\b(o\d|gpt-[5-9])\b/i.test(clientOptions.model);
-    if (shouldRemoveMaxTokens && clientOptions.maxTokens != null) {
+    if (clientOptions.maxTokens != null) {
       delete clientOptions.maxTokens;
-    } else if (!shouldRemoveMaxTokens && !clientOptions.maxTokens) {
-      clientOptions.maxTokens = 75;
     }
-    if (shouldRemoveMaxTokens && clientOptions?.modelKwargs?.max_completion_tokens != null) {
+    if (clientOptions?.modelKwargs?.max_completion_tokens != null) {
       delete clientOptions.modelKwargs.max_completion_tokens;
-    } else if (shouldRemoveMaxTokens && clientOptions?.modelKwargs?.max_output_tokens != null) {
+    }
+    if (clientOptions?.modelKwargs?.max_output_tokens != null) {
       delete clientOptions.modelKwargs.max_output_tokens;
     }
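
Read together, the two hunks reduce to the sketch below: a condensed, illustrative rendering of the post-change logic, not the verbatim source. The helper name buildTitleClientOptions and the modelParameters argument are invented here for illustration; in the actual class this runs inline, with provider options merged into clientOptions between the two hunks.

// Condensed sketch of the behavior after this commit (illustrative only).
// Any token caps that arrive via the model parameters are stripped for every
// model; the old /\b(o\d|gpt-[5-9])\b/i gate and the 75-token fallback are
// gone, leaving title length to the provider/package defaults.
function buildTitleClientOptions(agent, modelParameters = {}, configOptions) {
  let clientOptions = {
    ...modelParameters,
    model: agent.model || agent.model_parameters.model,
  };
  if (configOptions) {
    clientOptions.configuration = configOptions;
  }
  if (clientOptions.maxTokens != null) {
    delete clientOptions.maxTokens;
  }
  if (clientOptions?.modelKwargs?.max_completion_tokens != null) {
    delete clientOptions.modelKwargs.max_completion_tokens;
  }
  if (clientOptions?.modelKwargs?.max_output_tokens != null) {
    delete clientOptions.modelKwargs.max_output_tokens;
  }
  return clientOptions;
}

The LMStudio/Ollama reasoning-format support named in the title does not appear in this file; it presumably arrives through the @librechat/agents v2.4.76 bump listed above.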