🪙 fix: Max Output Tokens Refactor for Responses API (#8972)

🪙 fix: Max Output Tokens Refactor for Responses API (#8972)

chore: Remove `max_output_tokens` from model kwargs in `titleConvo` if provided
This commit is contained in:
Dustin Healy 2025-08-10 10:58:25 -07:00 committed by Danny Avila
parent da3730b7d6
commit 21e00168b1
No known key found for this signature in database
GPG key ID: BF31EEB2C5CA0956
7 changed files with 143 additions and 4 deletions

View file

@@ -352,7 +352,11 @@ ${memory ?? 'No existing memories'}`;
// Move maxTokens to modelKwargs for GPT-5+ models
if ('maxTokens' in finalLLMConfig && finalLLMConfig.maxTokens != null) {
const modelKwargs = (finalLLMConfig as OpenAIClientOptions).modelKwargs ?? {};
-    modelKwargs.max_completion_tokens = finalLLMConfig.maxTokens;
+    const paramName =
+      (finalLLMConfig as OpenAIClientOptions).useResponsesApi === true
+        ? 'max_output_tokens'
+        : 'max_completion_tokens';
+    modelKwargs[paramName] = finalLLMConfig.maxTokens;
delete finalLLMConfig.maxTokens;
(finalLLMConfig as OpenAIClientOptions).modelKwargs = modelKwargs;
}