🪙 fix: Max Output Tokens Refactor for Responses API (#8972)

chore: Remove `max_output_tokens` from model kwargs in `titleConvo` if provided
Dustin Healy 2025-08-10 10:58:25 -07:00 committed by Danny Avila
parent da3730b7d6
commit 21e00168b1
7 changed files with 143 additions and 4 deletions
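
The commit body mentions a companion cleanup in `titleConvo`: dropping `max_output_tokens` from the model kwargs before generating a conversation title. That file's diff is not shown here, so the sketch below is only a hypothetical illustration of that kind of cleanup; `buildTitleKwargs` and the surrounding shape are assumptions, not the project's actual code.

```ts
// Hypothetical sketch of the titleConvo cleanup described in the commit
// body; buildTitleKwargs and this shape are assumptions, not actual code.
type ModelKwargs = Record<string, unknown>;

function buildTitleKwargs(modelKwargs: ModelKwargs): ModelKwargs {
  // Copy so the conversation's own kwargs are left untouched.
  const titleKwargs: ModelKwargs = { ...modelKwargs };
  // Remove max_output_tokens if it was provided, per the commit message.
  delete titleKwargs.max_output_tokens;
  return titleKwargs;
}
```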

@@ -300,7 +300,9 @@ export function getOpenAIConfig(
   }
   if (llmConfig.model && /\bgpt-[5-9]\b/i.test(llmConfig.model) && llmConfig.maxTokens != null) {
-    modelKwargs.max_completion_tokens = llmConfig.maxTokens;
+    const paramName =
+      llmConfig.useResponsesApi === true ? 'max_output_tokens' : 'max_completion_tokens';
+    modelKwargs[paramName] = llmConfig.maxTokens;
     delete llmConfig.maxTokens;
     hasModelKwargs = true;
   }
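
The hunk above stops hard-coding `max_completion_tokens` and instead picks the parameter name based on which API is in use: the Responses API takes `max_output_tokens`, while Chat Completions takes `max_completion_tokens` for these newer GPT models. Below is a self-contained TypeScript sketch of the same selection logic; the `LLMConfig` shape is trimmed down, and the real config type in the repository has many more fields.

```ts
// Simplified sketch of the selection logic in the hunk above; the real
// LLMConfig type has many more fields than shown here.
interface LLMConfig {
  model?: string;
  maxTokens?: number | null;
  useResponsesApi?: boolean;
}

function applyMaxTokensKwarg(
  llmConfig: LLMConfig,
  modelKwargs: Record<string, unknown>,
): boolean {
  // Only GPT-5 through GPT-9 models route maxTokens into model kwargs.
  if (llmConfig.model && /\bgpt-[5-9]\b/i.test(llmConfig.model) && llmConfig.maxTokens != null) {
    // The Responses API expects `max_output_tokens`; Chat Completions
    // uses `max_completion_tokens` for these models.
    const paramName =
      llmConfig.useResponsesApi === true ? 'max_output_tokens' : 'max_completion_tokens';
    modelKwargs[paramName] = llmConfig.maxTokens;
    delete llmConfig.maxTokens;
    return true; // hasModelKwargs
  }
  return false;
}
```

For example, `{ model: 'gpt-5', maxTokens: 1024, useResponsesApi: true }` produces `modelKwargs.max_output_tokens === 1024`, while omitting `useResponsesApi` yields `max_completion_tokens` instead.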