🤖 feat: GPT-5.1 (#10491)

commit 6522789f5b (parent e71c48ec3d)
Author: Danny Avila
Date:   2025-11-14 12:28:20 -05:00
Committed by: GitHub
8 changed files with 50 additions and 14 deletions


@@ -345,7 +345,7 @@ ${memory ?? 'No existing memories'}`;
   };
   // Handle GPT-5+ models
-  if ('model' in finalLLMConfig && /\bgpt-[5-9]\b/i.test(finalLLMConfig.model ?? '')) {
+  if ('model' in finalLLMConfig && /\bgpt-[5-9](?:\.\d+)?\b/i.test(finalLLMConfig.model ?? '')) {
     // Remove temperature for GPT-5+ models
     delete finalLLMConfig.temperature;
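
For illustration (not part of the commit): a minimal check of what the widened pattern accepts. The model identifiers below, including the -mini variant, are assumed examples; the regex itself is the one introduced in this hunk, and matching models have temperature stripped as shown above.

```ts
// Assumed model names, used only to exercise the updated GPT-5+ pattern.
const GPT5_PLUS = /\bgpt-[5-9](?:\.\d+)?\b/i;

console.log(GPT5_PLUS.test('gpt-5'));        // true
console.log(GPT5_PLUS.test('gpt-5.1'));      // true  (point release now matched in full)
console.log(GPT5_PLUS.test('gpt-5.1-mini')); // true  (hypothetical variant name)
console.log(GPT5_PLUS.test('gpt-4o'));       // false (temperature left untouched)
```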


@@ -940,6 +940,16 @@ describe('getOpenAIConfig', () => {
       { reasoning_effort: null, reasoning_summary: null, shouldHaveReasoning: false },
       { reasoning_effort: undefined, reasoning_summary: undefined, shouldHaveReasoning: false },
       { reasoning_effort: '', reasoning_summary: '', shouldHaveReasoning: false },
+      {
+        reasoning_effort: ReasoningEffort.unset,
+        reasoning_summary: '',
+        shouldHaveReasoning: false,
+      },
+      {
+        reasoning_effort: ReasoningEffort.none,
+        reasoning_summary: null,
+        shouldHaveReasoning: true,
+      },
       {
         reasoning_effort: null,
         reasoning_summary: ReasoningSummary.concise,
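
A hypothetical sketch of the rule these new test cases assert: ReasoningEffort.unset behaves like an absent value and yields no reasoning config, while ReasoningEffort.none is an explicit effort level that still yields one. Only the member names come from the test file; the string values and the helper below are assumptions.

```ts
// Assumed enum values; only the member names appear in the test cases above.
const ReasoningEffort = { unset: 'unset', none: 'none' } as const;

// Hypothetical predicate expressing the expectation table:
// a reasoning config is expected only when an effort or summary is actually provided.
function expectsReasoning(effort?: string | null, summary?: string | null): boolean {
  const effortProvided = effort != null && effort !== '' && effort !== ReasoningEffort.unset;
  const summaryProvided = summary != null && summary !== '';
  return effortProvided || summaryProvided;
}

expectsReasoning(ReasoningEffort.unset, '');  // false, matches the first new case
expectsReasoning(ReasoningEffort.none, null); // true, matches the second new case
```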


@@ -300,7 +300,11 @@ export function getOpenAILLMConfig({
     delete modelKwargs.verbosity;
   }
-  if (llmConfig.model && /\bgpt-[5-9]\b/i.test(llmConfig.model) && llmConfig.maxTokens != null) {
+  if (
+    llmConfig.model &&
+    /\bgpt-[5-9](?:\.\d+)?\b/i.test(llmConfig.model) &&
+    llmConfig.maxTokens != null
+  ) {
     const paramName =
       llmConfig.useResponsesApi === true ? 'max_output_tokens' : 'max_completion_tokens';
     modelKwargs[paramName] = llmConfig.maxTokens;
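
A self-contained sketch of the token-limit mapping this hunk performs, under assumed names (LLMConfigLike, mapMaxTokens); the regex and the two OpenAI parameter names are taken from the diff. For GPT-5+ models, including point releases such as gpt-5.1, maxTokens is forwarded as max_output_tokens when the Responses API is in use and as max_completion_tokens otherwise.

```ts
// Assumed minimal config shape; field names follow the diff above.
interface LLMConfigLike {
  model?: string;
  maxTokens?: number | null;
  useResponsesApi?: boolean;
}

// Hypothetical standalone version of the mapping shown in the hunk.
function mapMaxTokens(cfg: LLMConfigLike): Record<string, number> {
  const modelKwargs: Record<string, number> = {};
  if (cfg.model && /\bgpt-[5-9](?:\.\d+)?\b/i.test(cfg.model) && cfg.maxTokens != null) {
    const paramName = cfg.useResponsesApi === true ? 'max_output_tokens' : 'max_completion_tokens';
    modelKwargs[paramName] = cfg.maxTokens;
  }
  return modelKwargs;
}

mapMaxTokens({ model: 'gpt-5.1', maxTokens: 2048 });
// => { max_completion_tokens: 2048 }
mapMaxTokens({ model: 'gpt-5.1', maxTokens: 2048, useResponsesApi: true });
// => { max_output_tokens: 2048 }
```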