🤖 feat: Support o4-mini and o3 Models (#6928)

* feat: Add support for new OpenAI models (o4-mini, o3) and update related logic

* 🔧 fix: Rename 'resubmitFiles' to 'isResubmission' for consistency across types and hooks

* 🔧 fix: Replace hardcoded 'pending_req' with CacheKeys.PENDING_REQ for consistency in cache handling

* 🔧 fix: Update cache handling to use Time.ONE_MINUTE instead of a hardcoded TTL and streamline imports (a sketch of this change follows the list)

* 🔧 fix: Enhance message handling logic to correctly identify parent messages and streamline imports in useSSE
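The cache-handling fixes above amount to swapping a string literal and a raw millisecond value for shared constants. A minimal sketch, assuming a keyv-style cache store; the getLogStores helper and the markPendingRequest wrapper are named here only for illustration, while CacheKeys.PENDING_REQ and Time.ONE_MINUTE are the constants referenced in the commit messages:

const { CacheKeys, Time } = require('librechat-data-provider');
const getLogStores = require('~/cache/getLogStores');

// Illustrative only: record a pending request for one minute.
// Before this commit the key was the hardcoded string 'pending_req'
// and the TTL a raw number; both now come from shared constants.
async function markPendingRequest(userId) {
  const cache = getLogStores(CacheKeys.PENDING_REQ);
  await cache.set(userId, 1, Time.ONE_MINUTE);
}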
Danny Avila 2025-04-17 00:40:26 -04:00 committed by GitHub
parent 88f4ad7c47
commit 52f146dd97
19 changed files with 69 additions and 53 deletions

api/app/clients/OpenAIClient.js

@@ -108,7 +108,7 @@ class OpenAIClient extends BaseClient {
       this.checkVisionRequest(this.options.attachments);
     }
-    const omniPattern = /\b(o1|o3)\b/i;
+    const omniPattern = /\b(o\d)\b/i;
     this.isOmni = omniPattern.test(this.modelOptions.model);
     const { OPENAI_FORCE_PROMPT } = process.env ?? {};
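In effect, the broadened pattern lets any single o-series generation (o1, o3, o4, ...) set isOmni, which is what allows o4-mini through. A quick check; the example model names below are assumptions for illustration:

const oldPattern = /\b(o1|o3)\b/i;
const newPattern = /\b(o\d)\b/i;

console.log(oldPattern.test('o4-mini')); // false: only o1/o3 matched before
console.log(newPattern.test('o4-mini')); // true:  any 'o' followed by a digit now matches
console.log(newPattern.test('o3'));      // true
console.log(newPattern.test('gpt-4o'));  // false: no digit follows the 'o', and '4o' has no word boundary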
@@ -1237,6 +1237,9 @@ ${convo}
       modelOptions.max_completion_tokens = modelOptions.max_tokens;
       delete modelOptions.max_tokens;
     }
+    if (this.isOmni === true && modelOptions.temperature != null) {
+      delete modelOptions.temperature;
+    }
     if (process.env.OPENAI_ORGANIZATION) {
       opts.organization = process.env.OPENAI_ORGANIZATION;
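Taken together with the earlier max_completion_tokens block, the request options for an o-series model are adjusted roughly as sketched below; the adjustOmniOptions wrapper and the guard around max_tokens are reconstructions for illustration, not the actual client code path:

// Simplified sketch of the adjustments applied when `isOmni` is true.
function adjustOmniOptions(modelOptions, isOmni) {
  if (isOmni && modelOptions.max_tokens != null) {
    // o-series models take `max_completion_tokens` rather than `max_tokens`
    modelOptions.max_completion_tokens = modelOptions.max_tokens;
    delete modelOptions.max_tokens;
  }
  if (isOmni && modelOptions.temperature != null) {
    // the commit drops `temperature` entirely for these models
    delete modelOptions.temperature;
  }
  return modelOptions;
}

// -> { model: 'o4-mini', max_completion_tokens: 1024 }
console.log(adjustOmniOptions({ model: 'o4-mini', max_tokens: 1024, temperature: 0.7 }, true));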